Compare commits
3 Commits
e7e955634f...

| Author | SHA1 | Date |
|---|---|---|
| pankaj-dev | 5b557efd80 | |
| pankaj-dev | 71ce8ca819 | |
| pankaj-dev | 68a694d2c7 | |
.env (2 changes)
@@ -20,7 +20,7 @@ DB_HOST=127.0.0.1
 DB_PORT=3306
 DB_NAME=comparisondb
 DB_USER=root
-DB_PASSWORD=admin
+DB_PASSWORD=root

 # DATABASE_URL=mysql+pymysql://root:root@localhost/comparisondb

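For context, the commented-out DATABASE_URL above is just these variables assembled into one SQLAlchemy URI. A small sketch of that assembly (an assumption for illustration; the app's actual Config class is not part of this diff):

```python
import os

# Defaults mirror the .env values above; the real app presumably reads these
# through app.config.Config rather than directly like this.
db_user = os.getenv("DB_USER", "root")
db_password = os.getenv("DB_PASSWORD", "root")
db_host = os.getenv("DB_HOST", "127.0.0.1")
db_port = os.getenv("DB_PORT", "3306")
db_name = os.getenv("DB_NAME", "comparisondb")

database_url = f"mysql+pymysql://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}"
print(database_url)  # mysql+pymysql://root:root@127.0.0.1:3306/comparisondb
```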
Dockerfile (24 changes)
@@ -1,24 +0,0 @@
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    gcc \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Create necessary directories
RUN mkdir -p app/logs app/static/uploads app/static/downloads

# Expose port
EXPOSE 5000

# Run the application
CMD ["python", "run.py"]

@@ -1,6 +1,3 @@
import logging
import logging.handlers
import os
from flask import Flask, redirect, url_for
from app.config import Config
from app.services.db_service import db

@@ -12,9 +9,6 @@ def create_app():
    # Initialize extensions
    db.init_app(app)

    # Configure logging
    setup_logging(app)

    # Register blueprints
    register_blueprints(app)
    # Register error handlers

@@ -28,68 +22,6 @@ def create_app():
    return app


def setup_logging(app):
    """Configure comprehensive logging for debugging"""

    # Ensure logs directory exists
    logs_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'logs')
    os.makedirs(logs_dir, exist_ok=True)

    # Create logger
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    # Remove existing handlers
    logger.handlers.clear()

    # Create formatters
    detailed_formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    # File handler for all logs
    file_handler = logging.handlers.RotatingFileHandler(
        os.path.join(logs_dir, 'app.log'),
        maxBytes=10485760,  # 10MB
        backupCount=5
    )
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(detailed_formatter)
    logger.addHandler(file_handler)

    # File handler for RA bill fetching only
    ra_bill_handler = logging.handlers.RotatingFileHandler(
        os.path.join(logs_dir, 'ra_bill_fetch.log'),
        maxBytes=5242880,  # 5MB
        backupCount=5
    )
    ra_bill_handler.setLevel(logging.DEBUG)
    ra_bill_handler.setFormatter(detailed_formatter)

    # Only attach to relevant loggers
    ra_loggers = ['app.routes.dashboard', 'app.routes.file_report']
    for logger_name in ra_loggers:
        logging.getLogger(logger_name).addHandler(ra_bill_handler)

    # Console handler for important messages
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_formatter = logging.Formatter(
        '%(levelname)s - %(name)s - %(message)s'
    )
    console_handler.setFormatter(console_formatter)
    logger.addHandler(console_handler)

    # Suppress verbose libraries
    logging.getLogger('werkzeug').setLevel(logging.WARNING)
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)

    app.logger.info("Logging initialized successfully")
    app.logger.info(f"Log files location: {logs_dir}")


def register_blueprints(app):
    from app.routes.auth import auth_bp
    from app.routes.user import user_bp

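The removed setup_logging() attaches ra_bill_fetch.log only to two named loggers while every record still propagates to the root handlers (app.log and the console). A minimal, self-contained sketch of that routing, with plain handlers standing in for the rotating files:

```python
import logging

# Root handler: every record propagates here (stand-in for app.log / console).
root = logging.getLogger()
root.setLevel(logging.DEBUG)
root.addHandler(logging.StreamHandler())

# Extra handler attached only to one named logger (stand-in for ra_bill_fetch.log).
ra_handler = logging.FileHandler("ra_bill_fetch.log")
logging.getLogger("app.routes.dashboard").addHandler(ra_handler)

logging.getLogger("app.routes.dashboard").debug("written to the console AND ra_bill_fetch.log")
logging.getLogger("app.routes.user").debug("written to the console only")
```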
@@ -1,308 +1,87 @@
import logging
from flask import Blueprint, render_template, session, redirect, url_for, jsonify, request
from sqlalchemy import func, union_all
from app import db
from app.models.trench_excavation_model import TrenchExcavation
from app.models.tr_ex_client_model import TrenchExcavationClient
from app.models.manhole_excavation_model import ManholeExcavation
from app.models.mh_ex_client_model import ManholeExcavationClient
from app.models.laying_model import Laying
from app.models.laying_client_model import LayingClient
from app.models.subcontractor_model import Subcontractor
import matplotlib
matplotlib.use("Agg")

from flask import Blueprint, render_template, session, redirect, url_for
import matplotlib.pyplot as plt
import io
import base64
from app.utils.plot_utils import plot_to_base64
from app.services.dashboard_service import DashboardService

dashboard_bp = Blueprint("dashboard", __name__, url_prefix="/dashboard")

# Configure logging for debugging
logger = logging.getLogger(__name__)
# dashboard_bp = Blueprint("dashboard", __name__)

# API to get dynamic filters from database
@dashboard_bp.route("/api/filters")
def get_filters():
try:
logger.info("=" * 60)
logger.info("FETCHING RA BILLS - START")
logger.info("=" * 60)
# charts
# def plot_to_base64():
# img = io.BytesIO()
# plt.savefig(img, format="png", bbox_inches="tight")
# plt.close()
# img.seek(0)
# return base64.b64encode(img.getvalue()).decode()

# 1. Fetch Subcontractors (Linked to Trench entries)
logger.info("Step 1: Fetching Subcontractors...")
subcontractors = db.session.query(Subcontractor.subcontractor_name)\
.join(TrenchExcavation, Subcontractor.id == TrenchExcavation.subcontractor_id)\
.distinct().all()
logger.info(f"✓ Subcontractors found: {len(subcontractors)}")
logger.debug(f" Subcontractor list: {[s[0] for s in subcontractors if s[0]]}")
# bar chart
def bar_chart():
categories = ["Trench", "Manhole", "Pipe Laying", "Restoration"]
values = [120, 80, 150, 60]

# 2. Check total records in TrenchExcavation table
logger.info("Step 2: Checking TrenchExcavation table...")
total_records = db.session.query(TrenchExcavation).count()
logger.info(f"✓ Total TrenchExcavation records: {total_records}")
plt.figure()
plt.bar(categories, values)
plt.title("Work Category Report")
plt.xlabel("test Category")
plt.ylabel("test Quantity")

# 3. Check records with RA_Bill_No
logger.info("Step 3: Checking records with RA_Bill_No...")
records_with_ra = db.session.query(TrenchExcavation).filter(TrenchExcavation.RA_Bill_No != None).count()
logger.info(f"✓ Records with RA_Bill_No (not null): {records_with_ra}")

# 4. Check for empty strings
records_with_ra_and_value = db.session.query(TrenchExcavation).filter(
TrenchExcavation.RA_Bill_No != None,
TrenchExcavation.RA_Bill_No != ""
).count()
logger.info(f"✓ Records with RA_Bill_No (not null & not empty): {records_with_ra_and_value}")
return plot_to_base64(plt)

# 5. Raw sample of RA_Bill_No values
logger.info("Step 4: Sampling RA_Bill_No values from database...")
sample_bills = db.session.query(TrenchExcavation.RA_Bill_No).limit(10).all()
logger.debug(f" Sample RA_Bill_No values (Subcontractor): {[str(r[0]) for r in sample_bills]}")
# Pie chart
def pie_chart():
labels = ["Completed", "In Progress", "Pending"]
sizes = [55, 20, 25]

sample_bills_client = db.session.query(TrenchExcavationClient.RA_Bill_No).limit(10).all()
logger.debug(f" Sample RA_Bill_No values (Client): {[str(r[0]) for r in sample_bills_client]}")
plt.figure()
plt.pie(sizes, labels=labels, autopct="%1.1f%%", startangle=140)
plt.title("Project Status")

# 6. Fetch RA Bills from BOTH Subcontractor and Client tables
logger.info("Step 5: Fetching distinct RA Bills from both Subcontractor and Client data...")
return plot_to_base64(plt)

# Get RA bills from Subcontractor data
subcon_ra_bills = db.session.query(TrenchExcavation.RA_Bill_No)\
.filter(TrenchExcavation.RA_Bill_No != None)\
.filter(TrenchExcavation.RA_Bill_No != "")\
.distinct()
# Histogram chart
def histogram_chart():
daily_work = [5, 10, 15, 20, 20, 25, 30, 35, 40, 45, 50]

logger.debug(f" Subcontractor RA Bills (before union): {len(subcon_ra_bills.all())}")
plt.figure()
plt.hist(daily_work, bins=5)
plt.title("Daily Work Distribution")
plt.xlabel("Work Units")
plt.ylabel("Frequency")

# Get RA bills from Client data
client_ra_bills = db.session.query(TrenchExcavationClient.RA_Bill_No)\
.filter(TrenchExcavationClient.RA_Bill_No != None)\
.filter(TrenchExcavationClient.RA_Bill_No != "")\
.distinct()

logger.debug(f" Client RA Bills (before union): {len(client_ra_bills.all())}")

# Union both queries to get all unique RA bills
ra_bills_union = db.session.query(TrenchExcavation.RA_Bill_No)\
.filter(TrenchExcavation.RA_Bill_No != None)\
.filter(TrenchExcavation.RA_Bill_No != "")\
.union(
db.session.query(TrenchExcavationClient.RA_Bill_No)\
.filter(TrenchExcavationClient.RA_Bill_No != None)\
.filter(TrenchExcavationClient.RA_Bill_No != "")
).order_by(TrenchExcavation.RA_Bill_No).all()

logger.info(f"✓ Distinct RA Bills found (Combined): {len(ra_bills_union)}")
ra_bills_list = [r[0] for r in ra_bills_union if r[0]]
logger.info(f" RA Bills list: {ra_bills_list}")

# 7. Debug: Check data types
if ra_bills_union:
logger.debug(f" First RA Bill value: {ra_bills_union[0][0]}")
logger.debug(f" First RA Bill type: {type(ra_bills_union[0][0])}")

response = {
"subcontractors": [s[0] for s in subcontractors if s[0]],
"ra_bills": ra_bills_list
}

logger.info(f"✓ Response prepared successfully")
logger.info("=" * 60)

return jsonify(response)

except Exception as e:
logger.error("=" * 60)
logger.error(f"ERROR in get_filters(): {str(e)}")
logger.error(f"Error type: {type(e).__name__}")
logger.exception("Full traceback:")
logger.error("=" * 60)
return jsonify({"error": str(e)}), 500

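The hunk above interleaves the removed /api/filters endpoint with the new chart helpers. Distilled from it, the RA-bill lookup unions the distinct, non-empty RA_Bill_No values of the subcontractor and client tables. A self-contained sketch against an in-memory SQLite database (SubconRow/ClientRow are stand-ins for the real models):

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class SubconRow(Base):              # stand-in for TrenchExcavation
    __tablename__ = "subcon"
    id = Column(Integer, primary_key=True)
    RA_Bill_No = Column(String)

class ClientRow(Base):              # stand-in for TrenchExcavationClient
    __tablename__ = "client"
    id = Column(Integer, primary_key=True)
    RA_Bill_No = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([SubconRow(RA_Bill_No="RA-01"), SubconRow(RA_Bill_No=""),
                     ClientRow(RA_Bill_No="RA-01"), ClientRow(RA_Bill_No="RA-02")])
    session.commit()

    # Distinct, non-empty bills from both tables; union() de-duplicates across them.
    bills = session.query(SubconRow.RA_Bill_No)\
        .filter(SubconRow.RA_Bill_No != None, SubconRow.RA_Bill_No != "")\
        .union(
            session.query(ClientRow.RA_Bill_No)
            .filter(ClientRow.RA_Bill_No != None, ClientRow.RA_Bill_No != "")
        ).all()

    print(sorted(b[0] for b in bills))   # ['RA-01', 'RA-02']
```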

# API for the live abstract data - handles multiple table types
@dashboard_bp.route("/api/excavation-abstract")
def excavation_abstract():
try:
logger.info("=" * 60)
logger.info("EXCAVATION ABSTRACT FETCH - START")
logger.info("=" * 60)

table_type = request.args.get('table_type', 'trench')
subcon_name = request.args.get('subcontractor', 'All')
ra_bill = request.args.get('ra_bill', 'Cumulative')

logger.info(f"Request Parameters:")
logger.info(f" Table Type: {table_type}")
logger.info(f" Subcontractor: {subcon_name}")
logger.info(f" RA Bill: {ra_bill}")

# Select models and match keys based on table type
if table_type == 'trench':
SubconModel = TrenchExcavation
ClientModel = TrenchExcavationClient
table_label = "Trench Excavation"
location_key = 'Location'
mh_key = 'MH_NO'
excavation_columns = [
("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5"),
("Soft Murum", "1.5-3.0m", "Soft_Murum_1_5_to_3_0"),
("Soft Murum", "3.0-4.5m", "Soft_Murum_3_0_to_4_5"),
("Hard Murum", "0-1.5m", "Hard_Murum_0_to_1_5"),
("Hard Murum", "1.5-3.0m", "Hard_Murum_1_5_to_3_0"),
("Soft Rock", "0-1.5m", "Soft_Rock_0_to_1_5"),
("Soft Rock", "1.5-3.0m", "Soft_Rock_1_5_to_3_0"),
("Hard Rock", "0-1.5m", "Hard_Rock_0_to_1_5"),
("Hard Rock", "1.5-3.0m", "Hard_Rock_1_5_to_3_0"),
("Hard Rock", "3.0-4.5m", "Hard_Rock_3_0_to_4_5"),
("Hard Rock", "4.5-6.0m", "Hard_Rock_4_5_to_6_0"),
("Hard Rock", "6.0-7.5m", "Hard_Rock_6_0_to_7_5"),
]
elif table_type == 'manhole':
SubconModel = ManholeExcavation
ClientModel = ManholeExcavationClient
table_label = "Manhole Excavation"
location_key = 'Location'
mh_key = 'MH_NO'
excavation_columns = [
("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5"),
("Soft Murum", "1.5-3.0m", "Soft_Murum_1_5_to_3_0"),
("Hard Murum", "0-1.5m", "Hard_Murum_0_to_1_5"),
("Hard Murum", "1.5-3.0m", "Hard_Murum_1_5_to_3_0"),
("Soft Rock", "0-1.5m", "Soft_Rock_0_to_1_5"),
("Soft Rock", "1.5-3.0m", "Soft_Rock_1_5_to_3_0"),
("Hard Rock", "0-1.5m", "Hard_Rock_0_to_1_5"),
("Hard Rock", "1.5-3.0m", "Hard_Rock_1_5_to_3_0"),
]
elif table_type == 'laying':
SubconModel = Laying
ClientModel = LayingClient
table_label = "Laying"
location_key = 'Location'
mh_key = 'MH_NO'
excavation_columns = [
("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5"),
("Soft Murum", "1.5-3.0m", "Soft_Murum_1_5_to_3_0"),
("Hard Murum", "0-1.5m", "Hard_Murum_0_to_1_5"),
("Hard Murum", "1.5-3.0m", "Hard_Murum_1_5_to_3_0"),
("Soft Rock", "0-1.5m", "Soft_Rock_0_to_1_5"),
("Soft Rock", "1.5-3.0m", "Soft_Rock_1_5_to_3_0"),
("Hard Rock", "0-1.5m", "Hard_Rock_0_to_1_5"),
("Hard Rock", "1.5-3.0m", "Hard_Rock_1_5_to_3_0"),
]
else:
return jsonify({"error": f"Invalid table_type: {table_type}"}), 400

logger.info(f"Using table: {table_label}")

# ===== FETCH SUBCONTRACTOR DATA =====
logger.info(f"Fetching Subcontractor data ({SubconModel.__tablename__})...")
subcon_query = db.session.query(SubconModel)

# Check if SubconModel has subcontractor relationship
if hasattr(SubconModel, 'subcontractor_id'):
subcon_query = subcon_query.join(
Subcontractor, Subcontractor.id == SubconModel.subcontractor_id
)
if subcon_name != 'All':
subcon_query = subcon_query.filter(Subcontractor.subcontractor_name == subcon_name)

subcon_results = subcon_query.all()
logger.info(f" Found {len(subcon_results)} subcontractor records")

# ===== FETCH CLIENT DATA =====
logger.info(f"Fetching Client data ({ClientModel.__tablename__})...")
client_query = db.session.query(ClientModel)

if ra_bill != 'Cumulative' and hasattr(ClientModel, 'RA_Bill_No'):
client_query = client_query.filter(ClientModel.RA_Bill_No == ra_bill)

client_results = client_query.all()
logger.info(f" Found {len(client_results)} client records")

# ===== MATCH RECORDS BY MH_NO AND LOCATION =====
logger.info("Matching records by MH_NO and Location...")
matched_data = {}

# Build a map of client records by MH_NO + Location
client_map = {}
for client_record in client_results:
mh_no = getattr(client_record, mh_key)
location = getattr(client_record, location_key)
key = f"{location}|{mh_no}"
client_map[key] = client_record

logger.info(f" Client map has {len(client_map)} unique MH_NO+Location combinations")

# Match subcontractor records with client records
match_count = 0
for subcon_record in subcon_results:
mh_no = getattr(subcon_record, mh_key)
location = getattr(subcon_record, location_key)
key = f"{location}|{mh_no}"

# Only process if matching client record exists
if key in client_map:
match_count += 1
client_record = client_map[key]

# Aggregate excavation data for this matched pair
for soil, depth, col_name in excavation_columns:
record_key = f"{soil}|{depth}|{location}|{mh_no}"

# Get values
subcon_val = 0
client_val = 0

if hasattr(subcon_record, col_name):
subcon_val = getattr(subcon_record, col_name) or 0

if hasattr(client_record, col_name):
client_val = getattr(client_record, col_name) or 0

# Only add if at least one has data
if subcon_val > 0 or client_val > 0:
if record_key not in matched_data:
matched_data[record_key] = {
"soil_type": soil,
"depth": depth,
"location": location,
"mh_no": mh_no,
"client_qty": 0,
"subcon_qty": 0
}
matched_data[record_key]["client_qty"] += client_val
matched_data[record_key]["subcon_qty"] += subcon_val

logger.info(f" Matched {match_count} subcontractor records with client records")
logger.info(f" Found {len(matched_data)} excavation items with data")

# Calculate differences and format response
data = []
for key, item in matched_data.items():
difference = item["subcon_qty"] - item["client_qty"]
# Format label as: "Soft Murum 0-1.5m (Location - MH_NO)"
label = f"{item['soil_type']} {item['depth']}"
data.append({
"label": label,
"soil_type": item["soil_type"],
"depth": item["depth"],
"location": item["location"],
"mh_no": item["mh_no"],
"client_qty": round(item["client_qty"], 2),
"subcon_qty": round(item["subcon_qty"], 2),
"difference": round(difference, 2)
})

# Sort by location and mh_no for consistency
data.sort(key=lambda x: (x["location"], x["mh_no"], x["soil_type"], x["depth"]))

logger.info(f"Response prepared with {len(data)} matched records")
logger.info("=" * 60)

return jsonify(data)
except Exception as e:
logger.error("=" * 60)
logger.error(f"ERROR in excavation_abstract(): {str(e)}")
logger.error(f"Error type: {type(e).__name__}")
logger.exception("Full traceback:")
logger.error("=" * 60)
return jsonify({"error": str(e)}), 500
return plot_to_base64(plt)

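The matching step in excavation_abstract() keys both result sets on (Location, MH_NO) and only aggregates pairs that exist on both sides, per soil/depth column. A compact, runnable sketch of that aggregation using hypothetical dict rows in place of the ORM objects:

```python
# Hypothetical rows; the real code reads these attributes from the ORM objects.
client_rows = [{"Location": "Zone-A", "MH_NO": "MH-1", "Soft_Murum_0_to_1_5": 12.0}]
subcon_rows = [{"Location": "Zone-A", "MH_NO": "MH-1", "Soft_Murum_0_to_1_5": 14.5},
               {"Location": "Zone-B", "MH_NO": "MH-9", "Soft_Murum_0_to_1_5": 3.0}]  # unmatched, skipped
columns = [("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5")]

client_map = {(r["Location"], r["MH_NO"]): r for r in client_rows}
matched = {}
for s in subcon_rows:
    c = client_map.get((s["Location"], s["MH_NO"]))
    if not c:
        continue                      # only matched pairs are aggregated
    for soil, depth, col in columns:
        entry = matched.setdefault((soil, depth, s["Location"], s["MH_NO"]),
                                   {"client_qty": 0, "subcon_qty": 0})
        entry["client_qty"] += c.get(col, 0) or 0
        entry["subcon_qty"] += s.get(col, 0) or 0

for key, v in matched.items():
    print(key, v, "difference:", round(v["subcon_qty"] - v["client_qty"], 2))
```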

# Dashboard page
@dashboard_bp.route("/")
def dashboard():
if not session.get("user_id"):
return redirect(url_for("auth.login"))
return render_template("dashboard.html", title="Live Excavation Dashboard")

return render_template(
"dashboard.html",
title="Dashboard",
bar_chart=bar_chart(),
pie_chart=pie_chart(),
histogram=histogram_chart()
)

# subcontractor dashboard
@dashboard_bp.route("/subcontractor_dashboard", methods=["GET", "POST"])
def subcontractor_dashboard():
if not session.get("user_id"):
return redirect(url_for("auth.login"))

tr_dash = DashboardService().bar_chart_of_tr_ex

return render_template(
"subcontractor_dashboard.html",
title="Dashboard",
bar_chart=tr_dash
)

@@ -1,6 +1,5 @@
import pandas as pd
import io
import logging
from flask import Blueprint, render_template, request, send_file, flash
from app.utils.helpers import login_required

@@ -20,9 +19,6 @@ from app.models.laying_client_model import LayingClient
# --- BLUEPRINT DEFINITION ---
file_report_bp = Blueprint("file_report", __name__, url_prefix="/file")

# Configure logging for debugging
logger = logging.getLogger(__name__)

# --- Client class ---
class ClientBill:
def __init__(self):
@@ -32,57 +28,20 @@ class ClientBill:
self.df_laying = pd.DataFrame()

def Fetch(self, RA_Bill_No):
logger.info("=" * 60)
logger.info("ClientBill.Fetch() - START")
logger.info("=" * 60)
logger.info(f"Fetching data for RA_Bill_No: '{RA_Bill_No}'")
logger.debug(f" Type of RA_Bill_No: {type(RA_Bill_No)}")
trench = TrenchExcavationClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
mh = ManholeExcavationClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
dc = ManholeDomesticChamberClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
lay = LayingClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()

try:
logger.info("Step 1: Fetching TrenchExcavationClient records...")
trench = TrenchExcavationClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info(f"✓ Trench records found: {len(trench)}")
self.df_tr = pd.DataFrame([c.serialize() for c in trench])
self.df_mh = pd.DataFrame([c.serialize() for c in mh])
self.df_dc = pd.DataFrame([c.serialize() for c in dc])
self.df_laying = pd.DataFrame([c.serialize() for c in lay])

logger.info("Step 2: Fetching ManholeExcavationClient records...")
mh = ManholeExcavationClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info(f"✓ Manhole records found: {len(mh)}")

logger.info("Step 3: Fetching ManholeDomesticChamberClient records...")
dc = ManholeDomesticChamberClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info(f"✓ Domestic Chamber records found: {len(dc)}")

logger.info("Step 4: Fetching LayingClient records...")
lay = LayingClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info(f"✓ Laying records found: {len(lay)}")

logger.info("Step 5: Converting to DataFrames...")
self.df_tr = pd.DataFrame([c.serialize() for c in trench])
self.df_mh = pd.DataFrame([c.serialize() for c in mh])
self.df_dc = pd.DataFrame([c.serialize() for c in dc])
self.df_laying = pd.DataFrame([c.serialize() for c in lay])

logger.debug(f" Trench DF shape: {self.df_tr.shape}")
logger.debug(f" Manhole DF shape: {self.df_mh.shape}")
logger.debug(f" Domestic Chamber DF shape: {self.df_dc.shape}")
logger.debug(f" Laying DF shape: {self.df_laying.shape}")

logger.info("Step 6: Cleaning DataFrames...")
drop_cols = ["id", "created_at", "_sa_instance_state"]
for df in [self.df_tr, self.df_mh, self.df_dc, self.df_laying]:
if not df.empty:
df.drop(columns=drop_cols, errors="ignore", inplace=True)
logger.debug(f" Cleaned DF with shape: {df.shape}")

logger.info("✓ ClientBill.Fetch() completed successfully")
logger.info("=" * 60)

except Exception as e:
logger.error("=" * 60)
logger.error(f"ERROR in ClientBill.Fetch(): {str(e)}")
logger.error(f"Error type: {type(e).__name__}")
logger.exception("Full traceback:")
logger.error("=" * 60)
raise
drop_cols = ["id", "created_at", "_sa_instance_state"]
for df in [self.df_tr, self.df_mh, self.df_dc, self.df_laying]:
if not df.empty:
df.drop(columns=drop_cols, errors="ignore", inplace=True)

# --- Subcontractor class ---
class SubcontractorBill:
@@ -93,68 +52,26 @@ class SubcontractorBill:
self.df_laying = pd.DataFrame()

def Fetch(self, RA_Bill_No=None, subcontractor_id=None):
logger.info("=" * 60)
logger.info("SubcontractorBill.Fetch() - START")
logger.info("=" * 60)
logger.info(f"Parameters:")
logger.info(f" RA_Bill_No: '{RA_Bill_No}' (type: {type(RA_Bill_No)})")
logger.info(f" subcontractor_id: '{subcontractor_id}' (type: {type(subcontractor_id)})")
filters = {}
if subcontractor_id:
filters["subcontractor_id"] = subcontractor_id
if RA_Bill_No:
filters["RA_Bill_No"] = RA_Bill_No

try:
filters = {}
if subcontractor_id:
filters["subcontractor_id"] = subcontractor_id
logger.debug(f" Added filter - subcontractor_id: {subcontractor_id}")
if RA_Bill_No:
filters["RA_Bill_No"] = RA_Bill_No
logger.debug(f" Added filter - RA_Bill_No: {RA_Bill_No}")
trench = TrenchExcavation.query.filter_by(**filters).all()
mh = ManholeExcavation.query.filter_by(**filters).all()
dc = ManholeDomesticChamber.query.filter_by(**filters).all()
lay = Laying.query.filter_by(**filters).all()

logger.info(f"Applied filters: {filters}")
self.df_tr = pd.DataFrame([c.serialize() for c in trench])
self.df_mh = pd.DataFrame([c.serialize() for c in mh])
self.df_dc = pd.DataFrame([c.serialize() for c in dc])
self.df_laying = pd.DataFrame([c.serialize() for c in lay])

logger.info("Step 1: Fetching TrenchExcavation records...")
trench = TrenchExcavation.query.filter_by(**filters).all()
logger.info(f"✓ Trench records found: {len(trench)}")

logger.info("Step 2: Fetching ManholeExcavation records...")
mh = ManholeExcavation.query.filter_by(**filters).all()
logger.info(f"✓ Manhole records found: {len(mh)}")

logger.info("Step 3: Fetching ManholeDomesticChamber records...")
dc = ManholeDomesticChamber.query.filter_by(**filters).all()
logger.info(f"✓ Domestic Chamber records found: {len(dc)}")

logger.info("Step 4: Fetching Laying records...")
lay = Laying.query.filter_by(**filters).all()
logger.info(f"✓ Laying records found: {len(lay)}")

logger.info("Step 5: Converting to DataFrames...")
self.df_tr = pd.DataFrame([c.serialize() for c in trench])
self.df_mh = pd.DataFrame([c.serialize() for c in mh])
self.df_dc = pd.DataFrame([c.serialize() for c in dc])
self.df_laying = pd.DataFrame([c.serialize() for c in lay])

logger.debug(f" Trench DF shape: {self.df_tr.shape}")
logger.debug(f" Manhole DF shape: {self.df_mh.shape}")
logger.debug(f" Domestic Chamber DF shape: {self.df_dc.shape}")
logger.debug(f" Laying DF shape: {self.df_laying.shape}")

logger.info("Step 6: Cleaning DataFrames...")
drop_cols = ["id", "created_at", "_sa_instance_state"]
for df in [self.df_tr, self.df_mh, self.df_dc, self.df_laying]:
if not df.empty:
df.drop(columns=drop_cols, errors="ignore", inplace=True)
logger.debug(f" Cleaned DF with shape: {df.shape}")

logger.info("✓ SubcontractorBill.Fetch() completed successfully")
logger.info("=" * 60)

except Exception as e:
logger.error("=" * 60)
logger.error(f"ERROR in SubcontractorBill.Fetch(): {str(e)}")
logger.error(f"Error type: {type(e).__name__}")
logger.exception("Full traceback:")
logger.error("=" * 60)
raise
drop_cols = ["id", "created_at", "_sa_instance_state"]
for df in [self.df_tr, self.df_mh, self.df_dc, self.df_laying]:
if not df.empty:
df.drop(columns=drop_cols, errors="ignore", inplace=True)


# --- subcontractor report only ---

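SubcontractorBill.Fetch() builds its WHERE clause by unpacking an optional-filter dict into filter_by(). A tiny standalone illustration of the unpacking (the filter_by below is a stand-in that just echoes its keyword arguments; the real call is Model.query.filter_by):

```python
def filter_by(**kwargs):
    # Stand-in for Model.query.filter_by: shows which equality conditions
    # the unpacked dict would turn into.
    return kwargs

filters = {}
subcontractor_id = 3        # hypothetical request values
ra_bill_no = "RA-02"

if subcontractor_id:
    filters["subcontractor_id"] = subcontractor_id
if ra_bill_no:
    filters["RA_Bill_No"] = ra_bill_no

print(filter_by(**filters))  # {'subcontractor_id': 3, 'RA_Bill_No': 'RA-02'}
# With an empty dict the call degenerates to an unfiltered query.
```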
@@ -1,5 +1,4 @@
from flask import Blueprint, render_template, request, send_file, flash
from collections import defaultdict
import pandas as pd
import io

@@ -66,19 +65,16 @@ def make_lookup(rows, key_field):
key_val = normalize_key(r.get(key_field))

if location and key_val:
lookup.setdefault((location, key_val), []).append(r)
lookup[(location, key_val)] = r

return lookup


# COMPARISON BUILDER
def build_comparison(client_rows, contractor_rows, key_field):
contractor_lookup = make_lookup(contractor_rows, key_field)
output = []

used_index = defaultdict(int)  # 🔥 THIS FIXES YOUR ISSUE

for c in client_rows:
client_location = normalize_key(c.get("Location"))
client_key = normalize_key(c.get(key_field))
@@ -86,54 +82,46 @@ def build_comparison(client_rows, contractor_rows, key_field):
if not client_location or not client_key:
continue

subs = contractor_lookup.get((client_location, client_key))
if not subs:
s = contractor_lookup.get((client_location, client_key))
if not s:
continue

idx = used_index[(client_location, client_key)]

# ❗ If subcontractor rows are exhausted, skip
if idx >= len(subs):
continue

s = subs[idx]  # ✅ take NEXT subcontractor row
used_index[(client_location, client_key)] += 1

# ---- totals ----
client_total = sum(
float(v or 0)
for k, v in c.items()
if k.endswith("_total")
or D_RANGE_PATTERN.match(k)
or PIPE_MM_PATTERN.match(k)
if k.endswith("_total") or D_RANGE_PATTERN.match(k) or PIPE_MM_PATTERN.match(k)
)

sub_total = sum(
float(v or 0)
for k, v in s.items()
if k.endswith("_total")
or D_RANGE_PATTERN.match(k)
or PIPE_MM_PATTERN.match(k)
if k.endswith("_total") or D_RANGE_PATTERN.match(k) or PIPE_MM_PATTERN.match(k)
)

diff = client_total - sub_total

row = {
"Location": client_location,
key_field.replace("_", " "): client_key
}

# CLIENT DATA
for k, v in c.items():
if k not in ["id", "created_at"]:
row[f"Client-{k}"] = v
if k in ["id", "created_at"]:
continue
row[f"Client-{k}"] = v

row["Client-Total"] = round(client_total, 2)
row[" "] = ""

# SUBCONTRACTOR DATA
for k, v in s.items():
if k not in ["id", "created_at", "subcontractor_id"]:
row[f"Subcontractor-{k}"] = v
if k in ["id", "created_at", "subcontractor_id"]:
continue
row[f"Subcontractor-{k}"] = v

row["Subcontractor-Total"] = round(sub_total, 2)
row["Diff"] = round(client_total - sub_total, 2)
row["Diff"] = round(diff, 2)

output.append(row)

@@ -142,8 +130,6 @@ def build_comparison(client_rows, contractor_rows, key_field):
return df

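The change in build_comparison() keeps every contractor row per (Location, key) in a list and walks through them with a used_index counter, so repeated client rows consume successive contractor rows instead of re-using the last one. A self-contained sketch of that pairing with hypothetical rows:

```python
from collections import defaultdict

client_rows = [("Zone-A", "MH-1", 10.0), ("Zone-A", "MH-1", 12.0)]
contractor_rows = [("Zone-A", "MH-1", 9.5), ("Zone-A", "MH-1", 13.0)]

# make_lookup: keep ALL contractor rows per key, not just the last one
lookup = {}
for loc, key, qty in contractor_rows:
    lookup.setdefault((loc, key), []).append(qty)

used_index = defaultdict(int)   # how many contractor rows already consumed per key
for loc, key, client_qty in client_rows:
    subs = lookup.get((loc, key))
    if not subs:
        continue
    idx = used_index[(loc, key)]
    if idx >= len(subs):
        continue                # contractor rows exhausted for this key
    sub_qty = subs[idx]         # take the NEXT unconsumed contractor row
    used_index[(loc, key)] += 1
    print(loc, key, "client:", client_qty, "contractor:", sub_qty,
          "diff:", round(client_qty - sub_qty, 2))
```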
# EXCEL SHEET WRITER
def write_sheet(writer, df, sheet_name, subcontractor_name):
workbook = writer.book
@@ -349,4 +335,3 @@ def comparison_report():
# df = pd.DataFrame(output)
# df.columns = [format_header(col) for col in df.columns]
# return df

@@ -38,7 +38,7 @@
<!-- Dashboard -->
<li class="nav-item">
<a class="nav-link" href="/dashboard">
<i class="bi bi-speedometer2 me-1"></i> Dashboard - Anish
<i class="bi bi-speedometer2 me-1"></i> Dashboard
</a>
</li>

@@ -1,375 +1,87 @@
{% extends "base.html" %}

{% block content %}
<div class="container-fluid py-4" style="background-color: #f8f9fa;">
<h3 class="mb-4 fw-bold text-uppercase">Abstract Excavation Dashboard</h3>

<div class="card shadow-sm mb-4">
<div class="card-body bg-white">
<div class="row g-3">
<div class="col-md-2">
<label class="form-label fw-bold">Comparison Type</label>
<select id="filter-table" class="form-select" onchange="loadDashboardData()">
<option value="trench">Trench Excavation</option>
<option value="manhole">Manhole Excavation</option>
<option value="laying">Laying</option>
</select>
</div>
<div class="col-md-3">
<label class="form-label fw-bold">Subcontractor</label>
<select id="filter-subcon" class="form-select" onchange="loadDashboardData()">
<option value="All">All Subcontractors</option>
</select>
</div>
<div class="col-md-3">
<label class="form-label fw-bold">RA Bill No</label>
<select id="filter-ra" class="form-select" onchange="loadDashboardData()">
<option value="Cumulative">Cumulative (All Bills)</option>
</select>
</div>
<div class="col-md-4 d-flex align-items-end gap-2">
<button class="btn btn-primary flex-grow-1" onclick="loadDashboardData()">🔄 Refresh</button>
<button class="btn btn-secondary flex-grow-1" onclick="clearDashboard()">🗑️ Clear</button>
<div class="container-fluid px-2 px-md-4">

<h4 class="mb-3 text-center text-md-start">Comparison Software Solapur(UGD)</h4>

<!-- Summary Cards -->
<div class="row g-3 mb-4">

<!-- Total Work -->
<div class="col-12 col-md-4">
<div class="card text-white bg-primary shadow h-100">
<div class="card-body text-center text-md-start">
<h6>Test Total Work</h6>
<h3 class="fw-bold">30%</h3>
</div>
</div>
</div>
</div>

<!-- Empty State (Shown on page load) -->
<div id="empty-state" class="alert alert-info text-center py-5">
<h5>📊 Select filters to display data</h5>
<p>Choose a Subcontractor and/or RA Bill to see the excavation abstract comparison.</p>
</div>

<!-- Data Display Area (Hidden by default) -->
<div id="data-area" style="display: none;">
<div class="row">
<div class="col-lg-8">
<div class="card shadow-sm h-100">
<div class="card-header bg-primary text-white fw-bold d-flex justify-content-between align-items-center">
<span id="chart-title">Excavation Comparison: Client vs Subcontractor Qty</span>
<small class="fw-normal">(Horizontal Bar Chart)</small>
</div>
<div class="card-body" style="position: relative; height: 700px; overflow-y: auto;">
<canvas id="groupedBarChart"></canvas>
</div>
</div>
</div>

<div class="col-lg-4">
<div class="card shadow-sm h-100">
<div class="card-header bg-success text-white fw-bold">Excavation Abstract Table</div>
<div class="card-body p-0">
<div class="table-responsive" style="max-height: 500px; overflow-y: auto;">
<table class="table table-hover mb-0" id="abstract-table">
<thead class="table-light sticky-top">
<tr>
<th class="small">Soil / Depth</th>
<th class="small">Client (m³)</th>
<th class="small">Subcon (m³)</th>
<th class="small">Diff</th>
</tr>
</thead>
<tbody></tbody>
<tfoot class="table-light fw-bold position-sticky bottom-0">
<tr id="table-totals" class="bg-light"></tr>
</tfoot>
</table>
</div>
</div>
<!-- Completed -->
<div class="col-12 col-md-4">
<div class="card text-white bg-success shadow h-100">
<div class="card-body text-center text-md-start">
<h6>test Completed</h6>
<h3 class="fw-bold">35%</h3>
</div>
</div>
</div>

<!-- Pending -->
<div class="col-12 col-md-4">
<div class="card text-dark bg-warning shadow h-100">
<div class="card-body text-center text-md-start">
<h6>Pending</h6>
<h3 class="fw-bold">35%</h3>
</div>
</div>
</div>

</div>

<!-- Charts -->
<div class="row g-3">

<!-- Bar Chart -->
<div class="col-12 col-md-6">
<div class="card shadow-sm h-100">
<div class="card-header bg-dark text-white text-center text-md-start">
Work Category Bar Chart
</div>
<div class="card-body text-center">
<img src="data:image/png;base64,{{ bar_chart }}" class="img-fluid" style="max-height:300px;">
</div>
</div>
</div>

<!-- Pie Chart -->
<div class="col-12 col-md-6">
<div class="card shadow-sm h-100">
<div class="card-header bg-dark text-white text-center text-md-start">
Project Status Pie Chart
</div>
<div class="card-body text-center">
<img src="data:image/png;base64,{{ pie_chart }}" class="img-fluid" style="max-height:300px;">
</div>
</div>
</div>

<!-- Histogram -->
<div class="col-12">
<div class="card shadow-sm">
<div class="card-header bg-dark text-white text-center text-md-start">
Daily Work Histogram
</div>
<div class="card-body text-center">
<img src="data:image/png;base64,{{ histogram }}" class="img-fluid" style="max-height:350px;">
</div>
</div>
</div>

</div>

</div>

<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>

<script>
let comparisonChart;

// Define color palette - ONLY 2 COLORS
const colorPalette = {
'client': '#003D7A', // Dark Blue for Client (RA Bill)
'subcon': '#87CEEB' // Light Sky Blue for Subcontractor
};

// 1. Function to Initialize or Update the Chart (VERTICAL BARS - 2 COLORS ONLY)
function updateChartUI(labels, clientData, subconData) {
const ctx = document.getElementById('groupedBarChart').getContext('2d');
if (comparisonChart) comparisonChart.destroy();

comparisonChart = new Chart(ctx, {
type: 'bar', // Vertical bar chart
data: {
labels: labels,
datasets: [
{
label: 'Client Qty (m³)',
data: clientData,
backgroundColor: colorPalette.client,
borderColor: '#001F4D',
borderWidth: 1,
borderRadius: 4,
hoverBackgroundColor: '#002A5C',
hoverBorderWidth: 2
},
{
label: 'Subcontractor Qty (m³)',
data: subconData,
backgroundColor: colorPalette.subcon,
borderColor: '#4A90B8',
borderWidth: 1,
borderRadius: 4,
hoverBackgroundColor: '#6BB3D9',
hoverBorderWidth: 2
}
]
},
options: {
indexAxis: 'x', // Vertical bars (default)
responsive: true,
maintainAspectRatio: false,
interaction: {
intersect: false,
mode: 'index'
},
plugins: {
legend: {
position: 'top',
labels: {
font: { size: 14, weight: 'bold' },
padding: 15,
usePointStyle: true,
boxWidth: 15
}
},
tooltip: {
backgroundColor: '#2c3e50',
padding: 12,
titleFont: { size: 13, weight: 'bold' },
bodyFont: { size: 12 },
borderColor: '#fff',
borderWidth: 1,
displayColors: true,
callbacks: {
label: function(context) {
let label = context.dataset.label || '';
if (label) label += ': ';
label += Number(context.parsed.y).toLocaleString('en-IN', {
minimumFractionDigits: 2,
maximumFractionDigits: 2
}) + ' m³';
return label;
}
}
}
},
scales: {
x: {
stacked: false,
grid: {
display: false
},
ticks: {
font: { size: 11 },
maxRotation: 45,
minRotation: 0
}
},
y: {
stacked: false,
beginAtZero: true,
grid: {
color: '#ecf0f1',
drawBorder: false
},
ticks: {
font: { size: 11 },
callback: function(value) {
return Number(value).toLocaleString('en-IN');
}
},
title: {
display: true,
text: 'Excavation Quantity (m³)',
font: { size: 12, weight: 'bold' }
}
}
}
}
});
}

// 2. Function to fetch unique filters (Subcontractors & RA Bills) from DB
function loadFilters() {
console.log("🔄 Loading filters from /dashboard/api/filters...");
fetch('/dashboard/api/filters')
.then(res => {
console.log(`Response status: ${res.status}`);
return res.json();
})
.then(data => {
console.log("✓ Filter data received:", data);

const raSelect = document.getElementById('filter-ra');

// CRITICAL: This clears the "RA-01", "RA-02" you typed in manually
raSelect.innerHTML = '<option value="Cumulative">Cumulative (All Bills)</option>';

if (data.ra_bills && data.ra_bills.length > 0) {
console.log(`Adding ${data.ra_bills.length} RA bills to dropdown`);
data.ra_bills.forEach(billNo => {
let opt = document.createElement('option');
opt.value = billNo;
opt.innerText = billNo; // This will show exactly what's in the DB
raSelect.appendChild(opt);
console.log(` + Added RA Bill: ${billNo}`);
});
} else {
console.warn("❌ No RA bills found in response");
}

// Repeat same for subcontractor dropdown
const subconSelect = document.getElementById('filter-subcon');
subconSelect.innerHTML = '<option value="All">All Subcontractors</option>';
if (data.subcontractors && data.subcontractors.length > 0) {
data.subcontractors.forEach(name => {
let opt = document.createElement('option');
opt.value = name;
opt.innerText = name;
subconSelect.appendChild(opt);
});
}
console.log("✓ Filters loaded successfully");
})
.catch(err => {
console.error("❌ Error loading filters:", err);
});
}
// 3. Main function to load data and reflect in UI
function loadDashboardData() {
const tableType = document.getElementById('filter-table').value;
const subcon = document.getElementById('filter-subcon').value;
const ra = document.getElementById('filter-ra').value;

console.log(`📊 Filter values: Table="${tableType}", Subcon="${subcon}", RA="${ra}"`);

// If still on default values, don't load
if (subcon === 'All' && ra === 'Cumulative') {
console.warn("⚠️ Please select filters first");
return;
}

// Update chart title
const tableNames = {
'trench': 'Trench Excavation',
'manhole': 'Manhole Excavation',
'laying': 'Laying'
};
const chartTitle = document.getElementById('chart-title');
if (chartTitle) {
chartTitle.textContent = `${tableNames[tableType]}: Client (RA Bill) vs Subcontractor Qty`;
}

console.log(`📊 Loading dashboard data: Table="${tableType}", Subcon="${subcon}", RA="${ra}"`);

const url = `/dashboard/api/excavation-abstract?table_type=${encodeURIComponent(tableType)}&subcontractor=${encodeURIComponent(subcon)}&ra_bill=${encodeURIComponent(ra)}`;
console.log(`Fetching from URL: ${url}`);

fetch(url)
.then(res => {
console.log(`Response status: ${res.status}`);
if (!res.ok) {
throw new Error(`HTTP Error: ${res.status}`);
}
return res.json();
})
.then(data => {
console.log("✓ Dashboard data received:", data);

if (!Array.isArray(data)) {
console.error("❌ Response is not an array:", data);
return;
}

if (data.length === 0) {
console.warn("⚠️ No data returned for this filter combination");
alert("No data found for selected filters");
return;
}

const labels = [];
const clientData = [];
const subconData = [];
const tableBody = document.querySelector("#abstract-table tbody");
tableBody.innerHTML = "";

let tClient = 0, tSub = 0, tDiff = 0;

data.forEach(item => {
// Label format: "Soil Type Depth"
const label = `${item.soil_type}\n${item.depth}`;
labels.push(label);
clientData.push(item.client_qty || 0);
subconData.push(item.subcon_qty || 0);

tClient += item.client_qty || 0;
tSub += item.subcon_qty || 0;
tDiff += (item.difference || 0);

const diffColor = (item.difference || 0) < 0 ? 'text-danger' : 'text-success';
tableBody.innerHTML += `
<tr>
<td class="small">
<strong>${item.soil_type}</strong>
<br>
<span class="text-muted small">${item.depth}</span>
</td>
<td class="small text-primary fw-bold">${(item.client_qty || 0).toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
<td class="small text-success fw-bold">${(item.subcon_qty || 0).toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
<td class="small fw-bold ${diffColor}">${(item.difference || 0).toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
</tr>
`;
});

const totalDiffColor = tDiff < 0 ? 'text-danger' : 'text-success';
document.getElementById('table-totals').innerHTML = `
<td class="small fw-bold">TOTAL</td>
<td class="small fw-bold text-primary">${tClient.toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
<td class="small fw-bold text-success">${tSub.toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
<td class="small fw-bold ${totalDiffColor}">${tDiff.toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
`;

// Show data area
document.getElementById('empty-state').style.display = 'none';
document.getElementById('data-area').style.display = 'block';

updateChartUI(labels, clientData, subconData);
console.log("✓ Chart and table updated successfully");
})
.catch(err => {
console.error("❌ Error loading dashboard data:", err);
alert(`Failed to load dashboard data: ${err.message}`);
});
}

// Clear dashboard
function clearDashboard() {
console.log("🗑️ Clearing dashboard...");
document.getElementById('filter-table').value = 'trench';
document.getElementById('filter-subcon').value = 'All';
document.getElementById('filter-ra').value = 'Cumulative';
document.getElementById('empty-state').style.display = 'block';
document.getElementById('data-area').style.display = 'none';
if (comparisonChart) comparisonChart.destroy();
}

// Start: Load filters only, don't auto-load data
document.addEventListener("DOMContentLoaded", () => {
console.log("🚀 Dashboard initialized");
loadFilters();
// Don't auto-load data - keep dashboard blank until filters selected
});
</script>
{% endblock %}

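The new template embeds each chart as a base64 data URI, produced by plot_to_base64 from app.utils.plot_utils (its body appears commented out in the dashboard diff). Presumably it amounts to the following runnable sketch:

```python
import base64
import io

import matplotlib
matplotlib.use("Agg")            # headless backend, as set in the dashboard diff
import matplotlib.pyplot as plt

def plot_to_base64(plt_module):
    # Serialize the current figure to PNG and return it base64-encoded,
    # ready to drop into an <img src="data:image/png;base64,..."> tag.
    img = io.BytesIO()
    plt_module.savefig(img, format="png", bbox_inches="tight")
    plt_module.close()
    img.seek(0)
    return base64.b64encode(img.getvalue()).decode()

plt.figure()
plt.bar(["Trench", "Manhole", "Pipe Laying", "Restoration"], [120, 80, 150, 60])
encoded = plot_to_base64(plt)
print(f'<img src="data:image/png;base64,{encoded[:20]}...">')  # truncated preview
```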
@@ -1,46 +0,0 @@
version: '3.8'

services:
  db:
    image: mysql:8.0
    container_name: comparison_db
    restart: always
    environment:
      MYSQL_ROOT_PASSWORD: admin
      MYSQL_DATABASE: comparisondb
    ports:
      - "3307:3306"
    volumes:
      - mysql_data:/var/lib/mysql

  app:
    build: .
    container_name: comparison_app
    restart: always
    environment:
      FLASK_ENV: development
      FLASK_DEBUG: "True"
      FLASK_HOST: "0.0.0.0"
      FLASK_PORT: "5001"

      DB_DIALECT: mysql
      DB_DRIVER: pymysql
      DB_HOST: db
      DB_PORT: 3306
      DB_NAME: comparisondb
      DB_USER: root
      DB_PASSWORD: admin

    ports:
      - "5001:5001"

    depends_on:
      - db

    volumes:
      - ./app/logs:/app/app/logs
      - ./app/static/uploads:/app/app/static/uploads
      - ./app/static/downloads:/app/app/static/downloads

volumes:
  mysql_data:

logs/app.log (3018 changes)
File diff suppressed because it is too large
@@ -1,68 +0,0 @@
#!/usr/bin/env python3
"""
Test script to verify RA bill fetching from both client and subcontractor tables
"""
import sys
import os
sys.path.insert(0, os.path.dirname(__file__))

from app import create_app, db
from app.models.trench_excavation_model import TrenchExcavation
from app.models.tr_ex_client_model import TrenchExcavationClient

app = create_app()

with app.app_context():
    print("=" * 70)
    print("RA BILL FETCHING TEST")
    print("=" * 70)

    # Test 1: Count total records
    print("\n✓ TEST 1: Total Records in Both Tables")
    subcon_count = db.session.query(TrenchExcavation).count()
    client_count = db.session.query(TrenchExcavationClient).count()
    print(f" Subcontractor (TrenchExcavation): {subcon_count} records")
    print(f" Client (TrenchExcavationClient): {client_count} records")

    # Test 2: Check RA bills in Subcontractor table
    print("\n✓ TEST 2: RA Bills in Subcontractor Table")
    subcon_bills = db.session.query(TrenchExcavation.RA_Bill_No)\
        .filter(TrenchExcavation.RA_Bill_No != None)\
        .filter(TrenchExcavation.RA_Bill_No != "")\
        .distinct().all()
    print(f" Distinct RA Bills found: {len(subcon_bills)}")
    print(f" Bills: {[str(r[0]) for r in subcon_bills]}")

    # Test 3: Check RA bills in Client table
    print("\n✓ TEST 3: RA Bills in Client Table")
    client_bills = db.session.query(TrenchExcavationClient.RA_Bill_No)\
        .filter(TrenchExcavationClient.RA_Bill_No != None)\
        .filter(TrenchExcavationClient.RA_Bill_No != "")\
        .distinct().all()
    print(f" Distinct RA Bills found: {len(client_bills)}")
    print(f" Bills: {[str(r[0]) for r in client_bills]}")

    # Test 4: Combined unique RA bills
    print("\n✓ TEST 4: Combined Unique RA Bills (Union)")
    combined_bills = db.session.query(TrenchExcavation.RA_Bill_No)\
        .filter(TrenchExcavation.RA_Bill_No != None)\
        .filter(TrenchExcavation.RA_Bill_No != "")\
        .union(
            db.session.query(TrenchExcavationClient.RA_Bill_No)\
            .filter(TrenchExcavationClient.RA_Bill_No != None)\
            .filter(TrenchExcavationClient.RA_Bill_No != "")
        ).order_by(TrenchExcavation.RA_Bill_No).all()

    print(f" Total unique RA Bills: {len(combined_bills)}")
    print(f" Bills: {[str(r[0]) for r in combined_bills]}")

    # Test 5: Sample data from both tables
    print("\n✓ TEST 5: Sample RA Bills from Tables")
    sample_subcon = db.session.query(TrenchExcavation.RA_Bill_No).limit(5).all()
    sample_client = db.session.query(TrenchExcavationClient.RA_Bill_No).limit(5).all()
    print(f" Subcontractor samples: {[str(r[0]) for r in sample_subcon]}")
    print(f" Client samples: {[str(r[0]) for r in sample_client]}")

    print("\n" + "=" * 70)
    print("✅ TEST COMPLETE")
    print("=" * 70)