Compare commits

...

2 Commits

9 changed files with 4252 additions and 377 deletions

24
Dockerfile Normal file
View File

@@ -0,0 +1,24 @@
# Slim CPython 3.11 base keeps the image small.
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies (gcc is needed to build C extensions pulled in
# by requirements.txt). --no-install-recommends avoids optional packages and
# clearing the apt lists keeps the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first so the dependency layer is cached independently
# of application-code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Create runtime directories the app expects (logs, upload/download folders)
RUN mkdir -p app/logs app/static/uploads app/static/downloads

# Expose the application port
EXPOSE 5000

# Run the application
CMD ["python", "run.py"]

View File

@@ -1,3 +1,6 @@
import logging
import logging.handlers
import os
from flask import Flask, redirect, url_for
from app.config import Config
from app.services.db_service import db
@@ -9,6 +12,9 @@ def create_app():
# Initialize extensions
db.init_app(app)
# Configure logging
setup_logging(app)
# Register blueprints
register_blueprints(app)
# Register error handlers
@@ -22,6 +28,68 @@ def create_app():
return app
def setup_logging(app):
    """Configure comprehensive logging for debugging.

    Sets the root logger to DEBUG and installs:
      * a rotating file handler (10MB x 5 backups) writing every record to
        logs/app.log,
      * a rotating file handler (5MB x 5 backups) attached only to the
        dashboard and file_report route loggers, mirroring their records
        into logs/ra_bill_fetch.log (those records still propagate to the
        root handlers as well),
      * a console handler for INFO and above.

    Also quiets werkzeug and sqlalchemy.engine down to WARNING. Mutates
    global logging state (clears existing root handlers), so it is meant
    to be called once per process, from create_app().
    """
    # Ensure logs directory exists (../logs relative to this package)
    logs_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'logs')
    os.makedirs(logs_dir, exist_ok=True)
    # Create logger — the root logger, so every module's logger feeds it
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    # Remove existing handlers so repeated calls don't duplicate output
    logger.handlers.clear()
    # Create formatters
    detailed_formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    # File handler for all logs
    file_handler = logging.handlers.RotatingFileHandler(
        os.path.join(logs_dir, 'app.log'),
        maxBytes=10485760,  # 10MB
        backupCount=5
    )
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(detailed_formatter)
    logger.addHandler(file_handler)
    # File handler for RA bill fetching only
    ra_bill_handler = logging.handlers.RotatingFileHandler(
        os.path.join(logs_dir, 'ra_bill_fetch.log'),
        maxBytes=5242880,  # 5MB
        backupCount=5
    )
    ra_bill_handler.setLevel(logging.DEBUG)
    ra_bill_handler.setFormatter(detailed_formatter)
    # Only attach to relevant loggers (their records also propagate to root)
    ra_loggers = ['app.routes.dashboard', 'app.routes.file_report']
    for logger_name in ra_loggers:
        logging.getLogger(logger_name).addHandler(ra_bill_handler)
    # Console handler for important messages
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_formatter = logging.Formatter(
        '%(levelname)s - %(name)s - %(message)s'
    )
    console_handler.setFormatter(console_formatter)
    logger.addHandler(console_handler)
    # Suppress verbose libraries
    logging.getLogger('werkzeug').setLevel(logging.WARNING)
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)
    app.logger.info("Logging initialized successfully")
    app.logger.info(f"Log files location: {logs_dir}")
def register_blueprints(app):
from app.routes.auth import auth_bp
from app.routes.user import user_bp

View File

@@ -1,136 +1,308 @@
# import matplotlib
# matplotlib.use("Agg")
# from flask import Blueprint, render_template, session, redirect, url_for
# import matplotlib.pyplot as plt
# import io
# import base64
# from app.utils.plot_utils import plot_to_base64
# from app.services.dashboard_service import DashboardService
# dashboard_bp = Blueprint("dashboard", __name__, url_prefix="/dashboard")
# # dashboard_bp = Blueprint("dashboard", __name__)
# # charts
# # def plot_to_base64():
# # img = io.BytesIO()
# # plt.savefig(img, format="png", bbox_inches="tight")
# # plt.close()
# # img.seek(0)
# # return base64.b64encode(img.getvalue()).decode()
# # bar chart
# def bar_chart():
# categories = ["Trench", "Manhole", "Pipe Laying", "Restoration"]
# values = [120, 80, 150, 60]
# plt.figure()
# plt.bar(categories, values)
# plt.title("Work Category Report")
# plt.xlabel("test Category")
# plt.ylabel("test Quantity")
# return plot_to_base64(plt)
# # Pie chart
# def pie_chart():
# labels = ["Completed", "In Progress", "Pending"]
# sizes = [55, 20, 25]
# plt.figure()
# plt.pie(sizes, labels=labels, autopct="%1.1f%%", startangle=140)
# plt.title("Project Status")
# return plot_to_base64(plt)
# # Histogram chart
# def histogram_chart():
# daily_work = [5, 10, 15, 20, 20, 25, 30, 35, 40, 45, 50]
# plt.figure()
# plt.hist(daily_work, bins=5)
# plt.title("Daily Work Distribution")
# plt.xlabel("Work Units")
# plt.ylabel("Frequency")
# return plot_to_base64(plt)
# # Dashboaed page
# @dashboard_bp.route("/")
# def dashboard():
# if not session.get("user_id"):
# return redirect(url_for("auth.login"))
# return render_template(
# "dashboard.html",
# title="Dashboard",
# bar_chart=bar_chart(),
# pie_chart=pie_chart(),
# histogram=histogram_chart()
# )
# # subcontractor dashboard
# @dashboard_bp.route("/subcontractor_dashboard", methods=["GET", "POST"])
# def subcontractor_dashboard():
# if not session.get("user_id"):
# return redirect(url_for("auth.login"))
# tr_dash = DashboardService().bar_chart_of_tr_ex
# return render_template(
# "subcontractor_dashboard.html",
# title="Dashboard",
# bar_chart=tr_dash
# )
from flask import Blueprint, render_template, session, redirect, url_for, jsonify
from sqlalchemy import func
import logging
from flask import Blueprint, render_template, session, redirect, url_for, jsonify, request
from sqlalchemy import func, union_all
from app import db
from app.models.trench_excavation_model import TrenchExcavation
from app.models.tr_ex_client_model import TrenchExcavationClient
from app.models.manhole_excavation_model import ManholeExcavation
from app.models.mh_ex_client_model import ManholeExcavationClient
from app.models.laying_model import Laying
from app.models.laying_client_model import LayingClient
from app.models.subcontractor_model import Subcontractor
dashboard_bp = Blueprint("dashboard", __name__, url_prefix="/dashboard")
# Configure logging for debugging
logger = logging.getLogger(__name__)


@dashboard_bp.route("/api/live-stats")
def live_stats():
    """Return headline stats: per-table record counts, location
    distribution, and a per-day creation timeline for trench records."""
    try:
        # 1. Overall Volume
        t_count = TrenchExcavation.query.count()
        m_count = ManholeExcavation.query.count()
        l_count = Laying.query.count()
        # 2. Location Distribution (Business reach)
        loc_results = db.session.query(
            TrenchExcavation.Location,
            func.count(TrenchExcavation.id)
        ).group_by(TrenchExcavation.Location).all()
        # 3. Work Timeline (Business productivity trend)
        # NOTE(review): assumes the model has a 'created_at' column — confirm
        timeline_results = db.session.query(
            func.date(TrenchExcavation.created_at),
            func.count(TrenchExcavation.id)
        ).group_by(func.date(TrenchExcavation.created_at)).order_by(func.date(TrenchExcavation.created_at)).all()
        return jsonify({
            "summary": {
                "trench": t_count,
                "manhole": m_count,
                "laying": l_count,
                "total": t_count + m_count + l_count
            },
            "locations": {row[0]: row[1] for row in loc_results if row[0]},
            "timeline": {str(row[0]): row[1] for row in timeline_results}
        })
    except Exception as e:
        logger.exception("ERROR in live_stats()")
        return jsonify({"error": str(e)}), 500


# API to get dynamic filters from database
@dashboard_bp.route("/api/filters")
def get_filters():
    """Return filter options for the dashboard UI: distinct subcontractor
    names (joined to trench entries) and distinct RA bill numbers drawn
    from both the subcontractor and client trench tables."""
    try:
        logger.info("=" * 60)
        logger.info("FETCHING RA BILLS - START")
        logger.info("=" * 60)
        # 1. Fetch Subcontractors (Linked to Trench entries)
        logger.info("Step 1: Fetching Subcontractors...")
        subcontractors = db.session.query(Subcontractor.subcontractor_name)\
            .join(TrenchExcavation, Subcontractor.id == TrenchExcavation.subcontractor_id)\
            .distinct().all()
        logger.info(f"✓ Subcontractors found: {len(subcontractors)}")
        logger.debug(f"  Subcontractor list: {[s[0] for s in subcontractors if s[0]]}")
        # 2. Check total records in TrenchExcavation table
        logger.info("Step 2: Checking TrenchExcavation table...")
        total_records = db.session.query(TrenchExcavation).count()
        logger.info(f"✓ Total TrenchExcavation records: {total_records}")
        # 3. Check records with RA_Bill_No
        logger.info("Step 3: Checking records with RA_Bill_No...")
        records_with_ra = db.session.query(TrenchExcavation).filter(TrenchExcavation.RA_Bill_No != None).count()
        logger.info(f"✓ Records with RA_Bill_No (not null): {records_with_ra}")
        # 4. Check for empty strings
        records_with_ra_and_value = db.session.query(TrenchExcavation).filter(
            TrenchExcavation.RA_Bill_No != None,
            TrenchExcavation.RA_Bill_No != ""
        ).count()
        logger.info(f"✓ Records with RA_Bill_No (not null & not empty): {records_with_ra_and_value}")
        # 5. Raw sample of RA_Bill_No values
        logger.info("Step 4: Sampling RA_Bill_No values from database...")
        sample_bills = db.session.query(TrenchExcavation.RA_Bill_No).limit(10).all()
        logger.debug(f"  Sample RA_Bill_No values (Subcontractor): {[str(r[0]) for r in sample_bills]}")
        sample_bills_client = db.session.query(TrenchExcavationClient.RA_Bill_No).limit(10).all()
        logger.debug(f"  Sample RA_Bill_No values (Client): {[str(r[0]) for r in sample_bills_client]}")
        # 6. Fetch RA Bills from BOTH Subcontractor and Client tables
        logger.info("Step 5: Fetching distinct RA Bills from both Subcontractor and Client data...")
        # Get RA bills from Subcontractor data (debug counts only)
        subcon_ra_bills = db.session.query(TrenchExcavation.RA_Bill_No)\
            .filter(TrenchExcavation.RA_Bill_No != None)\
            .filter(TrenchExcavation.RA_Bill_No != "")\
            .distinct()
        logger.debug(f"  Subcontractor RA Bills (before union): {len(subcon_ra_bills.all())}")
        # Get RA bills from Client data (debug counts only)
        client_ra_bills = db.session.query(TrenchExcavationClient.RA_Bill_No)\
            .filter(TrenchExcavationClient.RA_Bill_No != None)\
            .filter(TrenchExcavationClient.RA_Bill_No != "")\
            .distinct()
        logger.debug(f"  Client RA Bills (before union): {len(client_ra_bills.all())}")
        # Union both queries to get all unique RA bills. Ordering is done in
        # Python below: ORDER BY on a compound UNION select cannot reliably
        # reference TrenchExcavation.RA_Bill_No on all backends.
        ra_bills_union = db.session.query(TrenchExcavation.RA_Bill_No)\
            .filter(TrenchExcavation.RA_Bill_No != None)\
            .filter(TrenchExcavation.RA_Bill_No != "")\
            .union(
                db.session.query(TrenchExcavationClient.RA_Bill_No)\
                .filter(TrenchExcavationClient.RA_Bill_No != None)\
                .filter(TrenchExcavationClient.RA_Bill_No != "")
            ).all()
        logger.info(f"✓ Distinct RA Bills found (Combined): {len(ra_bills_union)}")
        ra_bills_list = sorted(r[0] for r in ra_bills_union if r[0])
        logger.info(f"  RA Bills list: {ra_bills_list}")
        # 7. Debug: Check data types
        if ra_bills_union:
            logger.debug(f"  First RA Bill value: {ra_bills_union[0][0]}")
            logger.debug(f"  First RA Bill type: {type(ra_bills_union[0][0])}")
        response = {
            "subcontractors": [s[0] for s in subcontractors if s[0]],
            "ra_bills": ra_bills_list
        }
        logger.info(f"✓ Response prepared successfully")
        logger.info("=" * 60)
        return jsonify(response)
    except Exception as e:
        logger.error("=" * 60)
        logger.error(f"ERROR in get_filters(): {str(e)}")
        logger.error(f"Error type: {type(e).__name__}")
        logger.exception("Full traceback:")
        logger.error("=" * 60)
        return jsonify({"error": str(e)}), 500
# API for the live abstract data - handles multiple table types
@dashboard_bp.route("/api/excavation-abstract")
def excavation_abstract():
    """Compare subcontractor vs client quantities for one work table.

    Query parameters:
        table_type: 'trench' | 'manhole' | 'laying' (default 'trench')
        subcontractor: subcontractor name to filter by, or 'All'
        ra_bill: RA bill number to filter client data by, or 'Cumulative'

    Matches subcontractor and client records on (Location, MH_NO) — an
    inner join: unmatched records on either side are dropped. For each
    matched pair it sums every soil-type/depth column and returns a JSON
    list of items with client_qty, subcon_qty and their difference.
    Returns 400 for an unknown table_type, 500 with the error message on
    any other failure.
    """
    try:
        logger.info("=" * 60)
        logger.info("EXCAVATION ABSTRACT FETCH - START")
        logger.info("=" * 60)
        table_type = request.args.get('table_type', 'trench')
        subcon_name = request.args.get('subcontractor', 'All')
        ra_bill = request.args.get('ra_bill', 'Cumulative')
        logger.info(f"Request Parameters:")
        logger.info(f"  Table Type: {table_type}")
        logger.info(f"  Subcontractor: {subcon_name}")
        logger.info(f"  RA Bill: {ra_bill}")
        # Select models and match keys based on table type. excavation_columns
        # is a list of (soil label, depth label, model attribute name).
        if table_type == 'trench':
            SubconModel = TrenchExcavation
            ClientModel = TrenchExcavationClient
            table_label = "Trench Excavation"
            location_key = 'Location'
            mh_key = 'MH_NO'
            excavation_columns = [
                ("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5"),
                ("Soft Murum", "1.5-3.0m", "Soft_Murum_1_5_to_3_0"),
                ("Soft Murum", "3.0-4.5m", "Soft_Murum_3_0_to_4_5"),
                ("Hard Murum", "0-1.5m", "Hard_Murum_0_to_1_5"),
                ("Hard Murum", "1.5-3.0m", "Hard_Murum_1_5_to_3_0"),
                ("Soft Rock", "0-1.5m", "Soft_Rock_0_to_1_5"),
                ("Soft Rock", "1.5-3.0m", "Soft_Rock_1_5_to_3_0"),
                ("Hard Rock", "0-1.5m", "Hard_Rock_0_to_1_5"),
                ("Hard Rock", "1.5-3.0m", "Hard_Rock_1_5_to_3_0"),
                ("Hard Rock", "3.0-4.5m", "Hard_Rock_3_0_to_4_5"),
                ("Hard Rock", "4.5-6.0m", "Hard_Rock_4_5_to_6_0"),
                ("Hard Rock", "6.0-7.5m", "Hard_Rock_6_0_to_7_5"),
            ]
        elif table_type == 'manhole':
            SubconModel = ManholeExcavation
            ClientModel = ManholeExcavationClient
            table_label = "Manhole Excavation"
            location_key = 'Location'
            mh_key = 'MH_NO'
            excavation_columns = [
                ("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5"),
                ("Soft Murum", "1.5-3.0m", "Soft_Murum_1_5_to_3_0"),
                ("Hard Murum", "0-1.5m", "Hard_Murum_0_to_1_5"),
                ("Hard Murum", "1.5-3.0m", "Hard_Murum_1_5_to_3_0"),
                ("Soft Rock", "0-1.5m", "Soft_Rock_0_to_1_5"),
                ("Soft Rock", "1.5-3.0m", "Soft_Rock_1_5_to_3_0"),
                ("Hard Rock", "0-1.5m", "Hard_Rock_0_to_1_5"),
                ("Hard Rock", "1.5-3.0m", "Hard_Rock_1_5_to_3_0"),
            ]
        elif table_type == 'laying':
            SubconModel = Laying
            ClientModel = LayingClient
            table_label = "Laying"
            location_key = 'Location'
            mh_key = 'MH_NO'
            # NOTE(review): these look like excavation columns, not pipe-size
            # columns — confirm the Laying models actually carry these fields
            excavation_columns = [
                ("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5"),
                ("Soft Murum", "1.5-3.0m", "Soft_Murum_1_5_to_3_0"),
                ("Hard Murum", "0-1.5m", "Hard_Murum_0_to_1_5"),
                ("Hard Murum", "1.5-3.0m", "Hard_Murum_1_5_to_3_0"),
                ("Soft Rock", "0-1.5m", "Soft_Rock_0_to_1_5"),
                ("Soft Rock", "1.5-3.0m", "Soft_Rock_1_5_to_3_0"),
                ("Hard Rock", "0-1.5m", "Hard_Rock_0_to_1_5"),
                ("Hard Rock", "1.5-3.0m", "Hard_Rock_1_5_to_3_0"),
            ]
        else:
            return jsonify({"error": f"Invalid table_type: {table_type}"}), 400
        logger.info(f"Using table: {table_label}")
        # ===== FETCH SUBCONTRACTOR DATA =====
        logger.info(f"Fetching Subcontractor data ({SubconModel.__tablename__})...")
        subcon_query = db.session.query(SubconModel)
        # Check if SubconModel has subcontractor relationship; only then can
        # the name filter be applied
        if hasattr(SubconModel, 'subcontractor_id'):
            subcon_query = subcon_query.join(
                Subcontractor, Subcontractor.id == SubconModel.subcontractor_id
            )
            if subcon_name != 'All':
                subcon_query = subcon_query.filter(Subcontractor.subcontractor_name == subcon_name)
        subcon_results = subcon_query.all()
        logger.info(f"  Found {len(subcon_results)} subcontractor records")
        # ===== FETCH CLIENT DATA =====
        logger.info(f"Fetching Client data ({ClientModel.__tablename__})...")
        client_query = db.session.query(ClientModel)
        # 'Cumulative' means no RA-bill filter (all bills aggregated)
        if ra_bill != 'Cumulative' and hasattr(ClientModel, 'RA_Bill_No'):
            client_query = client_query.filter(ClientModel.RA_Bill_No == ra_bill)
        client_results = client_query.all()
        logger.info(f"  Found {len(client_results)} client records")
        # ===== MATCH RECORDS BY MH_NO AND LOCATION =====
        logger.info("Matching records by MH_NO and Location...")
        matched_data = {}
        # Build a map of client records by MH_NO + Location. Later records
        # with the same key overwrite earlier ones (last one wins).
        client_map = {}
        for client_record in client_results:
            mh_no = getattr(client_record, mh_key)
            location = getattr(client_record, location_key)
            key = f"{location}|{mh_no}"
            client_map[key] = client_record
        logger.info(f"  Client map has {len(client_map)} unique MH_NO+Location combinations")
        # Match subcontractor records with client records
        match_count = 0
        for subcon_record in subcon_results:
            mh_no = getattr(subcon_record, mh_key)
            location = getattr(subcon_record, location_key)
            key = f"{location}|{mh_no}"
            # Only process if matching client record exists (inner join)
            if key in client_map:
                match_count += 1
                client_record = client_map[key]
                # Aggregate excavation data for this matched pair
                for soil, depth, col_name in excavation_columns:
                    record_key = f"{soil}|{depth}|{location}|{mh_no}"
                    # Get values; None / missing attribute both count as 0
                    subcon_val = 0
                    client_val = 0
                    if hasattr(subcon_record, col_name):
                        subcon_val = getattr(subcon_record, col_name) or 0
                    if hasattr(client_record, col_name):
                        client_val = getattr(client_record, col_name) or 0
                    # Only add if at least one has data
                    if subcon_val > 0 or client_val > 0:
                        if record_key not in matched_data:
                            matched_data[record_key] = {
                                "soil_type": soil,
                                "depth": depth,
                                "location": location,
                                "mh_no": mh_no,
                                "client_qty": 0,
                                "subcon_qty": 0
                            }
                        matched_data[record_key]["client_qty"] += client_val
                        matched_data[record_key]["subcon_qty"] += subcon_val
        logger.info(f"  Matched {match_count} subcontractor records with client records")
        logger.info(f"  Found {len(matched_data)} excavation items with data")
        # Calculate differences and format response
        data = []
        for key, item in matched_data.items():
            # positive difference = subcontractor billed more than client
            difference = item["subcon_qty"] - item["client_qty"]
            # Format label as: "Soft Murum 0-1.5m"
            label = f"{item['soil_type']} {item['depth']}"
            data.append({
                "label": label,
                "soil_type": item["soil_type"],
                "depth": item["depth"],
                "location": item["location"],
                "mh_no": item["mh_no"],
                "client_qty": round(item["client_qty"], 2),
                "subcon_qty": round(item["subcon_qty"], 2),
                "difference": round(difference, 2)
            })
        # Sort by location and mh_no for consistency
        # NOTE(review): assumes Location/MH_NO are non-None, mutually
        # comparable values — a None here would raise TypeError; confirm
        data.sort(key=lambda x: (x["location"], x["mh_no"], x["soil_type"], x["depth"]))
        logger.info(f"Response prepared with {len(data)} matched records")
        logger.info("=" * 60)
        return jsonify(data)
    except Exception as e:
        logger.error("=" * 60)
        logger.error(f"ERROR in excavation_abstract(): {str(e)}")
        logger.error(f"Error type: {type(e).__name__}")
        logger.exception("Full traceback:")
        logger.error("=" * 60)
        return jsonify({"error": str(e)}), 500
@dashboard_bp.route("/")
def dashboard():
    """Render the main dashboard page; redirect anonymous users to login."""
    if not session.get("user_id"):
        return redirect(url_for("auth.login"))
    # Merge residue left two consecutive returns here; keep the newer title.
    return render_template("dashboard.html", title="Live Excavation Dashboard")

View File

@@ -1,5 +1,6 @@
import pandas as pd
import io
import logging
from flask import Blueprint, render_template, request, send_file, flash
from app.utils.helpers import login_required
@@ -19,6 +20,9 @@ from app.models.laying_client_model import LayingClient
# --- BLUEPRINT DEFINITION ---
file_report_bp = Blueprint("file_report", __name__, url_prefix="/file")
# Configure logging for debugging
logger = logging.getLogger(__name__)
# --- Client class ---
class ClientBill:
def __init__(self):
@@ -28,20 +32,57 @@ class ClientBill:
self.df_laying = pd.DataFrame()
def Fetch(self, RA_Bill_No):
trench = TrenchExcavationClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
mh = ManholeExcavationClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
dc = ManholeDomesticChamberClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
lay = LayingClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info("=" * 60)
logger.info("ClientBill.Fetch() - START")
logger.info("=" * 60)
logger.info(f"Fetching data for RA_Bill_No: '{RA_Bill_No}'")
logger.debug(f" Type of RA_Bill_No: {type(RA_Bill_No)}")
try:
logger.info("Step 1: Fetching TrenchExcavationClient records...")
trench = TrenchExcavationClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info(f"✓ Trench records found: {len(trench)}")
logger.info("Step 2: Fetching ManholeExcavationClient records...")
mh = ManholeExcavationClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info(f"✓ Manhole records found: {len(mh)}")
logger.info("Step 3: Fetching ManholeDomesticChamberClient records...")
dc = ManholeDomesticChamberClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info(f"✓ Domestic Chamber records found: {len(dc)}")
logger.info("Step 4: Fetching LayingClient records...")
lay = LayingClient.query.filter_by(RA_Bill_No=RA_Bill_No).all()
logger.info(f"✓ Laying records found: {len(lay)}")
logger.info("Step 5: Converting to DataFrames...")
self.df_tr = pd.DataFrame([c.serialize() for c in trench])
self.df_mh = pd.DataFrame([c.serialize() for c in mh])
self.df_dc = pd.DataFrame([c.serialize() for c in dc])
self.df_laying = pd.DataFrame([c.serialize() for c in lay])
logger.debug(f" Trench DF shape: {self.df_tr.shape}")
logger.debug(f" Manhole DF shape: {self.df_mh.shape}")
logger.debug(f" Domestic Chamber DF shape: {self.df_dc.shape}")
logger.debug(f" Laying DF shape: {self.df_laying.shape}")
logger.info("Step 6: Cleaning DataFrames...")
drop_cols = ["id", "created_at", "_sa_instance_state"]
for df in [self.df_tr, self.df_mh, self.df_dc, self.df_laying]:
if not df.empty:
df.drop(columns=drop_cols, errors="ignore", inplace=True)
logger.debug(f" Cleaned DF with shape: {df.shape}")
logger.info("✓ ClientBill.Fetch() completed successfully")
logger.info("=" * 60)
except Exception as e:
logger.error("=" * 60)
logger.error(f"ERROR in ClientBill.Fetch(): {str(e)}")
logger.error(f"Error type: {type(e).__name__}")
logger.exception("Full traceback:")
logger.error("=" * 60)
raise
# --- Subcontractor class ---
class SubcontractorBill:
@@ -52,26 +93,68 @@ class SubcontractorBill:
self.df_laying = pd.DataFrame()
def Fetch(self, RA_Bill_No=None, subcontractor_id=None):
logger.info("=" * 60)
logger.info("SubcontractorBill.Fetch() - START")
logger.info("=" * 60)
logger.info(f"Parameters:")
logger.info(f" RA_Bill_No: '{RA_Bill_No}' (type: {type(RA_Bill_No)})")
logger.info(f" subcontractor_id: '{subcontractor_id}' (type: {type(subcontractor_id)})")
try:
filters = {}
if subcontractor_id:
filters["subcontractor_id"] = subcontractor_id
logger.debug(f" Added filter - subcontractor_id: {subcontractor_id}")
if RA_Bill_No:
filters["RA_Bill_No"] = RA_Bill_No
logger.debug(f" Added filter - RA_Bill_No: {RA_Bill_No}")
logger.info(f"Applied filters: {filters}")
logger.info("Step 1: Fetching TrenchExcavation records...")
trench = TrenchExcavation.query.filter_by(**filters).all()
mh = ManholeExcavation.query.filter_by(**filters).all()
dc = ManholeDomesticChamber.query.filter_by(**filters).all()
lay = Laying.query.filter_by(**filters).all()
logger.info(f"✓ Trench records found: {len(trench)}")
logger.info("Step 2: Fetching ManholeExcavation records...")
mh = ManholeExcavation.query.filter_by(**filters).all()
logger.info(f"✓ Manhole records found: {len(mh)}")
logger.info("Step 3: Fetching ManholeDomesticChamber records...")
dc = ManholeDomesticChamber.query.filter_by(**filters).all()
logger.info(f"✓ Domestic Chamber records found: {len(dc)}")
logger.info("Step 4: Fetching Laying records...")
lay = Laying.query.filter_by(**filters).all()
logger.info(f"✓ Laying records found: {len(lay)}")
logger.info("Step 5: Converting to DataFrames...")
self.df_tr = pd.DataFrame([c.serialize() for c in trench])
self.df_mh = pd.DataFrame([c.serialize() for c in mh])
self.df_dc = pd.DataFrame([c.serialize() for c in dc])
self.df_laying = pd.DataFrame([c.serialize() for c in lay])
logger.debug(f" Trench DF shape: {self.df_tr.shape}")
logger.debug(f" Manhole DF shape: {self.df_mh.shape}")
logger.debug(f" Domestic Chamber DF shape: {self.df_dc.shape}")
logger.debug(f" Laying DF shape: {self.df_laying.shape}")
logger.info("Step 6: Cleaning DataFrames...")
drop_cols = ["id", "created_at", "_sa_instance_state"]
for df in [self.df_tr, self.df_mh, self.df_dc, self.df_laying]:
if not df.empty:
df.drop(columns=drop_cols, errors="ignore", inplace=True)
logger.debug(f" Cleaned DF with shape: {df.shape}")
logger.info("✓ SubcontractorBill.Fetch() completed successfully")
logger.info("=" * 60)
except Exception as e:
logger.error("=" * 60)
logger.error(f"ERROR in SubcontractorBill.Fetch(): {str(e)}")
logger.error(f"Error type: {type(e).__name__}")
logger.exception("Full traceback:")
logger.error("=" * 60)
raise
# --- subcontractor report only ---

View File

@@ -1,8 +1,7 @@
from flask import Blueprint, render_template, request, send_file, flash
from collections import defaultdict
import pandas as pd
import io
import re
from collections import defaultdict
from app.models.subcontractor_model import Subcontractor
from app.models.trench_excavation_model import TrenchExcavation
@@ -16,20 +15,26 @@ from app.models.mh_dc_client_model import ManholeDomesticChamberClient
from app.models.laying_client_model import LayingClient
from app.utils.helpers import login_required
import re
# Blueprint for the comparison-report routes, mounted under /report
generate_report_bp = Blueprint("generate_report", __name__, url_prefix="/report")
# --- REGEX PATTERNS FOR TOTALING ---
# Matches pipe-laying quantity fields such as "pipe_150_mm"
PIPE_MM_PATTERN = re.compile(r"^pipe_\d+_mm$")
# Matches MH/DC depth-range fields such as "d_0_to_0_75" or "d_0_75_to_1_5"
# (each bound is an integer optionally followed by a fractional part)
D_RANGE_PATTERN = re.compile( r"^d_\d+(?:_\d+)?_to_\d+(?:_\d+)?$")
# --- UTILITIES ---
# NORMALIZER
def normalize_key(value):
    """Normalize a join-key value for case/whitespace-insensitive matching.

    None stays None (treated as "missing" by callers, which only test
    truthiness); anything else is stringified, stripped, and upper-cased.

    Fix: merge residue left two stacked returns (`return ""` then an
    unreachable `return None`); keep the later, added variant.
    """
    if value is None:
        return None
    return str(value).strip().upper()
# HEADER FORMATTER
def format_header(header):
if "-" in header:
prefix, rest = header.split("-", 1)
@@ -40,6 +45,7 @@ def format_header(header):
parts = rest.split("_")
result = []
i = 0
while i < len(parts):
if i + 1 < len(parts) and parts[i].isdigit() and parts[i + 1].isdigit():
result.append(f"{parts[i]}.{parts[i + 1]}")
@@ -51,70 +57,79 @@ def format_header(header):
final_text = " ".join(result)
return f"{prefix}-{final_text}" if prefix else final_text
# LOOKUP CREATOR
def make_lookup(rows, key_field):
    """Creates a mapping of (Location, Key) to a list of records.

    Both key parts are normalized via normalize_key so matching is case-
    and whitespace-insensitive; rows missing either part are skipped.

    Fix: merge residue bound `lookup` twice (defaultdict then dict) and
    processed each row through two interleaved extraction variants; keep
    the later plain-dict/setdefault variant only.
    """
    lookup = {}
    for r in rows:
        location = normalize_key(r.get("Location"))
        key_val = normalize_key(r.get(key_field))
        if location and key_val:
            lookup.setdefault((location, key_val), []).append(r)
    return lookup
def calculate_row_total(row_dict):
    """Calculates total based on _total suffix or regex patterns.

    A field contributes to the total when its name ends in "_total" or
    matches one of the module's depth-range / pipe-size patterns; None
    and empty values count as zero.
    """
    total = 0
    for field_name, raw_value in row_dict.items():
        is_summable = (
            field_name.endswith("_total")
            or D_RANGE_PATTERN.match(field_name)
            or PIPE_MM_PATTERN.match(field_name)
        )
        if is_summable:
            total += float(raw_value or 0)
    return total
# --- CORE COMPARISON LOGIC ---
# COMPARISON BUILDER
def build_comparison(client_rows, contractor_rows, key_field):
# 1. Create Lookup for Subcontractors
contractor_lookup = {}
for r in contractor_rows:
loc = normalize_key(r.get("Location") or r.get("location"))
key = normalize_key(r.get(key_field) or r.get(key_field.lower()))
if loc and key:
contractor_lookup[(loc, key)] = r
contractor_lookup = make_lookup(contractor_rows, key_field)
output = []
# 2. Iterate through Client rows
used_index = defaultdict(int) # 🔥 THIS FIXES YOUR ISSUE
for c in client_rows:
loc_raw = c.get("Location") or c.get("location")
key_raw = c.get(key_field) or c.get(key_field.lower())
client_location = normalize_key(c.get("Location"))
client_key = normalize_key(c.get(key_field))
loc_norm = normalize_key(loc_raw)
key_norm = normalize_key(key_raw)
if not client_location or not client_key:
continue
# Match check
s = contractor_lookup.get((loc_norm, key_norm))
subs = contractor_lookup.get((client_location, client_key))
if not subs:
continue
# We only include the row if there is a match (Inner Join)
if s:
client_total = calculate_row_total(c)
sub_total = calculate_row_total(s)
idx = used_index[(client_location, client_key)]
# ❗ If subcontractor rows are exhausted, skip
if idx >= len(subs):
continue
s = subs[idx] # ✅ take NEXT subcontractor row
used_index[(client_location, client_key)] += 1
# ---- totals ----
client_total = sum(
float(v or 0)
for k, v in c.items()
if k.endswith("_total")
or D_RANGE_PATTERN.match(k)
or PIPE_MM_PATTERN.match(k)
)
sub_total = sum(
float(v or 0)
for k, v in s.items()
if k.endswith("_total")
or D_RANGE_PATTERN.match(k)
or PIPE_MM_PATTERN.match(k)
)
row = {
"Location": loc_raw,
key_field.replace("_", " "): key_raw
"Location": client_location,
key_field.replace("_", " "): client_key
}
# Add Client Data
for k, v in c.items():
if k in ["id", "created_at"]: continue
if k not in ["id", "created_at"]:
row[f"Client-{k}"] = v
row["Client-Total"] = round(client_total, 2)
row[" "] = ""
row[" "] = "" # Spacer
# Add Subcontractor Data (Aligned on same row)
for k, v in s.items():
if k in ["id", "created_at", "subcontractor_id"]: continue
if k not in ["id", "created_at", "subcontractor_id"]:
row[f"Subcontractor-{k}"] = v
row["Subcontractor-Total"] = round(sub_total, 2)
@@ -122,54 +137,55 @@ def build_comparison(client_rows, contractor_rows, key_field):
output.append(row)
# 3. Handle the "Empty/Blank" scenario using pd.concat
if not output:
# Create a basic dataframe with a message so the Excel file isn't empty/corrupt
return pd.DataFrame([{"Location": "N/A", "Message": "No matching data found"}])
df = pd.DataFrame(output)
df.columns = [format_header(col) for col in df.columns]
return df
# --- EXCEL WRITER ---
# EXCEL SHEET WRITER
def write_sheet(writer, df, sheet_name, subcontractor_name):
    """Write one comparison DataFrame to an Excel sheet with styling.

    Layout: two merged title rows, the DataFrame starting at row 3, and a
    re-written header row color-coded by column group (client = blue,
    subcontractor = orange, totals = yellow, Diff = green). Skips empty
    frames entirely.

    Fixes: collapsed the two interleaved merge-residue variants of the
    format/title/header code into the later one, and removed the
    duplicated boolean `endswith("_total") or endswith("_total")`.
    """
    if df.empty:
        return
    workbook = writer.book
    df.to_excel(writer, sheet_name=sheet_name, index=False, startrow=3)
    ws = writer.sheets[sheet_name]
    # Formats
    title_fmt = workbook.add_format({"bold": True, "font_size": 14})
    client_fmt = workbook.add_format({"bold": True, "border": 1, "bg_color": "#B6DAED"})
    sub_fmt = workbook.add_format({"bold": True, "border": 1, "bg_color": "#F3A081"})
    total_fmt = workbook.add_format({"bold": True, "border": 1, "bg_color": "#F7D261"})
    diff_fmt = workbook.add_format({"bold": True, "border": 1, "bg_color": "#82DD49"})
    default_header_fmt = workbook.add_format({"bold": True, "border": 1, "bg_color": "#E7E6E6", "align": "center", "valign": "vcenter"})
    # Header Titles
    ws.merge_range(0, 0, 0, len(df.columns) - 1, "CLIENT vs SUBCONTRACTOR COMPARISON", title_fmt)
    ws.merge_range(1, 0, 1, len(df.columns) - 1, f"Subcontractor: {subcontractor_name}", title_fmt)
    # Re-write the header row (row 3) with group-specific formats
    for col_num, col_name in enumerate(df.columns):
        if col_name.startswith("Client-"):
            ws.write(3, col_num, col_name, client_fmt)
        elif col_name.startswith("Subcontractor-"):
            ws.write(3, col_num, col_name, sub_fmt)
        elif col_name.endswith("_total"):
            ws.write(3, col_num, col_name, total_fmt)
        elif col_name == "Diff":
            ws.write(3, col_num, col_name, diff_fmt)
        else:
            ws.write(3, col_num, col_name, default_header_fmt)
        ws.set_column(col_num, col_num, 20)
# --- ROUTES ---
# REPORT ROUTE
@generate_report_bp.route("/comparison_report", methods=["GET", "POST"])
@login_required
def comparison_report():
@@ -178,29 +194,48 @@ def comparison_report():
if request.method == "POST":
subcontractor_id = request.form.get("subcontractor_id")
if not subcontractor_id:
flash("Please select a subcontractor", "danger")
flash("Please select subcontractor", "danger")
return render_template("generate_comparison_report.html",subcontractors=subcontractors)
subcontractor = Subcontractor.query.get_or_404(subcontractor_id)
# Build Dataframes for each section
sections = [
(TrenchExcavationClient, TrenchExcavation, "Tr.Ex"),
(ManholeExcavationClient, ManholeExcavation, "Mh.Ex"),
(ManholeDomesticChamberClient, ManholeDomesticChamber, "MH & DC"),
(LayingClient, Laying, "Laying")
]
# -------- DATA --------
tr_client = [r.serialize() for r in TrenchExcavationClient.query.all()]
tr_sub = [r.serialize() for r in TrenchExcavation.query.filter_by(
subcontractor_id=subcontractor_id
).all()]
df_tr = build_comparison(tr_client, tr_sub, "MH_NO")
mh_client = [r.serialize() for r in ManholeExcavationClient.query.all()]
mh_sub = [r.serialize() for r in ManholeExcavation.query.filter_by(
subcontractor_id=subcontractor_id
).all()]
df_mh = build_comparison(mh_client, mh_sub, "MH_NO")
dc_client = [r.serialize() for r in ManholeDomesticChamberClient.query.all()]
dc_sub = [r.serialize() for r in ManholeDomesticChamber.query.filter_by(
subcontractor_id=subcontractor_id
).all()]
df_dc = build_comparison(dc_client, dc_sub, "MH_NO")
# df_dc = build_comparison_mh_dc(dc_client, dc_sub, "MH_NO")
lay_client = [r.serialize() for r in LayingClient.query.all()]
lay_sub = [r.serialize() for r in Laying.query.filter_by(
subcontractor_id=subcontractor_id
).all()]
df_lay = build_comparison(lay_client, lay_sub, "MH_NO")
# df_lay = build_comparison_laying(lay_client, lay_sub, "MH_NO")
# -------- EXCEL --------
output = io.BytesIO()
filename = f"{subcontractor.subcontractor_name}_Comparison_Report.xlsx"
with pd.ExcelWriter(output, engine="xlsxwriter") as writer:
for client_model, sub_model, sheet_name in sections:
c_data = [r.serialize() for r in client_model.query.all()]
s_data = [r.serialize() for r in sub_model.query.filter_by(subcontractor_id=subcontractor_id).all()]
df = build_comparison(c_data, s_data, "MH_NO")
write_sheet(writer, df, sheet_name, subcontractor.subcontractor_name)
write_sheet(writer, df_tr, "Tr.Ex", subcontractor.subcontractor_name)
write_sheet(writer, df_mh, "Mh.Ex", subcontractor.subcontractor_name)
write_sheet(writer, df_dc, "MH & DC", subcontractor.subcontractor_name)
write_sheet(writer, df_lay, "Laying", subcontractor.subcontractor_name)
output.seek(0)
return send_file(
@@ -211,3 +246,107 @@ def comparison_report():
)
return render_template("generate_comparison_report.html",subcontractors=subcontractors)
# def build_comparison_mh_dc(client_rows, contractor_rows, key_field):
# contractor_lookup = make_lookup(contractor_rows, key_field)
# mh_dc_fields = ManholeDomesticChamberClient.sum_mh_dc_fields()
# output = []
# for c in client_rows:
# loc = normalize_key(c.get("Location"))
# key = normalize_key(c.get(key_field))
# if not loc or not key:
# continue
# s = contractor_lookup.get((loc, key))
# if not s:
# continue
# client_total = sum(float(c.get(f, 0) or 0) for f in mh_dc_fields)
# sub_total = sum(float(s.get(f, 0) or 0) for f in mh_dc_fields)
# row = {
# "Location": loc,
# key_field.replace("_", " "): key
# }
# # CLIENT ALL FIELDS
# for k, v in c.items():
# if k in ["id", "created_at"]:
# continue
# row[f"Client-{k}"] = v
# row["Client-Total"] = round(client_total, 2)
# row[" "] = ""
# # SUBCONTRACTOR ALL FIELDS
# for k, v in s.items():
# if k in ["id", "created_at", "subcontractor_id"]:
# continue
# row[f"Subcontractor-{k}"] = v
# row["Subcontractor-Total"] = round(sub_total, 2)
# row["Diff"] = round(client_total - sub_total, 2)
# output.append(row)
# df = pd.DataFrame(output)
# df.columns = [format_header(col) for col in df.columns]
# return df
# def build_comparison_laying(client_rows, contractor_rows, key_field):
# contractor_lookup = make_lookup(contractor_rows, key_field)
# laying_fields = Laying.sum_laying_fields()
# output = []
# for c in client_rows:
# loc = normalize_key(c.get("Location"))
# key = normalize_key(c.get(key_field))
# if not loc or not key:
# continue
# s = contractor_lookup.get((loc, key))
# if not s:
# continue
# client_total = sum(float(c.get(f, 0) or 0) for f in laying_fields)
# sub_total = sum(float(s.get(f, 0) or 0) for f in laying_fields)
# print("--------------",key,"----------")
# print("sum -client_total ",client_total)
# print("sum -sub_total ",sub_total)
# print("Diff ---- ",client_total - sub_total)
# print("------------------------")
# row = {
# "Location": loc,
# key_field.replace("_", " "): key
# }
# # CLIENT ALL FIELDS
# for k, v in c.items():
# if k in ["id", "created_at"]:
# continue
# row[f"Client-{k}"] = v
# row["Client-Total"] = round(client_total, 2)
# row[" "] = ""
# # SUBCONTRACTOR ALL FIELDS
# for k, v in s.items():
# if k in ["id", "created_at", "subcontractor_id"]:
# continue
# row[f"Subcontractor-{k}"] = v
# row["Subcontractor-Total"] = round(sub_total, 2)
# row["Diff"] = round(client_total - sub_total, 2)
# output.append(row)
# df = pd.DataFrame(output)
# df.columns = [format_header(col) for col in df.columns]
# return df

View File

@@ -1,51 +1,82 @@
{% extends "base.html" %}
{% block content %}
<div class="container-fluid px-2 px-md-4">
<h4 class="mb-3 text-center text-md-start">Comparison Software Solapur (UGD) - Live Dashboard</h4>
<div class="row g-3 mb-4">
<div class="col-12 col-md-4">
<div class="card text-white bg-primary shadow h-100">
<div class="card-body text-center text-md-start">
<h6>Trenching Units</h6>
<h3 class="fw-bold" id="card-trench">0</h3>
</div>
</div>
</div>
<div class="col-12 col-md-4">
<div class="card text-white bg-success shadow h-100">
<div class="card-body text-center text-md-start">
<h6>Manhole Units</h6>
<h3 class="fw-bold" id="card-manhole">0</h3>
</div>
</div>
</div>
<div class="col-12 col-md-4">
<div class="card text-dark bg-warning shadow h-100">
<div class="card-body text-center text-md-start">
<h6>Laying Units</h6>
<h3 class="fw-bold" id="card-laying">0</h3>
</div>
</div>
</div>
</div>
<div class="container-fluid py-4" style="background-color: #f8f9fa;">
<h3 class="mb-4 fw-bold text-uppercase">Abstract Excavation Dashboard</h3>
<div class="card shadow-sm mb-4">
<div class="card-body bg-white">
<div class="row g-3">
<div class="col-12 col-md-6">
<div class="card shadow-sm h-100">
<div class="card-header bg-dark text-white">Live Category Bar Chart</div>
<div class="card-body">
<canvas id="liveBarChart" style="max-height:300px;"></canvas>
<div class="col-md-2">
<label class="form-label fw-bold">Comparison Type</label>
<select id="filter-table" class="form-select" onchange="loadDashboardData()">
<option value="trench">Trench Excavation</option>
<option value="manhole">Manhole Excavation</option>
<option value="laying">Laying</option>
</select>
</div>
<div class="col-md-3">
<label class="form-label fw-bold">Subcontractor</label>
<select id="filter-subcon" class="form-select" onchange="loadDashboardData()">
<option value="All">All Subcontractors</option>
</select>
</div>
<div class="col-md-3">
<label class="form-label fw-bold">RA Bill No</label>
<select id="filter-ra" class="form-select" onchange="loadDashboardData()">
<option value="Cumulative">Cumulative (All Bills)</option>
</select>
</div>
<div class="col-md-4 d-flex align-items-end gap-2">
<button class="btn btn-primary flex-grow-1" onclick="loadDashboardData()">🔄 Refresh</button>
<button class="btn btn-secondary flex-grow-1" onclick="clearDashboard()">🗑️ Clear</button>
</div>
</div>
</div>
</div>
<div class="col-12 col-md-6">
<!-- Empty State (Shown on page load) -->
<div id="empty-state" class="alert alert-info text-center py-5">
<h5>📊 Select filters to display data</h5>
<p>Choose a Subcontractor and/or RA Bill to see the excavation abstract comparison.</p>
</div>
<!-- Data Display Area (Hidden by default) -->
<div id="data-area" style="display: none;">
<div class="row">
<div class="col-lg-8">
<div class="card shadow-sm h-100">
<div class="card-header bg-dark text-white">Location Distribution Pie Chart</div>
<div class="card-body">
<canvas id="livePieChart" style="max-height:300px;"></canvas>
<div class="card-header bg-primary text-white fw-bold d-flex justify-content-between align-items-center">
<span id="chart-title">Excavation Comparison: Client vs Subcontractor Qty</span>
<small class="fw-normal">(Horizontal Bar Chart)</small>
</div>
<div class="card-body" style="position: relative; height: 700px; overflow-y: auto;">
<canvas id="groupedBarChart"></canvas>
</div>
</div>
</div>
<div class="col-lg-4">
<div class="card shadow-sm h-100">
<div class="card-header bg-success text-white fw-bold">Excavation Abstract Table</div>
<div class="card-body p-0">
<div class="table-responsive" style="max-height: 500px; overflow-y: auto;">
<table class="table table-hover mb-0" id="abstract-table">
<thead class="table-light sticky-top">
<tr>
<th class="small">Soil / Depth</th>
<th class="small">Client (m³)</th>
<th class="small">Subcon (m³)</th>
<th class="small">Diff</th>
</tr>
</thead>
<tbody></tbody>
<tfoot class="table-light fw-bold position-sticky bottom-0">
<tr id="table-totals" class="bg-light"></tr>
</tfoot>
</table>
</div>
</div>
</div>
</div>
</div>
@@ -55,64 +86,290 @@
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script>
// 2. Initialize the Bar Chart
const barCtx = document.getElementById('liveBarChart').getContext('2d');
let liveBarChart = new Chart(barCtx, {
type: 'bar',
let comparisonChart;
// Define color palette - ONLY 2 COLORS
const colorPalette = {
'client': '#003D7A', // Dark Blue for Client (RA Bill)
'subcon': '#87CEEB' // Light Sky Blue for Subcontractor
};
// 1. Function to Initialize or Update the Chart (VERTICAL BARS - 2 COLORS ONLY)
function updateChartUI(labels, clientData, subconData) {
const ctx = document.getElementById('groupedBarChart').getContext('2d');
if (comparisonChart) comparisonChart.destroy();
comparisonChart = new Chart(ctx, {
type: 'bar', // Vertical bar chart
data: {
labels: ['Trenching', 'Manholes', 'Laying'],
datasets: [{
label: 'Units Completed',
data: [0, 0, 0],
backgroundColor: ['#0d6efd', '#198754', '#ffc107']
}]
labels: labels,
datasets: [
{
label: 'Client Qty (m³)',
data: clientData,
backgroundColor: colorPalette.client,
borderColor: '#001F4D',
borderWidth: 1,
borderRadius: 4,
hoverBackgroundColor: '#002A5C',
hoverBorderWidth: 2
},
options: { responsive: true, maintainAspectRatio: false }
});
// 3. Initialize the Pie Chart
const pieCtx = document.getElementById('livePieChart').getContext('2d');
let livePieChart = new Chart(pieCtx, {
type: 'pie',
data: {
labels: [], // Will be filled from SQL
datasets: [{
data: [],
backgroundColor: ['#0d6efd', '#198754', '#ffc107', '#6f42c1', '#fd7e14']
}]
{
label: 'Subcontractor Qty (m³)',
data: subconData,
backgroundColor: colorPalette.subcon,
borderColor: '#4A90B8',
borderWidth: 1,
borderRadius: 4,
hoverBackgroundColor: '#6BB3D9',
hoverBorderWidth: 2
}
]
},
options: { responsive: true, maintainAspectRatio: false }
options: {
indexAxis: 'x', // Vertical bars (default)
responsive: true,
maintainAspectRatio: false,
interaction: {
intersect: false,
mode: 'index'
},
plugins: {
legend: {
position: 'top',
labels: {
font: { size: 14, weight: 'bold' },
padding: 15,
usePointStyle: true,
boxWidth: 15
}
},
tooltip: {
backgroundColor: '#2c3e50',
padding: 12,
titleFont: { size: 13, weight: 'bold' },
bodyFont: { size: 12 },
borderColor: '#fff',
borderWidth: 1,
displayColors: true,
callbacks: {
label: function(context) {
let label = context.dataset.label || '';
if (label) label += ': ';
label += Number(context.parsed.y).toLocaleString('en-IN', {
minimumFractionDigits: 2,
maximumFractionDigits: 2
}) + ' m³';
return label;
}
}
}
},
scales: {
x: {
stacked: false,
grid: {
display: false
},
ticks: {
font: { size: 11 },
maxRotation: 45,
minRotation: 0
}
},
y: {
stacked: false,
beginAtZero: true,
grid: {
color: '#ecf0f1',
drawBorder: false
},
ticks: {
font: { size: 11 },
callback: function(value) {
return Number(value).toLocaleString('en-IN');
}
},
title: {
display: true,
text: 'Excavation Quantity (m³)',
font: { size: 12, weight: 'bold' }
}
}
}
}
});
// 4. Function to Fetch Live Data from your Python API
function fetchLiveData() {
fetch('/dashboard/api/live-stats') // This matches the route we created in the "Kitchen"
.then(response => response.json())
.then(data => {
// Update the Summary Cards
document.getElementById('card-trench').innerText = data.summary.trench;
document.getElementById('card-manhole').innerText = data.summary.manhole;
document.getElementById('card-laying').innerText = data.summary.laying;
// Update Bar Chart
liveBarChart.data.datasets[0].data = [
data.summary.trench,
data.summary.manhole,
data.summary.laying
];
liveBarChart.update();
// Update Pie Chart (Location stats)
livePieChart.data.labels = Object.keys(data.locations);
livePieChart.data.datasets[0].data = Object.values(data.locations);
livePieChart.update();
})
.catch(err => console.error("Error fetching live data:", err));
}
// 5. Check for updates every 10 seconds (Real-time effect)
setInterval(fetchLiveData, 10000);
fetchLiveData(); // Load immediately on page open
</script>
// 2. Function to fetch unique filters (Subcontractors & RA Bills) from DB
// Populate the Subcontractor and RA Bill <select> elements from the backend.
// Fetches /dashboard/api/filters and rebuilds both dropdowns from scratch,
// keeping only the static "All" / "Cumulative" default options.
// Expects a JSON payload shaped like { ra_bills: [...], subcontractors: [...] }
// — TODO confirm against the /dashboard/api/filters route.
function loadFilters() {
console.log("🔄 Loading filters from /dashboard/api/filters...");
fetch('/dashboard/api/filters')
.then(res => {
// Log the HTTP status before parsing; res.json() rejects on invalid JSON.
console.log(`Response status: ${res.status}`);
return res.json();
})
.then(data => {
console.log("✓ Filter data received:", data);
const raSelect = document.getElementById('filter-ra');
// CRITICAL: This clears the "RA-01", "RA-02" you typed in manually
raSelect.innerHTML = '<option value="Cumulative">Cumulative (All Bills)</option>';
if (data.ra_bills && data.ra_bills.length > 0) {
console.log(`Adding ${data.ra_bills.length} RA bills to dropdown`);
data.ra_bills.forEach(billNo => {
let opt = document.createElement('option');
opt.value = billNo;
opt.innerText = billNo; // This will show exactly what's in the DB
raSelect.appendChild(opt);
console.log(`  + Added RA Bill: ${billNo}`);
});
} else {
console.warn("❌ No RA bills found in response");
}
// Repeat same for subcontractor dropdown
const subconSelect = document.getElementById('filter-subcon');
subconSelect.innerHTML = '<option value="All">All Subcontractors</option>';
if (data.subcontractors && data.subcontractors.length > 0) {
data.subcontractors.forEach(name => {
let opt = document.createElement('option');
opt.value = name;
opt.innerText = name;
subconSelect.appendChild(opt);
});
}
console.log("✓ Filters loaded successfully");
})
.catch(err => {
// Network failures and JSON parse errors both land here; the dropdowns
// keep their default options in that case.
console.error("❌ Error loading filters:", err);
});
}
// 3. Main function to load data and reflect in UI
// Fetch the excavation abstract for the current filter selection and render
// it as both the grouped bar chart (via updateChartUI) and the HTML table.
// Does nothing while both filters are still on their defaults, so the
// dashboard stays blank until the user makes a choice.
// Expects the API to return an array of rows with soil_type, depth,
// client_qty, subcon_qty and difference fields — TODO confirm against
// the /dashboard/api/excavation-abstract route.
function loadDashboardData() {
const tableType = document.getElementById('filter-table').value;
const subcon = document.getElementById('filter-subcon').value;
const ra = document.getElementById('filter-ra').value;
console.log(`📊 Filter values: Table="${tableType}", Subcon="${subcon}", RA="${ra}"`);
// If still on default values, don't load
if (subcon === 'All' && ra === 'Cumulative') {
console.warn("⚠️ Please select filters first");
return;
}
// Update chart title
const tableNames = {
'trench': 'Trench Excavation',
'manhole': 'Manhole Excavation',
'laying': 'Laying'
};
const chartTitle = document.getElementById('chart-title');
if (chartTitle) {
chartTitle.textContent = `${tableNames[tableType]}: Client (RA Bill) vs Subcontractor Qty`;
}
console.log(`📊 Loading dashboard data: Table="${tableType}", Subcon="${subcon}", RA="${ra}"`);
const url = `/dashboard/api/excavation-abstract?table_type=${encodeURIComponent(tableType)}&subcontractor=${encodeURIComponent(subcon)}&ra_bill=${encodeURIComponent(ra)}`;
console.log(`Fetching from URL: ${url}`);
fetch(url)
.then(res => {
console.log(`Response status: ${res.status}`);
if (!res.ok) {
throw new Error(`HTTP Error: ${res.status}`);
}
return res.json();
})
.then(data => {
console.log("✓ Dashboard data received:", data);
// Guard against error payloads (objects) and empty result sets before
// touching the DOM, so a bad response never leaves a half-rendered table.
if (!Array.isArray(data)) {
console.error("❌ Response is not an array:", data);
return;
}
if (data.length === 0) {
console.warn("⚠️ No data returned for this filter combination");
alert("No data found for selected filters");
return;
}
const labels = [];
const clientData = [];
const subconData = [];
const tableBody = document.querySelector("#abstract-table tbody");
tableBody.innerHTML = "";
// Running totals for the sticky footer row.
let tClient = 0, tSub = 0, tDiff = 0;
data.forEach(item => {
// Label format: "Soil Type Depth"
const label = `${item.soil_type}\n${item.depth}`;
labels.push(label);
clientData.push(item.client_qty || 0);
subconData.push(item.subcon_qty || 0);
tClient += item.client_qty || 0;
tSub += item.subcon_qty || 0;
tDiff += (item.difference || 0);
// Negative difference (subcontractor billed more than client) shows red.
const diffColor = (item.difference || 0) < 0 ? 'text-danger' : 'text-success';
tableBody.innerHTML += `
<tr>
<td class="small">
<strong>${item.soil_type}</strong>
<br>
<span class="text-muted small">${item.depth}</span>
</td>
<td class="small text-primary fw-bold">${(item.client_qty || 0).toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
<td class="small text-success fw-bold">${(item.subcon_qty || 0).toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
<td class="small fw-bold ${diffColor}">${(item.difference || 0).toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
</tr>
`;
});
const totalDiffColor = tDiff < 0 ? 'text-danger' : 'text-success';
document.getElementById('table-totals').innerHTML = `
<td class="small fw-bold">TOTAL</td>
<td class="small fw-bold text-primary">${tClient.toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
<td class="small fw-bold text-success">${tSub.toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
<td class="small fw-bold ${totalDiffColor}">${tDiff.toLocaleString('en-IN', {maximumFractionDigits: 2})}</td>
`;
// Show data area
document.getElementById('empty-state').style.display = 'none';
document.getElementById('data-area').style.display = 'block';
updateChartUI(labels, clientData, subconData);
console.log("✓ Chart and table updated successfully");
})
.catch(err => {
console.error("❌ Error loading dashboard data:", err);
alert(`Failed to load dashboard data: ${err.message}`);
});
}
// Clear dashboard
// Reset every filter control to its default and return the dashboard to the
// blank "select filters" state. Also tears down the Chart.js instance.
//
// Fix: the original destroyed the chart but kept the stale reference, so a
// later `if (comparisonChart) comparisonChart.destroy();` (in updateChartUI)
// would call destroy() on an already-destroyed chart. Clearing the reference
// after destroy() is the documented Chart.js teardown pattern.
function clearDashboard() {
console.log("🗑️ Clearing dashboard...");
document.getElementById('filter-table').value = 'trench';
document.getElementById('filter-subcon').value = 'All';
document.getElementById('filter-ra').value = 'Cumulative';
document.getElementById('empty-state').style.display = 'block';
document.getElementById('data-area').style.display = 'none';
if (comparisonChart) {
comparisonChart.destroy();
comparisonChart = null; // drop the stale handle so later guards see "no chart"
}
}
// Bootstrap: once the DOM is ready, populate the filter dropdowns only.
// The abstract data itself is fetched lazily, after the user picks filters,
// so the dashboard opens in its blank "select filters" state.
document.addEventListener("DOMContentLoaded", function () {
    console.log("🚀 Dashboard initialized");
    loadFilters();
});
</script>
{% endblock %}

46
docker-compose.yml Normal file
View File

@@ -0,0 +1,46 @@
# Compose stack for the comparison app: a MySQL 8 database plus the Flask app.
# NOTE(review): the top-level 'version' key is obsolete/ignored by Compose v2 —
# confirm before removing.
version: '3.8'
services:
  db:
    image: mysql:8.0
    container_name: comparison_db
    restart: always
    environment:
      # NOTE(review): credentials are hard-coded here and duplicated under the
      # app service below — consider moving them to an .env file or secrets.
      MYSQL_ROOT_PASSWORD: admin
      MYSQL_DATABASE: comparisondb
    ports:
      # Host 3307 -> container 3306 so a MySQL already running on the host
      # does not clash with this one.
      - "3307:3306"
    volumes:
      - mysql_data:/var/lib/mysql  # persist database files across restarts
  app:
    build: .
    container_name: comparison_app
    restart: always
    environment:
      FLASK_ENV: development
      FLASK_DEBUG: "True"
      FLASK_HOST: "0.0.0.0"
      FLASK_PORT: "5001"
      # Database connection parts consumed by the app's config;
      # 'db' resolves to the db service on the compose network.
      DB_DIALECT: mysql
      DB_DRIVER: pymysql
      DB_HOST: db
      DB_PORT: 3306
      DB_NAME: comparisondb
      DB_USER: root
      DB_PASSWORD: admin
    ports:
      # NOTE(review): the Dockerfile EXPOSEs 5000 while this maps 5001 —
      # confirm run.py honours FLASK_PORT=5001.
      - "5001:5001"
    depends_on:
      - db
    volumes:
      # Bind-mount logs and upload/download directories so their contents
      # survive image rebuilds and are inspectable from the host.
      - ./app/logs:/app/app/logs
      - ./app/static/uploads:/app/app/static/uploads
      - ./app/static/downloads:/app/app/static/downloads
volumes:
  mysql_data:

File diff suppressed because it is too large Load Diff

68
test_ra_bills.py Normal file
View File

@@ -0,0 +1,68 @@
#!/usr/bin/env python3
"""
Test script to verify RA bill fetching from both client and subcontractor tables
"""
import sys
import os

# Make the project root importable no matter where the script is run from.
sys.path.insert(0, os.path.dirname(__file__))

from app import create_app, db
from app.models.trench_excavation_model import TrenchExcavation
from app.models.tr_ex_client_model import TrenchExcavationClient


def _distinct_ra_bills(model):
    """Return the distinct, non-empty RA_Bill_No rows (list of 1-tuples) for *model*.

    ``.isnot(None)`` is the documented SQLAlchemy spelling of ``IS NOT NULL``;
    a bare ``!= None`` works via operator overloading but trips linters (E711).
    """
    return (
        db.session.query(model.RA_Bill_No)
        .filter(model.RA_Bill_No.isnot(None))
        .filter(model.RA_Bill_No != "")
        .distinct()
        .all()
    )


app = create_app()
with app.app_context():
    print("=" * 70)
    print("RA BILL FETCHING TEST")
    print("=" * 70)

    # Test 1: Count total records
    print("\n✓ TEST 1: Total Records in Both Tables")
    subcon_count = db.session.query(TrenchExcavation).count()
    client_count = db.session.query(TrenchExcavationClient).count()
    print(f"  Subcontractor (TrenchExcavation): {subcon_count} records")
    print(f"  Client (TrenchExcavationClient): {client_count} records")

    # Test 2: Check RA bills in Subcontractor table
    print("\n✓ TEST 2: RA Bills in Subcontractor Table")
    subcon_bills = _distinct_ra_bills(TrenchExcavation)
    print(f"  Distinct RA Bills found: {len(subcon_bills)}")
    print(f"  Bills: {[str(r[0]) for r in subcon_bills]}")

    # Test 3: Check RA bills in Client table
    print("\n✓ TEST 3: RA Bills in Client Table")
    client_bills = _distinct_ra_bills(TrenchExcavationClient)
    print(f"  Distinct RA Bills found: {len(client_bills)}")
    print(f"  Bills: {[str(r[0]) for r in client_bills]}")

    # Test 4: Combined unique RA bills
    print("\n✓ TEST 4: Combined Unique RA Bills (Union)")
    combined_bills = (
        db.session.query(TrenchExcavation.RA_Bill_No)
        .filter(TrenchExcavation.RA_Bill_No.isnot(None))
        .filter(TrenchExcavation.RA_Bill_No != "")
        .union(
            db.session.query(TrenchExcavationClient.RA_Bill_No)
            .filter(TrenchExcavationClient.RA_Bill_No.isnot(None))
            .filter(TrenchExcavationClient.RA_Bill_No != "")
        )
        # NOTE(review): ordering a UNION by a table column relies on both
        # selects exposing the same column name — confirm it sorts as expected
        # on MySQL.
        .order_by(TrenchExcavation.RA_Bill_No)
        .all()
    )
    print(f"  Total unique RA Bills: {len(combined_bills)}")
    print(f"  Bills: {[str(r[0]) for r in combined_bills]}")

    # Test 5: Sample data from both tables
    print("\n✓ TEST 5: Sample RA Bills from Tables")
    sample_subcon = db.session.query(TrenchExcavation.RA_Bill_No).limit(5).all()
    sample_client = db.session.query(TrenchExcavationClient.RA_Bill_No).limit(5).all()
    print(f"  Subcontractor samples: {[str(r[0]) for r in sample_subcon]}")
    print(f"  Client samples: {[str(r[0]) for r in sample_client]}")

    print("\n" + "=" * 70)
    print("✅ TEST COMPLETE")
    print("=" * 70)