webhook
This commit is contained in:
@@ -1,136 +1,308 @@
|
||||
# import matplotlib
|
||||
# matplotlib.use("Agg")
|
||||
|
||||
# from flask import Blueprint, render_template, session, redirect, url_for
|
||||
# import matplotlib.pyplot as plt
|
||||
# import io
|
||||
# import base64
|
||||
# from app.utils.plot_utils import plot_to_base64
|
||||
# from app.services.dashboard_service import DashboardService
|
||||
|
||||
# dashboard_bp = Blueprint("dashboard", __name__, url_prefix="/dashboard")
|
||||
|
||||
# # dashboard_bp = Blueprint("dashboard", __name__)
|
||||
|
||||
# # charts
|
||||
# # def plot_to_base64():
|
||||
# # img = io.BytesIO()
|
||||
# # plt.savefig(img, format="png", bbox_inches="tight")
|
||||
# # plt.close()
|
||||
# # img.seek(0)
|
||||
# # return base64.b64encode(img.getvalue()).decode()
|
||||
|
||||
# # bar chart
|
||||
# def bar_chart():
|
||||
# categories = ["Trench", "Manhole", "Pipe Laying", "Restoration"]
|
||||
# values = [120, 80, 150, 60]
|
||||
|
||||
# plt.figure()
|
||||
# plt.bar(categories, values)
|
||||
# plt.title("Work Category Report")
|
||||
# plt.xlabel("test Category")
|
||||
# plt.ylabel("test Quantity")
|
||||
|
||||
|
||||
# return plot_to_base64(plt)
|
||||
|
||||
# # Pie chart
|
||||
# def pie_chart():
|
||||
# labels = ["Completed", "In Progress", "Pending"]
|
||||
# sizes = [55, 20, 25]
|
||||
|
||||
# plt.figure()
|
||||
# plt.pie(sizes, labels=labels, autopct="%1.1f%%", startangle=140)
|
||||
# plt.title("Project Status")
|
||||
|
||||
# return plot_to_base64(plt)
|
||||
|
||||
# # Histogram chart
|
||||
# def histogram_chart():
|
||||
# daily_work = [5, 10, 15, 20, 20, 25, 30, 35, 40, 45, 50]
|
||||
|
||||
# plt.figure()
|
||||
# plt.hist(daily_work, bins=5)
|
||||
# plt.title("Daily Work Distribution")
|
||||
# plt.xlabel("Work Units")
|
||||
# plt.ylabel("Frequency")
|
||||
|
||||
# return plot_to_base64(plt)
|
||||
|
||||
# # Dashboaed page
|
||||
# @dashboard_bp.route("/")
|
||||
# def dashboard():
|
||||
# if not session.get("user_id"):
|
||||
# return redirect(url_for("auth.login"))
|
||||
|
||||
# return render_template(
|
||||
# "dashboard.html",
|
||||
# title="Dashboard",
|
||||
# bar_chart=bar_chart(),
|
||||
# pie_chart=pie_chart(),
|
||||
# histogram=histogram_chart()
|
||||
# )
|
||||
|
||||
# # subcontractor dashboard
|
||||
# @dashboard_bp.route("/subcontractor_dashboard", methods=["GET", "POST"])
|
||||
# def subcontractor_dashboard():
|
||||
# if not session.get("user_id"):
|
||||
# return redirect(url_for("auth.login"))
|
||||
|
||||
# tr_dash = DashboardService().bar_chart_of_tr_ex
|
||||
|
||||
|
||||
# return render_template(
|
||||
# "subcontractor_dashboard.html",
|
||||
# title="Dashboard",
|
||||
# bar_chart=tr_dash
|
||||
# )
|
||||
|
||||
from flask import Blueprint, render_template, session, redirect, url_for, jsonify
|
||||
from sqlalchemy import func
|
||||
import logging
|
||||
from flask import Blueprint, render_template, session, redirect, url_for, jsonify, request
|
||||
from sqlalchemy import func, union_all
|
||||
from app import db
|
||||
from app.models.trench_excavation_model import TrenchExcavation
|
||||
from app.models.tr_ex_client_model import TrenchExcavationClient
|
||||
from app.models.manhole_excavation_model import ManholeExcavation
|
||||
from app.models.mh_ex_client_model import ManholeExcavationClient
|
||||
from app.models.laying_model import Laying
|
||||
from app.models.laying_client_model import LayingClient
|
||||
from app.models.subcontractor_model import Subcontractor
|
||||
|
||||
dashboard_bp = Blueprint("dashboard", __name__, url_prefix="/dashboard")
|
||||
|
||||
# Module-level logger so every route in this blueprint can log consistently.
logger = logging.getLogger(__name__)


# API for headline statistics (volumes, reach, productivity trend)
@dashboard_bp.route("/api/live-stats")
def live_stats():
    """Return live summary statistics for the dashboard as JSON.

    Response:
        summary   -- record counts for trench/manhole/laying plus the total
        locations -- {location: record count} built from trench entries
        timeline  -- {ISO date string: record count} per creation date
    """
    try:
        # 1. Overall volume: one COUNT(*) per work-type table.
        t_count = TrenchExcavation.query.count()
        m_count = ManholeExcavation.query.count()
        l_count = Laying.query.count()

        # 2. Location distribution (business reach).
        loc_results = db.session.query(
            TrenchExcavation.Location,
            func.count(TrenchExcavation.id)
        ).group_by(TrenchExcavation.Location).all()

        # 3. Work timeline (business productivity trend).
        # NOTE(review): assumes TrenchExcavation has a created_at column — confirm.
        day = func.date(TrenchExcavation.created_at)
        timeline_results = db.session.query(
            day, func.count(TrenchExcavation.id)
        ).group_by(day).order_by(day).all()

        return jsonify({
            "summary": {
                "trench": t_count,
                "manhole": m_count,
                "laying": l_count,
                "total": t_count + m_count + l_count
            },
            # Drop rows whose location is NULL/empty.
            "locations": {row[0]: row[1] for row in loc_results if row[0]},
            "timeline": {str(row[0]): row[1] for row in timeline_results}
        })
    except Exception as e:
        logger.exception("ERROR in live_stats(): %s", e)
        return jsonify({"error": str(e)}), 500


# API to get dynamic filter options from the database
@dashboard_bp.route("/api/filters")
def get_filters():
    """Return filter choices for the dashboard UI as JSON.

    Response:
        subcontractors -- distinct subcontractor names linked to trench entries
        ra_bills       -- distinct RA bill numbers drawn from BOTH the
                          subcontractor and the client trench tables
    """
    try:
        logger.info("=" * 60)
        logger.info("FETCHING RA BILLS - START")
        logger.info("=" * 60)

        # 1. Subcontractors that actually have trench entries.
        subcontractors = db.session.query(Subcontractor.subcontractor_name)\
            .join(TrenchExcavation, Subcontractor.id == TrenchExcavation.subcontractor_id)\
            .distinct().all()
        logger.info("✓ Subcontractors found: %d", len(subcontractors))

        # 2. Diagnostics: how well is RA_Bill_No populated? (debug aid only)
        total_records = db.session.query(TrenchExcavation).count()
        logger.info("✓ Total TrenchExcavation records: %d", total_records)
        records_with_ra = db.session.query(TrenchExcavation).filter(
            TrenchExcavation.RA_Bill_No.isnot(None),
            TrenchExcavation.RA_Bill_No != ""
        ).count()
        logger.info("✓ Records with a usable RA_Bill_No: %d", records_with_ra)

        # 3. Distinct RA bills, combined from subcontractor AND client tables.
        ra_bills_union = db.session.query(TrenchExcavation.RA_Bill_No)\
            .filter(TrenchExcavation.RA_Bill_No.isnot(None))\
            .filter(TrenchExcavation.RA_Bill_No != "")\
            .union(
                db.session.query(TrenchExcavationClient.RA_Bill_No)
                .filter(TrenchExcavationClient.RA_Bill_No.isnot(None))
                .filter(TrenchExcavationClient.RA_Bill_No != "")
            ).order_by(TrenchExcavation.RA_Bill_No).all()
        ra_bills_list = [r[0] for r in ra_bills_union if r[0]]
        logger.info("✓ Distinct RA Bills found (combined): %d", len(ra_bills_list))
        logger.debug(" RA Bills list: %s", ra_bills_list)

        response = {
            "subcontractors": [s[0] for s in subcontractors if s[0]],
            "ra_bills": ra_bills_list
        }
        logger.info("✓ Response prepared successfully")
        logger.info("=" * 60)
        return jsonify(response)

    except Exception as e:
        logger.error("ERROR in get_filters(): %s (%s)", e, type(e).__name__)
        logger.exception("Full traceback:")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
# Excavation quantity columns: (soil type, depth band, model attribute name).
# Manhole excavation and laying share one set of depth bands.
_COMMON_EXCAVATION_COLUMNS = [
    ("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5"),
    ("Soft Murum", "1.5-3.0m", "Soft_Murum_1_5_to_3_0"),
    ("Hard Murum", "0-1.5m", "Hard_Murum_0_to_1_5"),
    ("Hard Murum", "1.5-3.0m", "Hard_Murum_1_5_to_3_0"),
    ("Soft Rock", "0-1.5m", "Soft_Rock_0_to_1_5"),
    ("Soft Rock", "1.5-3.0m", "Soft_Rock_1_5_to_3_0"),
    ("Hard Rock", "0-1.5m", "Hard_Rock_0_to_1_5"),
    ("Hard Rock", "1.5-3.0m", "Hard_Rock_1_5_to_3_0"),
]

# Trench excavation additionally tracks deeper bands.
_TRENCH_EXCAVATION_COLUMNS = [
    ("Soft Murum", "0-1.5m", "Soft_Murum_0_to_1_5"),
    ("Soft Murum", "1.5-3.0m", "Soft_Murum_1_5_to_3_0"),
    ("Soft Murum", "3.0-4.5m", "Soft_Murum_3_0_to_4_5"),
    ("Hard Murum", "0-1.5m", "Hard_Murum_0_to_1_5"),
    ("Hard Murum", "1.5-3.0m", "Hard_Murum_1_5_to_3_0"),
    ("Soft Rock", "0-1.5m", "Soft_Rock_0_to_1_5"),
    ("Soft Rock", "1.5-3.0m", "Soft_Rock_1_5_to_3_0"),
    ("Hard Rock", "0-1.5m", "Hard_Rock_0_to_1_5"),
    ("Hard Rock", "1.5-3.0m", "Hard_Rock_1_5_to_3_0"),
    ("Hard Rock", "3.0-4.5m", "Hard_Rock_3_0_to_4_5"),
    ("Hard Rock", "4.5-6.0m", "Hard_Rock_4_5_to_6_0"),
    ("Hard Rock", "6.0-7.5m", "Hard_Rock_6_0_to_7_5"),
]


# API for the live abstract data - handles multiple table types
@dashboard_bp.route("/api/excavation-abstract")
def excavation_abstract():
    """Compare subcontractor vs client quantities per excavation item.

    Query parameters:
        table_type    -- 'trench' (default), 'manhole' or 'laying'
        subcontractor -- subcontractor name filter, or 'All' (default)
        ra_bill       -- RA bill filter applied to the client data,
                         or 'Cumulative' (default, no filter)

    Returns a JSON list with one entry per (soil type, depth, location, MH_NO)
    combination that has a quantity on either side, carrying client_qty,
    subcon_qty and their difference (subcon - client), each rounded to 2 dp.
    Only records present in BOTH tables (matched on Location + MH_NO) count.
    """
    try:
        table_type = request.args.get('table_type', 'trench')
        subcon_name = request.args.get('subcontractor', 'All')
        ra_bill = request.args.get('ra_bill', 'Cumulative')
        logger.info("EXCAVATION ABSTRACT: type=%s subcontractor=%s ra_bill=%s",
                    table_type, subcon_name, ra_bill)

        # Select the model pair and quantity columns for the requested table.
        table_config = {
            'trench': (TrenchExcavation, TrenchExcavationClient,
                       "Trench Excavation", _TRENCH_EXCAVATION_COLUMNS),
            'manhole': (ManholeExcavation, ManholeExcavationClient,
                        "Manhole Excavation", _COMMON_EXCAVATION_COLUMNS),
            'laying': (Laying, LayingClient,
                       "Laying", _COMMON_EXCAVATION_COLUMNS),
        }
        if table_type not in table_config:
            return jsonify({"error": f"Invalid table_type: {table_type}"}), 400
        SubconModel, ClientModel, table_label, excavation_columns = table_config[table_type]
        # Records are matched on the same two keys for every table type.
        location_key = 'Location'
        mh_key = 'MH_NO'
        logger.info("Using table: %s", table_label)

        # ===== FETCH SUBCONTRACTOR DATA =====
        subcon_query = db.session.query(SubconModel)
        # The name filter only applies when the model links to Subcontractor.
        if hasattr(SubconModel, 'subcontractor_id'):
            subcon_query = subcon_query.join(
                Subcontractor, Subcontractor.id == SubconModel.subcontractor_id
            )
            if subcon_name != 'All':
                subcon_query = subcon_query.filter(
                    Subcontractor.subcontractor_name == subcon_name
                )
        subcon_results = subcon_query.all()
        logger.info("Found %d subcontractor records", len(subcon_results))

        # ===== FETCH CLIENT DATA =====
        client_query = db.session.query(ClientModel)
        if ra_bill != 'Cumulative' and hasattr(ClientModel, 'RA_Bill_No'):
            client_query = client_query.filter(ClientModel.RA_Bill_No == ra_bill)
        client_results = client_query.all()
        logger.info("Found %d client records", len(client_results))

        # ===== MATCH RECORDS BY MH_NO AND LOCATION =====
        # Index the client records by "location|mh_no" for O(1) matching.
        client_map = {}
        for client_record in client_results:
            key = f"{getattr(client_record, location_key)}|{getattr(client_record, mh_key)}"
            client_map[key] = client_record
        logger.info("Client map has %d unique MH_NO+Location combinations", len(client_map))

        matched_data = {}
        match_count = 0
        for subcon_record in subcon_results:
            mh_no = getattr(subcon_record, mh_key)
            location = getattr(subcon_record, location_key)
            client_record = client_map.get(f"{location}|{mh_no}")
            if client_record is None:
                continue  # only report items present on both sides
            match_count += 1

            # Aggregate every quantity column of this matched pair.
            for soil, depth, col_name in excavation_columns:
                subcon_val = getattr(subcon_record, col_name, 0) or 0
                client_val = getattr(client_record, col_name, 0) or 0
                if subcon_val <= 0 and client_val <= 0:
                    continue  # skip cells with no data on either side
                record_key = f"{soil}|{depth}|{location}|{mh_no}"
                item = matched_data.setdefault(record_key, {
                    "soil_type": soil,
                    "depth": depth,
                    "location": location,
                    "mh_no": mh_no,
                    "client_qty": 0,
                    "subcon_qty": 0
                })
                item["client_qty"] += client_val
                item["subcon_qty"] += subcon_val

        logger.info("Matched %d subcontractor records; %d items with data",
                    match_count, len(matched_data))

        # Format the response rows and compute the per-item difference.
        data = []
        for item in matched_data.values():
            data.append({
                "label": f"{item['soil_type']} {item['depth']}",
                "soil_type": item["soil_type"],
                "depth": item["depth"],
                "location": item["location"],
                "mh_no": item["mh_no"],
                "client_qty": round(item["client_qty"], 2),
                "subcon_qty": round(item["subcon_qty"], 2),
                "difference": round(item["subcon_qty"] - item["client_qty"], 2)
            })

        # Stable ordering for the UI.
        data.sort(key=lambda x: (x["location"], x["mh_no"], x["soil_type"], x["depth"]))
        logger.info("Response prepared with %d matched records", len(data))
        return jsonify(data)

    except Exception as e:
        logger.error("ERROR in excavation_abstract(): %s (%s)", e, type(e).__name__)
        logger.exception("Full traceback:")
        return jsonify({"error": str(e)}), 500
|
||||
|
||||
@dashboard_bp.route("/")
def dashboard():
    """Render the dashboard page, redirecting anonymous users to login."""
    if not session.get("user_id"):
        return redirect(url_for("auth.login"))
    # Fixed: a second, unreachable return with a different title was removed.
    return render_template("dashboard.html", title="Business Intelligence Dashboard")
|
||||
Reference in New Issue
Block a user