import os

import pandas as pd
from werkzeug.utils import secure_filename

from app.config import Config
from app import db
from app.models.trench_excavation_model import TrenchExcavation
from app.models.manhole_excavation_model import ManholeExcavation
from app.models.manhole_domestic_chamber_model import ManholeDomesticChamber
from app.models.tr_ex_client_model import TrenchExcavationClient
from app.models.mh_ex_client_model import ManholeExcavationClient
from app.models.mh_dc_client_model import ManholeDomesticChamberClient
from app.utils.file_utils import ensure_upload_folder


class FileService:
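    """Handles RA bill workbook uploads and persists the parsed sheet data.

    The service validates the uploaded file, stores it under the configured
    upload folder, and maps each sheet onto the corresponding SQLAlchemy
    models (subcontractor and client variants).
    """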

    def allowed_file(self, filename):
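        """Return True if the filename has an extension listed in Config.ALLOWED_EXTENSIONS."""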
        return "." in filename and filename.rsplit(".", 1)[1].lower() in Config.ALLOWED_EXTENSIONS

    # def handle_file_upload(self, file, subcontractor_id, file_type):
    def handle_file_upload(self, file, subcontractor_id, RA_Bill_No):
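        """Validate the inputs, save the workbook to disk, and import its three sheets.

        Returns a (success, message) tuple. Note that the (success, message)
        tuples returned by the process_* helpers are currently not inspected here.
        """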

        if not subcontractor_id:
            return False, "Please select a subcontractor."

        if not RA_Bill_No:
            return False, "Please enter the RA Bill No."

        if not file or file.filename == "":
            return False, "No file selected."

        if not self.allowed_file(file.filename):
            return False, "Invalid file type! Allowed: CSV, XLSX, XLS"

        ensure_upload_folder()

        folder = os.path.join(Config.UPLOAD_FOLDER, f"sub_{subcontractor_id}")
        os.makedirs(folder, exist_ok=True)

        filename = secure_filename(file.filename)
        filepath = os.path.join(folder, filename)
        file.save(filepath)

        try:
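            # The sheet names and header-row offsets below assume the layout of the
            # RA bill workbook being uploaded; adjust them if that template changes.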
            df_tr_ex = pd.read_excel(filepath, sheet_name="Tr.Ex.", header=12)
            df_mh_ex = pd.read_excel(filepath, sheet_name="MH Ex.", header=12)
            df_mh_dc = pd.read_excel(filepath, sheet_name="MH & DC", header=11)

            self.process_trench_excavation(df_tr_ex, subcontractor_id, RA_Bill_No)
            self.process_manhole_excavation(df_mh_ex, subcontractor_id, RA_Bill_No)
            self.process_manhole_domestic_chamber(df_mh_dc, subcontractor_id, RA_Bill_No)

            return True, "File uploaded successfully."

        except Exception as e:
            return False, f"Processing failed: {e}"

    # Trench Excavation save method (TrenchExcavation model)
    def process_trench_excavation(self, df, subcontractor_id, RA_Bill_No):
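        """Clean the "Tr.Ex." sheet and insert one TrenchExcavation row per valid data row."""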
        print("trench_excavation RA_Bill_No:", RA_Bill_No)
        print("=== trench_excavation ===")
        print(df.columns.tolist())
        print("===================")

        # Clean column names
        df.columns = (
            df.columns.astype(str)
            .str.strip()
            .str.replace(r"[^\w]", "_", regex=True)
            .str.replace("__+", "_", regex=True)
            .str.strip("_")
        )
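        # For example, a header such as "MH NO." would be normalized to "MH_NO",
        # which is how the columns are matched against the model attributes below.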

        # Remove completely empty rows
        df = df.dropna(how="all")

        # Forward fill merged Location
        if "Location" in df.columns:
            df["Location"] = df["Location"].ffill()

        saved_count = 0
        skipped_count = 0

        try:
            for index, row in df.iterrows():
                record_data = {}
                location = row.get("Location")
                mh_no = row.get("MH_NO")

                if (pd.isna(location) or str(location).strip() == "" or pd.isna(mh_no) or str(mh_no).strip() == ""):
                    skipped_count += 1
                    continue

                # Map only model columns
                for col in df.columns:
                    if hasattr(TrenchExcavation, col):
                        value = row[col]

                        # Normalize empty values
                        if pd.isna(value) or str(value).strip() in ["", "-", "—", "nan"]:
                            value = None

                        record_data[col] = value

                # If all mapped fields are None → skip
                if all(v is None for v in record_data.values()):
                    skipped_count += 1
                    continue

                record = TrenchExcavation(
                    subcontractor_id=subcontractor_id, RA_Bill_No=RA_Bill_No,
                    **record_data
                )

                print("trench_excavation Saving Row → Location:", record.Location, " MH_NO:", record.MH_NO)

                db.session.add(record)
                saved_count += 1

            db.session.commit()

            return True, (
                f"Trench Excavation saved successfully. "
                f"Inserted: {saved_count}, Skipped: {skipped_count}"
            )

        except Exception as e:
            db.session.rollback()
            return False, f"Trench Excavation save failed: {e}"

    # Manhole Excavation save method (ManholeExcavation model)
    def process_manhole_excavation(self, df, subcontractor_id, RA_Bill_No):
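        """Clean the "MH Ex." sheet and insert one ManholeExcavation row per valid data row."""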
        print("manhole_excavation RA_Bill_No:", RA_Bill_No)
        print("=== manhole_excavation ===")
        print(df.columns.tolist())
        print("===================")

        # Clean column names
        df.columns = (
            df.columns.astype(str)
            .str.strip()
            .str.replace(r"[^\w]", "_", regex=True)
            .str.replace("__+", "_", regex=True)
            .str.strip("_")
        )

        # Remove completely empty rows
        df = df.dropna(how="all")

        # Forward fill merged Location
        if "Location" in df.columns:
            df["Location"] = df["Location"].ffill()

        saved_count = 0
        skipped_count = 0

        try:
            for index, row in df.iterrows():
                record_data = {}
                location = row.get("Location")
                mh_no = row.get("MH_NO")

                if (pd.isna(location) or str(location).strip() == "" or pd.isna(mh_no) or str(mh_no).strip() == ""):
                    skipped_count += 1
                    continue

                # Map only model columns
                for col in df.columns:
                    if hasattr(ManholeExcavation, col):
                        value = row[col]

                        # Normalize empty values
                        if pd.isna(value) or str(value).strip() in ["", "-", "—", "nan"]:
                            value = None

                        record_data[col] = value

                # If all mapped fields are None → skip
                if all(v is None for v in record_data.values()):
                    skipped_count += 1
                    continue

                record = ManholeExcavation(
                    subcontractor_id=subcontractor_id, RA_Bill_No=RA_Bill_No,
                    **record_data
                )

                print("manhole_excavation Saving Row → Location:", record.Location, " MH_NO:", record.MH_NO)

                db.session.add(record)
                saved_count += 1

            db.session.commit()

            return True, (
                f"Manhole Excavation saved successfully. "
                f"Inserted: {saved_count}, Skipped: {skipped_count}"
            )

        except Exception as e:
            db.session.rollback()
            return False, f"Manhole Excavation save failed: {e}"

    # Manhole and Domestic Chamber Construction save method (ManholeDomesticChamber model)
    def process_manhole_domestic_chamber(self, df, subcontractor_id, RA_Bill_No):
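        """Clean the "MH & DC" sheet and insert one ManholeDomesticChamber row per valid data row."""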
        print("manhole_domestic_chamber RA_Bill_No:", RA_Bill_No)
        print("=== manhole_domestic_chamber ===")
        print(df.columns.tolist())
        print("===================")

        # Clean column names
        df.columns = (
            df.columns.astype(str)
            .str.strip()
            .str.replace(r"[^\w]", "_", regex=True)
            .str.replace("__+", "_", regex=True)
            .str.strip("_")
        )

        # Remove completely empty rows
        df = df.dropna(how="all")

        # Forward fill merged Location
        if "Location" in df.columns:
            df["Location"] = df["Location"].ffill()

        saved_count = 0
        skipped_count = 0

        try:
            for index, row in df.iterrows():
                record_data = {}
                location = row.get("Location")
                node_no = row.get("Node_No")

                if (pd.isna(location) or str(location).strip() == "" or pd.isna(node_no) or str(node_no).strip() == ""):
                    skipped_count += 1
                    continue

                # Map only model columns
                for col in df.columns:
                    if hasattr(ManholeDomesticChamber, col):
                        value = row[col]

                        # Normalize empty values
                        if pd.isna(value) or str(value).strip() in ["", "-", "—", "nan"]:
                            value = None

                        record_data[col] = value

                # If all mapped fields are None → skip
                if all(v is None for v in record_data.values()):
                    skipped_count += 1
                    continue

                record = ManholeDomesticChamber(
                    subcontractor_id=subcontractor_id, RA_Bill_No=RA_Bill_No,
                    **record_data
                )

                print("manhole_domestic_chamber Saving Row → Location:", record.Location, " Node_No:", record.Node_No)

                db.session.add(record)
                saved_count += 1

            db.session.commit()

            return True, (
                f"Manhole Domestic Chamber saved successfully. "
                f"Inserted: {saved_count}, Skipped: {skipped_count}"
            )

        except Exception as e:
            db.session.rollback()
            return False, f"Manhole Domestic Chamber save failed: {e}"

    # ---------------------- client ----------------------------------

    def client_trench_excavation(self, df, subcontractor_id):
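        """Clean the client-side trench excavation sheet and insert TrenchExcavationClient rows."""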
        df.columns = [str(c).strip() for c in df.columns]

        # If the sheet has merged cells -> forward fill Location
        if "Location" in df.columns:
            df["Location"] = df["Location"].ffill()

        df = df.dropna(how="all")  # REMOVE empty rows

        # Identify missing location rows before insert
        missing_loc = df[df["Location"].isna() | (df["Location"].astype(str).str.strip() == "")]
        if not missing_loc.empty:
            return False, f"Error: Some rows have empty Location. Rows: {missing_loc.index.tolist()}"

        saved_count = 0

        try:
            for index, row in df.iterrows():
                record_data = {}

                # Insert only fields that exist in model
                for col in df.columns:
                    if hasattr(TrenchExcavationClient, col):
                        value = row[col]

                        # Normalize empty values
                        if pd.isna(value) or str(value).strip() in ["", "-", "—", "nan", "NaN"]:
                            value = None

                        record_data[col] = value

                record = TrenchExcavationClient(
                    subcontractor_id=subcontractor_id,
                    **record_data
                )

                db.session.add(record)
                saved_count += 1

            db.session.commit()
            return True, f"Client Tr Ex data saved successfully. Total rows: {saved_count}"

        except Exception as e:
            db.session.rollback()
            return False, f"Client Tr Ex save failed: {e}"

    # Mh Ex save method (ManholeExcavationClient model)
    def client_manhole_excavation(self, df, subcontractor_id):
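        """Clean the client-side manhole excavation sheet and insert ManholeExcavationClient rows."""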
        # Clean column names (strip whitespace)
        df.columns = [str(c).strip() for c in df.columns]

        # If the sheet has merged cells -> forward fill Location
        if "Location" in df.columns:
            df["Location"] = df["Location"].ffill()

        # REMOVE empty rows
        df = df.dropna(how="all")

        # Identify missing location rows before insert
        missing_loc = df[df["Location"].isna() | (df["Location"].astype(str).str.strip() == "")]
        if not missing_loc.empty:
            return False, f"Error: Some rows have empty Location. Rows: {missing_loc.index.tolist()}"

        saved_count = 0

        try:
            for index, row in df.iterrows():
                record_data = {}

                # Insert only fields that exist in model
                for col in df.columns:
                    if hasattr(ManholeExcavationClient, col):
                        value = row[col]

                        # Normalize empty values
                        if pd.isna(value) or str(value).strip() in ["", "-", "—", "nan", "NaN"]:
                            value = None

                        record_data[col] = value

                record = ManholeExcavationClient(
                    subcontractor_id=subcontractor_id,
                    **record_data
                )

                db.session.add(record)
                saved_count += 1

            db.session.commit()
            return True, f"Client MH Ex. data saved successfully. Total rows: {saved_count}"

        except Exception as e:
            db.session.rollback()
            return False, f"Client MH Ex. save failed: {e}"

    # Mh and Dc save method (ManholeDomesticChamberClient model)
    def client_manhole_domestic_chamber(self, df, subcontractor_id):
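        """Clean the client-side MH & DC sheet and insert ManholeDomesticChamberClient rows."""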
        # Clean column names (strip whitespace)
        df.columns = [str(c).strip() for c in df.columns]

        # If the sheet has merged cells -> forward fill Location
        if "Location" in df.columns:
            df["Location"] = df["Location"].ffill()

        # REMOVE empty rows
        df = df.dropna(how="all")

        # Identify missing location rows before insert
        missing_loc = df[df["Location"].isna() | (df["Location"].astype(str).str.strip() == "")]
        if not missing_loc.empty:
            return False, f"Error: Some rows have empty Location. Rows: {missing_loc.index.tolist()}"

        saved_count = 0

        try:
            for index, row in df.iterrows():
                record_data = {}

                # Insert only fields that exist in model
                for col in df.columns:
                    if hasattr(ManholeDomesticChamberClient, col):
                        value = row[col]

                        # Normalize empty values
                        if pd.isna(value) or str(value).strip() in ["", "-", "—", "nan", "NaN"]:
                            value = None

                        record_data[col] = value

                record = ManholeDomesticChamberClient(
                    subcontractor_id=subcontractor_id,
                    **record_data
                )

                db.session.add(record)
                saved_count += 1

            db.session.commit()
            return True, f"Client MH & DC data saved successfully. Total rows: {saved_count}"

        except Exception as e:
            db.session.rollback()
            return False, f"Client MH & DC save failed: {e}"
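

# Usage sketch (illustrative only): a Flask upload route would typically call the
# service like this. The module path and form field names are assumptions, not
# part of this file.
#
#     from flask import request, flash
#     from app.services.file_service import FileService
#
#     file_service = FileService()
#     success, message = file_service.handle_file_upload(
#         request.files.get("file"),
#         request.form.get("subcontractor_id"),
#         request.form.get("RA_Bill_No"),
#     )
#     flash(message)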