import tkinter as tk
from tkinter import ttk, messagebox, filedialog
from pathlib import Path
import shutil, pandas as pd, matplotlib.pyplot as plt, numpy as np, pytz, scipy.stats, threading, time
from datetime import datetime
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from PIL import Image, ImageTk
try:
    LANCZOS = Image.Resampling.LANCZOS  # Pillow >= 9.1
except AttributeError:
    LANCZOS = Image.LANCZOS  # older Pillow releases
import subprocess, logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s-%(levelname)s-%(message)s')

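# GUI tool for CHST travel-time data (CHST presumably denotes crosshole seismic testing,
# as the upload prompts suggest): it loads Excel/CSV sheets, derives P/S-wave velocities
# and elastic moduli, flags suspect readings, estimates layers, correlates against
# SPT/sonic logs, and exports CSV and LaTeX reports.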
class SeismicWaveAnalyzer:
    def __init__(self, root):
        self.root = root
        self.root.withdraw()
        self.data_dir = Path.home() / "SeismicWaveData"
        self.out_dir = self.data_dir / "Outputs"
        self.data_dir.mkdir(exist_ok=True)
        self.out_dir.mkdir(exist_ok=True)
        self.show_splash()

    def show_splash(self):
        s = tk.Toplevel()
        s.title("Welcome")
        s.configure(bg="#fff")
        s.attributes('-fullscreen', True)
        s.overrideredirect(True)
        ttk.Button(s, text="Skip", command=lambda: self.launch_main(s), style="Custom.TButton").place(relx=.95, rely=.95, anchor="se")
        try:
            img = Image.open(self.data_dir / "picture1.png").resize((200, 200), LANCZOS)
            p = ImageTk.PhotoImage(img)
            img_label = tk.Label(s, image=p, bg="#fff")
            img_label.image = p  # keep a reference so the image is not garbage-collected
            img_label.place(x=20, y=20)
        except Exception:
            tk.Label(s, text="[Image 1]", width=20, height=10, bg="#d3d3d3").place(x=20, y=20)
        try:
            img = Image.open(self.data_dir / "picture2.png").resize((200, 200), LANCZOS)
            p = ImageTk.PhotoImage(img)
            img_label = tk.Label(s, image=p, bg="#fff")
            img_label.image = p  # keep a reference so the image is not garbage-collected
            img_label.place(relx=1.0, y=20, anchor="ne")
        except Exception:
            tk.Label(s, text="[Image 2]", width=20, height=10, bg="#d3d3d3").place(relx=1.0, y=20, anchor="ne")
        f = tk.Frame(s, bg="#fff")
        f.place(relx=.5, rely=.5, anchor="center")
        tk.Label(f, text="Seismic Wave Analysis", font=("Segoe UI", 20, "bold"), bg="#fff").pack(pady=10)
        tk.Label(f, text="FTT Mode Project", font=("Segoe UI", 16), bg="#fff").pack(pady=5)
        tk.Label(f, text="Instrumentation & Eng. Geophysics", font=("Segoe UI", 14), bg="#fff").pack(pady=5)
        tk.Label(f, text="Dr N Satyavani, Lead", font=("Segoe UI", 14, "italic"), bg="#fff").pack(pady=5)
        s.after(6000, lambda: self.launch_main(s))

    def launch_main(self, s):
        s.destroy()
        self.root.deiconify()
        self.root.title("Seismic Wave Analysis Tool")
        self.root.geometry("1000x700")
        self.root.state('zoomed')
        self.theme = {"bg": "#fff", "fg": "#212121", "entry_bg": "#f5f5f5", "accent": "#003087", "btn_fg": "#fff"}
        self.root.configure(bg=self.theme["bg"])
        self.raw_tree = self.qual_tree = self.ana_tree = self.sum_tree = self.lay_tree = self.corr_tree = self.rt_tree = self.plot_canvas = self.cur_fig = None
        self.cur_data = self.raw_data = self.cur_file = self.deviations = self.qual_issues = self.layers = self.corr_data = self.corr_res = self.rt_data = None
        self.plot_label = ttk.Label(self.root)
        self.sel_rows = {}
        self.min_d_var = tk.StringVar()
        self.max_d_var = tk.StringVar()
        self.rt_running = False
        self.plot_opts = {k: tk.BooleanVar(value=True) for k in ["Vp/Vs Ratio", "Poisson's Ratio", "Shear Modulus", "Bulk Modulus", "Young's Modulus", "Travel Time Deviations"]}
        self.setup_gui()
        self.update_clock()

    def setup_gui(self):
        m = tk.Menu(self.root)
        self.root.config(menu=m)
        fm = tk.Menu(m, tearoff=0)
        m.add_cascade(label="File", menu=fm)
        fm.add_command(label="Exit", command=self.root.quit)
        ff = ttk.Frame(self.root)
        ff.pack(pady=10, padx=10, fill=tk.X)
        ttk.Label(ff, text="Upload CHST:", font=("Segoe UI", 10)).pack(side=tk.LEFT, padx=5)
        ttk.Button(ff, text="Upload", command=self.upload_file, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(ff, text="Create Template", command=self.create_template, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(ff, text="Import CSV to PDF", command=self.import_csv_pdf, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(ff, text="Refresh", command=self.refresh_files, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Label(ff, text="Select File:", font=("Segoe UI", 10)).pack(side=tk.LEFT, padx=5)
        self.file_var = tk.StringVar()
        self.file_cb = ttk.Combobox(ff, textvariable=self.file_var, state="readonly")
        self.file_cb.pack(side=tk.LEFT, padx=5)
        self.file_cb.bind("<<ComboboxSelected>>", self.load_file)
        self.nb = ttk.Notebook(self.root)
        self.nb.pack(pady=10, padx=10, fill=tk.BOTH, expand=True)
        self.raw_f = ttk.Frame(self.nb)
        self.nb.add(self.raw_f, text="Raw Data")
        self.setup_raw_tab()
        self.qual_f = ttk.Frame(self.nb)
        self.nb.add(self.qual_f, text="Quality Check")
        self.setup_qual_tab()
        self.ana_f = ttk.Frame(self.nb)
        self.nb.add(self.ana_f, text="Analysis")
        self.setup_ana_tab()
        self.sum_f = ttk.Frame(self.nb)
        self.nb.add(self.sum_f, text="Summary")
        self.setup_sum_tab()
        self.lay_f = ttk.Frame(self.nb)
        self.nb.add(self.lay_f, text="Layers")
        self.setup_lay_tab()
        self.corr_f = ttk.Frame(self.nb)
        self.nb.add(self.corr_f, text="Correlation Analysis")
        self.setup_corr_tab()
        self.rt_f = ttk.Frame(self.nb)
        self.nb.add(self.rt_f, text="Real-Time")
        self.setup_rt_tab()
        self.viz_f = ttk.Frame(self.nb)
        self.nb.add(self.viz_f, text="Visualizations")
        self.setup_viz_tab()
        ef = ttk.Frame(self.root)
        ef.pack(pady=5, fill=tk.X)
        for t, c in [("Export Raw Data to CSV", self.export_raw), ("Export Quality Check to CSV", self.export_qual), ("Export Analysis to PDF", self.export_ana_pdf), ("Export Summary to CSV", self.export_sum), ("Export Layers to CSV", self.export_lay), ("Export Correlation to CSV", self.export_corr), ("Export Detailed Report", self.export_report)]:
            ttk.Button(ef, text=t, command=c, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        sf = ttk.Frame(self.root)
        sf.pack(side=tk.BOTTOM, fill=tk.X)
        self.status_var = tk.StringVar()
        tk.Label(sf, textvariable=self.status_var, bd=1, relief=tk.SUNKEN, anchor=tk.W, font=("Segoe UI", 10)).pack(side=tk.LEFT, fill=tk.X, expand=True)
        self.clock_var = tk.StringVar()
        tk.Label(sf, textvariable=self.clock_var, bd=1, relief=tk.SUNKEN, anchor=tk.E, font=("Segoe UI", 10)).pack(side=tk.RIGHT)
        self.apply_theme()
        self.refresh_files()

    def apply_theme(self):
        s = ttk.Style()
        s.theme_use('clam')
        s.configure("Custom.TButton", background=self.theme["accent"], foreground=self.theme["btn_fg"], font=("Segoe UI", 10), padding=8)
        s.map("Custom.TButton", background=[('active', '#002070')], foreground=[('active', '#fff')])
        s.configure("TCombobox", fieldbackground=self.theme["entry_bg"], foreground=self.theme["fg"])
        s.configure("Treeview", background=self.theme["entry_bg"], foreground=self.theme["fg"], fieldbackground=self.theme["entry_bg"])
        s.configure("Treeview.Heading", background=self.theme["bg"], foreground=self.theme["fg"])

    def update_clock(self):
        self.clock_var.set(datetime.now(pytz.timezone('Asia/Kolkata')).strftime("%a, %b %d, %Y %I:%M %p IST"))
        self.root.after(1000, self.update_clock)

    def setup_raw_tab(self):
        sf = ttk.Frame(self.raw_f)
        sf.pack(fill=tk.X, pady=5)
        ttk.Label(sf, text="Depth Range (m):").pack(side=tk.LEFT, padx=5)
        ttk.Entry(sf, textvariable=self.min_d_var, width=10).pack(side=tk.LEFT, padx=5)
        ttk.Label(sf, text="to").pack(side=tk.LEFT)
        ttk.Entry(sf, textvariable=self.max_d_var, width=10).pack(side=tk.LEFT, padx=5)
        ttk.Button(sf, text="Apply", command=self.apply_sel, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(sf, text="Reset", command=self.reset_sel, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        f = ttk.Frame(self.raw_f)
        f.pack(pady=10, fill=tk.BOTH, expand=True)
        c = tk.Canvas(f)
        sy = ttk.Scrollbar(f, orient=tk.VERTICAL, command=c.yview)
        sx = ttk.Scrollbar(f, orient=tk.HORIZONTAL, command=c.xview)
        sf = ttk.Frame(c)
        sf.bind("<Configure>", lambda e: c.configure(scrollregion=c.bbox("all")))
        c.configure(yscrollcommand=sy.set, xscrollcommand=sx.set)
        sy.pack(side=tk.RIGHT, fill=tk.Y)
        sx.pack(side=tk.BOTTOM, fill=tk.X)
        c.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        c.create_window((0, 0), window=sf, anchor="nw")
        self.raw_tree = ttk.Treeview(sf, show="headings")
        self.raw_tree.pack(fill=tk.BOTH, expand=True)

    def setup_qual_tab(self):
        f = ttk.Frame(self.qual_f)
        f.pack(pady=10, fill=tk.BOTH, expand=True)
        c = tk.Canvas(f)
        sy = ttk.Scrollbar(f, orient=tk.VERTICAL, command=c.yview)
        sx = ttk.Scrollbar(f, orient=tk.HORIZONTAL, command=c.xview)
        sf = ttk.Frame(c)
        sf.bind("<Configure>", lambda e: c.configure(scrollregion=c.bbox("all")))
        c.configure(yscrollcommand=sy.set, xscrollcommand=sx.set)
        sy.pack(side=tk.RIGHT, fill=tk.Y)
        sx.pack(side=tk.BOTTOM, fill=tk.X)
        c.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        c.create_window((0, 0), window=sf, anchor="nw")
        self.qual_tree = ttk.Treeview(sf, show="headings")
        self.qual_tree.pack(fill=tk.BOTH, expand=True)

    def setup_ana_tab(self):
        f = ttk.Frame(self.ana_f)
        f.pack(pady=10, fill=tk.BOTH, expand=True)
        cf = ttk.Frame(f)
        cf.pack(fill=tk.X, pady=5)
        b = ttk.Button(cf, text="Run Monte Carlo", command=self.run_mc, style="Custom.TButton")
        b.pack(side=tk.LEFT, padx=5)
        b.bind("<Enter>", lambda e: self.show_tip(b, "Runs 1000 iterations with 1% noise"))
        b.bind("<Leave>", lambda e: self.hide_tip())
        c = tk.Canvas(f)
        sy = ttk.Scrollbar(f, orient=tk.VERTICAL, command=c.yview)
        sx = ttk.Scrollbar(f, orient=tk.HORIZONTAL, command=c.xview)
        sf = ttk.Frame(c)
        sf.bind("<Configure>", lambda e: c.configure(scrollregion=c.bbox("all")))
        c.configure(yscrollcommand=sy.set, xscrollcommand=sx.set)
        sy.pack(side=tk.RIGHT, fill=tk.Y)
        sx.pack(side=tk.BOTTOM, fill=tk.X)
        c.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        c.create_window((0, 0), window=sf, anchor="nw")
        self.ana_tree = ttk.Treeview(sf, show="headings")
        self.ana_tree.pack(fill=tk.BOTH, expand=True)

    def show_tip(self, w, t):
        self.tip = tk.Toplevel(w)
        self.tip.wm_overrideredirect(True)
        self.tip.wm_geometry(f"+{w.winfo_rootx()+20}+{w.winfo_rooty()+20}")
        tk.Label(self.tip, text=t, background="yellow", relief="solid", borderwidth=1, font=("Segoe UI", 10)).pack()

    def hide_tip(self):
        if hasattr(self, 'tip'):
            self.tip.destroy()

    def setup_sum_tab(self):
        f = ttk.Frame(self.sum_f)
        f.pack(pady=10, fill=tk.BOTH, expand=True)
        c = tk.Canvas(f)
        sy = ttk.Scrollbar(f, orient=tk.VERTICAL, command=c.yview)
        sx = ttk.Scrollbar(f, orient=tk.HORIZONTAL, command=c.xview)
        sf = ttk.Frame(c)
        sf.bind("<Configure>", lambda e: c.configure(scrollregion=c.bbox("all")))
        c.configure(yscrollcommand=sy.set, xscrollcommand=sx.set)
        sy.pack(side=tk.RIGHT, fill=tk.Y)
        sx.pack(side=tk.BOTTOM, fill=tk.X)
        c.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        c.create_window((0, 0), window=sf, anchor="nw")
        self.sum_tree = ttk.Treeview(sf, show="headings")
        self.sum_tree.pack(fill=tk.BOTH, expand=True)

    def setup_lay_tab(self):
        f = ttk.Frame(self.lay_f)
        f.pack(pady=10, fill=tk.BOTH, expand=True)
        c = tk.Canvas(f)
        sy = ttk.Scrollbar(f, orient=tk.VERTICAL, command=c.yview)
        sx = ttk.Scrollbar(f, orient=tk.HORIZONTAL, command=c.xview)
        sf = ttk.Frame(c)
        sf.bind("<Configure>", lambda e: c.configure(scrollregion=c.bbox("all")))
        c.configure(yscrollcommand=sy.set, xscrollcommand=sx.set)
        sy.pack(side=tk.RIGHT, fill=tk.Y)
        sx.pack(side=tk.BOTTOM, fill=tk.X)
        c.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        c.create_window((0, 0), window=sf, anchor="nw")
        self.lay_tree = ttk.Treeview(sf, show="headings")
        self.lay_tree.pack(fill=tk.BOTH, expand=True)

    def setup_corr_tab(self):
        f = ttk.Frame(self.corr_f)
        f.pack(pady=10, fill=tk.BOTH, expand=True)
        cf = ttk.Frame(f)
        cf.pack(fill=tk.X, pady=5)
        ttk.Button(cf, text="Upload SPT/Sonic", command=self.upload_corr, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(cf, text="Run Correlation", command=self.run_corr, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        c = tk.Canvas(f)
        sy = ttk.Scrollbar(f, orient=tk.VERTICAL, command=c.yview)
        sx = ttk.Scrollbar(f, orient=tk.HORIZONTAL, command=c.xview)
        sf = ttk.Frame(c)
        sf.bind("<Configure>", lambda e: c.configure(scrollregion=c.bbox("all")))
        c.configure(yscrollcommand=sy.set, xscrollcommand=sx.set)
        sy.pack(side=tk.RIGHT, fill=tk.Y)
        sx.pack(side=tk.BOTTOM, fill=tk.X)
        c.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        c.create_window((0, 0), window=sf, anchor="nw")
        self.corr_tree = ttk.Treeview(sf, show="headings")
        self.corr_tree.pack(fill=tk.BOTH, expand=True)
        self.corr_plot_label = ttk.Label(sf, text="Upload SPT/Sonic to view correlations.")
        self.corr_plot_label.pack(fill=tk.BOTH, expand=True)

    def setup_rt_tab(self):
        f = ttk.Frame(self.rt_f)
        f.pack(pady=10, fill=tk.BOTH, expand=True)
        cf = ttk.Frame(f)
        cf.pack(fill=tk.X, pady=5)
        ttk.Button(cf, text="Start Real-Time", command=self.start_rt, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(cf, text="Stop Real-Time", command=self.stop_rt, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        c = tk.Canvas(f)
        sy = ttk.Scrollbar(f, orient=tk.VERTICAL, command=c.yview)
        sx = ttk.Scrollbar(f, orient=tk.HORIZONTAL, command=c.xview)
        sf = ttk.Frame(c)
        sf.bind("<Configure>", lambda e: c.configure(scrollregion=c.bbox("all")))
        c.configure(yscrollcommand=sy.set, xscrollcommand=sx.set)
        sy.pack(side=tk.RIGHT, fill=tk.Y)
        sx.pack(side=tk.BOTTOM, fill=tk.X)
        c.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        c.create_window((0, 0), window=sf, anchor="nw")
        self.rt_tree = ttk.Treeview(sf, show="headings")
        self.rt_tree.pack(fill=tk.BOTH, expand=True)
        self.rt_plot_label = ttk.Label(sf, text="Start real-time to view data.")
        self.rt_plot_label.pack(fill=tk.BOTH, expand=True)

    def setup_viz_tab(self):
        f = ttk.Frame(self.viz_f)
        f.pack(pady=10, fill=tk.BOTH, expand=True)
        of = ttk.Frame(f)
        of.pack(fill=tk.X, pady=5)
        ttk.Label(of, text="Plot Params:", font=("Segoe UI", 10)).pack(side=tk.LEFT, padx=5)
        for p, v in self.plot_opts.items():
            ttk.Checkbutton(of, text=p, variable=v, command=self.update_plots).pack(side=tk.LEFT, padx=5)
        df = ttk.Frame(of)
        df.pack(side=tk.LEFT, padx=10)
        ttk.Label(df, text="Depth Range (m):").pack(side=tk.LEFT)
        ttk.Entry(df, textvariable=self.min_d_var, width=10).pack(side=tk.LEFT, padx=5)
        ttk.Label(df, text="to").pack(side=tk.LEFT)
        ttk.Entry(df, textvariable=self.max_d_var, width=10).pack(side=tk.LEFT, padx=5)
        ttk.Button(df, text="Update Plot", command=self.update_plots, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(of, text="Save Plots", command=self.save_plots, style="Custom.TButton").pack(side=tk.RIGHT, padx=5)
        self.canvas = tk.Canvas(f)
        sy = ttk.Scrollbar(f, orient=tk.VERTICAL, command=self.canvas.yview)
        sx = ttk.Scrollbar(f, orient=tk.HORIZONTAL, command=self.canvas.xview)
        self.scroll_f = ttk.Frame(self.canvas)
        self.scroll_f.bind("<Configure>", lambda e: self.canvas.configure(scrollregion=self.canvas.bbox("all")))
        self.canvas.configure(yscrollcommand=sy.set, xscrollcommand=sx.set)
        sy.pack(side=tk.RIGHT, fill=tk.Y)
        sx.pack(side=tk.BOTTOM, fill=tk.X)
        self.canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        self.canvas.create_window((0, 0), window=self.scroll_f, anchor="nw")
        self.plot_label.pack(fill=tk.BOTH, expand=True)
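
    # export_report builds a single LaTeX document (analysis table, quality issues,
    # summary statistics, layers, and correlation results when available) and compiles
    # it with latexmk, so a working LaTeX toolchain must be on PATH.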
    def export_report(self):
        if not shutil.which('latexmk'):
            messagebox.showerror("Error", "LaTeX not installed.")
            return
        if self.cur_data is None:
            messagebox.showwarning("No Data", "Load file.")
            return
        try:
            adf = self.analyze_data(self.cur_data)
            ts = datetime.now().strftime('%Y%m%d_%H%M')
            od = self.out_dir / ts
            od.mkdir(exist_ok=True)
            fn = Path(self.cur_file).stem
            tp = od / f"{fn}_detailed_report_{ts}.tex"
            lc = r"""\documentclass[a4paper,12pt]{article}
\usepackage{booktabs,longtable,geometry,amsmath,amsfonts,noto}
\geometry{margin=1in}
\begin{document}
\section*{Detailed Seismic Analysis Report}
\subsection*{Analysis Data}
\begin{longtable}{@{}""" + "c" * len(adf.columns) + r"""@{}}
\toprule """ + r" & ".join([f"\\textbf{{{c}}}" for c in adf.columns]) + r"""\\\midrule\endhead"""
            for _, r in adf.iterrows():
                lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) if pd.notna(v) else "" for v in r]) + r"\\\midrule" + "\n"
            lc += r"\bottomrule\end{longtable}"
            if self.qual_issues is not None and not self.qual_issues.empty:
                lc += r"\section*{Quality Check Issues}\begin{longtable}{@{}" + "c" * len(self.qual_issues.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in self.qual_issues.columns]) + r"\\\midrule\endhead"
                for _, r in self.qual_issues.iterrows():
                    lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r]) + r"\\\midrule" + "\n"
                lc += r"\bottomrule\end{longtable}"
            p = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio', 'Poisson’s Ratio', 'Shear Modulus (MPa)', 'Bulk Modulus (MPa)', 'Young’s Modulus (MPa)']
            vd = adf[p].dropna()
            sd = {
                'Parameter': p,
                'Mean': [vd[c].mean() if not vd[c].empty else np.nan for c in p],
                'Median': [vd[c].median() if not vd[c].empty else np.nan for c in p],
                'Std Dev': [vd[c].std() if not vd[c].empty else np.nan for c in p],
                'Min': [vd[c].min() if not vd[c].empty else np.nan for c in p],
                'Max': [vd[c].max() if not vd[c].empty else np.nan for c in p]
            }
            sdf = pd.DataFrame(sd)
            lc += r"\section*{Summary Statistics}\begin{longtable}{@{}" + "c" * len(sdf.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in sdf.columns]) + r"\\\midrule\endhead"
            for _, r in sdf.iterrows():
                lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r]) + r"\\\midrule" + "\n"
            lc += r"\bottomrule\end{longtable}"
            if self.layers is not None and not self.layers.empty:
                lc += r"\section*{Identified Layers}\begin{longtable}{@{}" + "c" * len(self.layers.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in self.layers.columns]) + r"\\\midrule\endhead"
                for _, r in self.layers.iterrows():
                    lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r]) + r"\\\midrule" + "\n"
                lc += r"\bottomrule\end{longtable}"
            if self.corr_res is not None and not self.corr_res.empty:
                lc += r"\section*{Correlation Results}\begin{longtable}{@{}" + "c" * len(self.corr_res.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in self.corr_res.columns]) + r"\\\midrule\endhead"
                for _, r in self.corr_res.iterrows():
                    lc += r" & ".join([f"{v:.3f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r]) + r"\\\midrule" + "\n"
                lc += r"\bottomrule\end{longtable}"
            lc += r"\end{document}"
            with open(tp, 'w', encoding='utf-8') as f:
                f.write(lc)
            subprocess.run(['latexmk', '-pdf', f'-outdir={od}', str(tp)], capture_output=True, text=True, check=True)
            pp = od / f"{fn}_detailed_report_{ts}.pdf"
            if pp.exists():
                self.status_var.set(f"Exported report to {pp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
                messagebox.showinfo("Success", f"Report to {pp}")
            else:
                raise Exception("PDF failed")
        except Exception as e:
            messagebox.showerror("Error", f"Export failed: {str(e)}")
            logging.error(f"Report export failed: {str(e)}")

    def export_ana_pdf(self):
        if not shutil.which('latexmk'):
            messagebox.showerror("Error", "LaTeX not installed.")
            return
        if self.cur_data is None:
            messagebox.showwarning("No Data", "Load file.")
            return
        try:
            df = self.analyze_data(self.cur_data)
            ts = datetime.now().strftime('%Y%m%d_%H%M')
            od = self.out_dir / ts
            od.mkdir(exist_ok=True)
            fn = Path(self.cur_file).stem
            tp = od / f"{fn}_analysis_{ts}.tex"
            lc = r"\documentclass[a4paper,12pt]{article}\usepackage{booktabs,longtable,geometry,amsmath,amsfonts,noto}\geometry{margin=1in}\begin{document}\section*{Analysis Report}\begin{longtable}{@{}" + "c" * len(df.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in df.columns]) + r"\\\midrule\endhead"
            for _, r in df.iterrows():
                lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) if pd.notna(v) else "" for v in r]) + r"\\\midrule" + "\n"
            lc += r"\bottomrule\end{longtable}\end{document}"
            with open(tp, 'w', encoding='utf-8') as f:
                f.write(lc)
            subprocess.run(['latexmk', '-pdf', f'-outdir={od}', str(tp)], capture_output=True, text=True, check=True)
            pp = od / f"{fn}_analysis_{ts}.pdf"
            if pp.exists():
                self.status_var.set(f"Exported analysis to PDF: {pp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
                messagebox.showinfo("Success", f"Exported to {pp}")
            else:
                raise Exception("PDF failed")
        except Exception as e:
            messagebox.showerror("Error", f"Export failed: {str(e)}")
            logging.error(f"Analysis PDF failed: {str(e)}")

    def refresh_files(self):
        # The original glob pattern matched neither .xlsx nor .csv; list both explicitly.
        files = sorted(self.data_dir.glob("*.xlsx")) + sorted(self.data_dir.glob("*.csv"))
        self.file_cb['values'] = [f.name for f in files]
        self.file_var.set("")
        self.clear_tabs()
        self.status_var.set("Select file.")

    def upload_file(self):
        fp = filedialog.askopenfilename(filetypes=[("Excel/CSV", "*.xlsx *.csv"), ("Excel", "*.xlsx"), ("CSV", "*.csv"), ("All", "*.*")])
        if fp:
            try:
                shutil.copy(fp, self.data_dir / Path(fp).name)
                self.refresh_files()
                self.status_var.set(f"Uploaded: {Path(fp).name} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
            except Exception as e:
                messagebox.showerror("Error", f"Upload failed: {str(e)}")
                logging.error(f"Upload failed: {str(e)}")

    def create_template(self):
        try:
            df = pd.DataFrame([[3.0, 100.0, 7.91, 16.81], [4.5, 98.5, 8.0, 17.0], [6.0, 97.0, 8.5, 17.5]],
                             columns=["Depth (m)", "Elevation (m)", "P wave time (ms)", "S wave time (ms)"])
            p = self.data_dir / "template_seismic_data.xlsx"
            df.to_excel(p, index=False)
            self.refresh_files()
            self.status_var.set(f"Created template: template_seismic_data.xlsx at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
            messagebox.showinfo("Success", "Template created: template_seismic_data.xlsx")
        except Exception as e:
            messagebox.showerror("Error", f"Template failed: {str(e)}")
            logging.error(f"Template failed: {str(e)}")

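    # Input sheets are expected to carry a two-row header: a row containing "Depth"
    # followed by a row with the unit "(m)" in the same column; find_header_row
    # returns the index of that first header row, or None if the layout is absent.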
    def find_header_row(self, df):
        for i, r in df.iterrows():
            rv = [str(v).strip() if pd.notna(v) else "" for v in r.values]
            if "Depth" in rv:
                di = rv.index("Depth")
                if i + 1 < len(df) and str(df.iloc[i + 1, di]).strip() == "(m)":
                    return i
        return None

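    # load_file reads the selected .xlsx (first sheet with a valid header) or .csv,
    # flattens the two-row header into single column names, de-duplicates repeated
    # names, normalises the Depth/Elevation columns, and then repopulates the data tabs.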
    def load_file(self, e=None):
        sf = self.file_var.get()
        if not sf:
            return
        self.clear_tabs()
        self.cur_file = sf
        fp = self.data_dir / sf
        try:
            if sf.endswith('.xlsx'):
                xl = pd.ExcelFile(fp)
                for sn in xl.sheet_names:
                    df = pd.read_excel(fp, sheet_name=sn, header=None)
                    hr = self.find_header_row(df)
                    if hr is not None:
                        df = pd.read_excel(fp, sheet_name=sn, header=[hr, hr + 1])
                        break
                else:
                    raise ValueError("No 'Depth' followed by '(m)'")
                cn = []
                cc = {}
                for c in df.columns:
                    cn1 = f"{c[0]} {c[1]}".strip() if c[1] and pd.notna(c[1]) else c[0]
                    cc[cn1] = cc.get(cn1, 0) + 1
                    cn.append(f"{cn1} {cc[cn1]}" if cc[cn1] > 1 else cn1)
                df.columns = cn
            elif sf.endswith('.csv'):
                dt = pd.read_csv(fp, header=None)
                hr = self.find_header_row(dt)
                if hr is None:
                    raise ValueError("No 'Depth' followed by '(m)'")
                df = pd.read_csv(fp, header=[hr, hr + 1])
                cn = []
                cc = {}
                for c in df.columns:
                    cn1 = f"{c[0]} {c[1]}".strip() if c[1] and pd.notna(c[1]) else c[0]
                    cc[cn1] = cc.get(cn1, 0) + 1
                    cn.append(f"{cn1} {cc[cn1]}" if cc[cn1] > 1 else cn1)
                df.columns = cn
            else:
                raise ValueError("Unsupported format")
            df.columns = [c.replace('\n', ' ') for c in df.columns]
            df.columns = ['Depth (m)' if c.startswith('Depth') else c for c in df.columns]
            ec = [c for c in df.columns if c.startswith('Elevation')]
            if ec:
                ve = next((c for c in ec if df[c].notna().any()), None)
                if ve:
                    df.rename(columns={ve: 'Elevation (m)'}, inplace=True)
                    df.drop(columns=[c for c in ec if c != ve], inplace=True)
            df = df.dropna(axis=1, how='all')
            df = df.loc[:, (df.notna().any()) | (df.columns == 'Depth (m)')].dropna(subset=df.columns.difference(['Depth (m)']), how='all')
            self.raw_data = df[df['Depth (m)'].notna()].copy()
            self.cur_data = self.raw_data.copy()
            if not all(c in df.columns for c in ['Depth (m)', 'Elevation (m)']):
                raise ValueError("Missing Depth/Elevation")
            self.display_raw(self.raw_data)
            adf = self.analyze_data(self.cur_data)
            self.display_ana(adf)
            self.check_qual(self.cur_data)
            self.display_qual()
            self.display_sum(adf)
            self.identify_layers(adf)
            self.display_lay()
            self.plot_viz(self.cur_data, adf)
            self.status_var.set(f"Loaded: {sf} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
            logging.info(f"Loaded file {sf} with {len(self.cur_data)} rows")
        except Exception as e:
            messagebox.showerror("Error", f"Load failed: {str(e)}")
            logging.error(f"Load failed: {str(e)}")
            self.clear_tabs()

    def clear_tabs(self):
        for t in [self.raw_tree, self.qual_tree, self.ana_tree, self.sum_tree, self.lay_tree, self.corr_tree, self.rt_tree]:
            if t:
                for i in t.get_children():
                    t.delete(i)
        if hasattr(self, 'plot_canvas') and self.plot_canvas:
            self.plot_canvas.get_tk_widget().destroy()
        if hasattr(self, 'cur_fig') and self.cur_fig:
            plt.close(self.cur_fig)
        self.plot_label.pack(fill=tk.BOTH, expand=True)
        self.cur_data = self.raw_data = self.cur_file = self.deviations = self.qual_issues = self.layers = self.corr_data = self.rt_data = self.cur_fig = None
        self.sel_rows = {}

    def on_select_row(self, e):
        i = self.raw_tree.identify_row(e.y)
        if not i:
            return
        idx = int(self.raw_tree.index(i))
        self.sel_rows[idx] = not self.sel_rows.get(idx, False)
        self.raw_tree.set(i, "Select", "✔" if self.sel_rows[idx] else "")

    def display_raw(self, df):
        for i in self.raw_tree.get_children():
            self.raw_tree.delete(i)
        c = ["Select"] + list(df.columns)
        self.raw_tree["columns"] = c
        for col in c:
            self.raw_tree.heading(col, text=col)
            self.raw_tree.column(col, anchor=tk.CENTER, stretch=True)
        self.sel_rows = {}
        for idx, r in df.iterrows():
            self.raw_tree.insert("", tk.END, values=[""] + [str(v) if pd.notna(v) else "" for v in r])
            self.sel_rows[idx] = False
        self.raw_tree.column("Select", width=50, minwidth=50)
        self.raw_tree.bind("<Button-1>", self.on_select_row)
        for col in c[1:]:
            dl = [len(str(r.get(col, ""))) for _, r in df.iterrows()][:100]
            self.raw_tree.column(col, width=max(dl, default=0) * 10, minwidth=150)

    def apply_sel(self):
        if self.raw_data is None:
            return
        df = self.raw_data.copy()
        try:
            md = float(self.min_d_var.get()) if self.min_d_var.get().strip() else df['Depth (m)'].min()
            xd = float(self.max_d_var.get()) if self.max_d_var.get().strip() else df['Depth (m)'].max()
            if md > xd:
                md, xd = xd, md
            df = df[(df['Depth (m)'] >= md) & (df['Depth (m)'] <= xd)]
        except ValueError:
            messagebox.showwarning("Invalid", "Enter valid depth.")
            return
        si = [i for i, s in self.sel_rows.items() if s]
        if si:
            df = df.iloc[si]
        self.cur_data = df
        adf = self.analyze_data(df)
        self.display_ana(adf)
        self.check_qual(df)
        self.display_qual()
        self.display_sum(adf)
        self.identify_layers(adf)
        self.display_lay()
        self.plot_viz(df, adf)
        self.status_var.set(f"Selected: {len(df)} rows at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")

    def reset_sel(self):
        if self.raw_data is None:
            return
        self.cur_data = self.raw_data.copy()
        self.min_d_var.set("")
        self.max_d_var.set("")
        self.sel_rows = {i: False for i in range(len(self.raw_data))}
        for i in self.raw_tree.get_children():
            self.raw_tree.set(i, "Select", "")
        adf = self.analyze_data(self.cur_data)
        self.display_ana(adf)
        self.check_qual(self.cur_data)
        self.display_qual()
        self.display_sum(adf)
        self.identify_layers(adf)
        self.display_lay()
        self.plot_viz(self.cur_data, adf)
        self.status_var.set(f"Reset: {len(self.cur_data)} rows at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")

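    # Quality screening: a reading is flagged when its first-arrival time is below
    # 0.5 µs, deviates from the column median by more than two standard deviations,
    # or its depth does not increase monotonically relative to the previous row.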
    def check_qual(self, df):
        pc = [c for c in df.columns if "P wave time" in c]
        sc = [c for c in df.columns if "S wave time" in c]
        if not pc or not sc:
            self.qual_issues = pd.DataFrame(columns=['Depth (m)', 'First P-wave Time (µs)', 'First S-wave Time (µs)', 'Flag Reason'])
            logging.warning("No P/S wave columns found.")
            return
        df['First P-wave Time (µs)'] = df[pc].min(axis=1) * 1000
        df['First S-wave Time (µs)'] = df[sc].min(axis=1) * 1000
        pm = df['First P-wave Time (µs)'].median()
        ps = df['First P-wave Time (µs)'].std()
        sm = df['First S-wave Time (µs)'].median()
        ss = df['First S-wave Time (µs)'].std()
        dd = df['Depth (m)'].diff()
        nmi = dd[dd <= 0].index
        qi = []
        for i, r in df.iterrows():
            f = []
            if pd.notna(r['First P-wave Time (µs)']):
                if r['First P-wave Time (µs)'] < 0.5:
                    f.append("P-wave time too small")
                if abs(r['First P-wave Time (µs)'] - pm) > 2 * ps:
                    f.append(f"P-wave outlier (value={r['First P-wave Time (µs)']:.2f}, median={pm:.2f})")
            if pd.notna(r['First S-wave Time (µs)']):
                if r['First S-wave Time (µs)'] < 0.5:
                    f.append("S-wave time too small")
                if abs(r['First S-wave Time (µs)'] - sm) > 2 * ss:
                    f.append(f"S-wave outlier (value={r['First S-wave Time (µs)']:.2f}, median={sm:.2f})")
            if i in nmi and i > 0:
                f.append("Depth not monotonic")
            if f:
                qi.append([r['Depth (m)'], r['First P-wave Time (µs)'], r['First S-wave Time (µs)'], "; ".join(f)])
                logging.info(f"Flagged depth {r['Depth (m)']}: {'; '.join(f)}")
        self.qual_issues = pd.DataFrame(qi, columns=['Depth (m)', 'First P-wave Time (µs)', 'First S-wave Time (µs)', 'Flag Reason']) if qi else pd.DataFrame(columns=['Depth (m)', 'First P-wave Time (µs)', 'First S-wave Time (µs)', 'Flag Reason'])
        logging.info(f"Quality check processed {len(df)} depths, found {len(qi)} issues.")

    def display_qual(self):
        for i in self.qual_tree.get_children():
            self.qual_tree.delete(i)
        c = list(self.qual_issues.columns)
        self.qual_tree["columns"] = c
        for col in c:
            self.qual_tree.heading(col, text=col)
            self.qual_tree.column(col, anchor=tk.CENTER)
        for _, r in self.qual_issues.iterrows():
            self.qual_tree.insert("", tk.END, values=[f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r])
        for col in c:
            dl = [len(str(r.get(col, ""))) for _, r in self.qual_issues.iterrows()][:100]
            self.qual_tree.column(col, width=max(dl, default=0) * 10, minwidth=300 if col == "Flag Reason" else 150)

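    # Monte Carlo uncertainty: the travel-time columns are perturbed with 1%
    # multiplicative Gaussian noise over 1000 iterations, and the per-depth mean
    # plus 2.5/97.5 percentile bounds (a 95% interval) are appended to the table.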
    def run_mc(self):
        if self.cur_data is None:
            messagebox.showwarning("No Data", "Load file.")
            return
        try:
            df = self.cur_data.copy()
            ni = 1000
            tns = 0.01
            pc = [c for c in df.columns if "P wave time" in c]
            sc = [c for c in df.columns if "S wave time" in c]
            if not pc or not sc:
                raise ValueError("No P/S wave columns")
            if df[pc + sc].isna().all().all():
                raise ValueError("All P/S wave times are missing")
            res = {'P-wave Velocity (m/s)': [], 'S-wave Velocity (m/s)': [], 'Vp/Vs Ratio': []}
            for _ in range(ni):
                ds = df.copy()
                for c in pc + sc:
                    ds[c] = ds[c] * (1 + np.random.normal(0, tns, len(ds)))
                as_ = self.analyze_data(ds)
                for p in res:
                    res[p].append(as_[p].values)
            for p in res:
                res[p] = np.array(res[p])
                df[f'{p} Mean'] = np.nanmean(res[p], axis=0)
                df[f'{p} CI Lower'] = np.nanpercentile(res[p], 2.5, axis=0)
                df[f'{p} CI Upper'] = np.nanpercentile(res[p], 97.5, axis=0)
            self.cur_data = df
            self.display_ana(df)
            self.status_var.set(f"Monte Carlo done at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
            logging.info(f"Monte Carlo processed {len(df)} depths with {ni} iterations.")
        except Exception as e:
            messagebox.showerror("Error", f"Monte Carlo failed: {str(e)}")
            logging.error(f"Monte Carlo failed: {str(e)}")

    def upload_corr(self):
        fp = filedialog.askopenfilename(filetypes=[("Excel/CSV", "*.xlsx *.csv"), ("Excel", "*.xlsx"), ("CSV", "*.csv"), ("All", "*.*")])
        if fp:
            try:
                self.corr_data = pd.read_excel(fp) if fp.endswith('.xlsx') else pd.read_csv(fp)
                if 'Depth (m)' not in self.corr_data.columns:
                    raise ValueError("SPT/Sonic needs Depth (m)")
                self.status_var.set(f"Uploaded SPT/Sonic: {Path(fp).name} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
            except Exception as e:
                messagebox.showerror("Error", f"Upload failed: {str(e)}")
                logging.error(f"Corr upload failed: {str(e)}")

    def run_corr(self):
        if self.cur_data is None or self.corr_data is None:
            messagebox.showwarning("No Data", "Load CHST and SPT/Sonic.")
            return
        try:
            sdf = self.analyze_data(self.cur_data)
            mdf = pd.merge(sdf, self.corr_data, on='Depth (m)', how='inner')
            if mdf.empty:
                raise ValueError("No matching depths")
            cr = []
            sp = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio']
            cp = [c for c in self.corr_data.columns if c != 'Depth (m)']
            for s in sp:
                for c in cp:
                    vd = mdf[[s, c]].dropna()
                    if len(vd) > 1:
                        cr.append([s, c, *scipy.stats.pearsonr(vd[s], vd[c])])
            self.corr_res = pd.DataFrame(cr, columns=['Seismic Param', 'Corr Param', 'Pearson Corr', 'P-Value'])
            self.display_corr()
            self.plot_corr(mdf)
            self.status_var.set(f"Correlation done at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
        except Exception as e:
            messagebox.showerror("Error", f"Correlation failed: {str(e)}")
            logging.error(f"Correlation failed: {str(e)}")

    def display_corr(self):
        for i in self.corr_tree.get_children():
            self.corr_tree.delete(i)
        c = list(self.corr_res.columns)
        self.corr_tree["columns"] = c
        for col in c:
            self.corr_tree.heading(col, text=col)
            self.corr_tree.column(col, anchor=tk.CENTER)
        for _, r in self.corr_res.iterrows():
            self.corr_tree.insert("", tk.END, values=[f"{v:.3f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r])
        for col in c:
            dl = [len(str(r.get(col, ""))) for _, r in self.corr_res.iterrows()][:100]
            self.corr_tree.column(col, width=max(dl, default=0) * 10, minwidth=150)

    def plot_corr(self, mdf):
        if hasattr(self, 'corr_canvas') and self.corr_canvas:
            self.corr_canvas.get_tk_widget().destroy()
        if hasattr(self, 'corr_fig') and self.corr_fig:
            plt.close(self.corr_fig)
        self.corr_plot_label.pack_forget()
        sp = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)']
        cp = [c for c in mdf.columns if c not in sp + ['Depth (m)']]
        if not cp:
            self.corr_plot_label = ttk.Label(self.corr_tree.master, text="No corr params.")
            self.corr_plot_label.pack(fill=tk.BOTH, expand=True)
            return
        n_plots = len(sp) * len(cp)  # renamed from 'np' to avoid shadowing the numpy alias
        r = (n_plots + 2) // 3
        self.corr_fig, ax = plt.subplots(r, 3, figsize=(15, 4 * r))
        ax = ax.flatten()
        pi = 0
        for s in sp:
            for c in cp:
                vd = mdf[[s, c]].dropna()
                if len(vd) > 1:
                    ax[pi].scatter(vd[s], vd[c], c='b', alpha=0.5)
                    ax[pi].set_xlabel(s)
                    ax[pi].set_ylabel(c)
                    ax[pi].set_title(f'{s} vs {c}')
                    ax[pi].grid(True)
                    pi += 1
        for i in range(pi, len(ax)):
            ax[i].axis('off')
        plt.tight_layout()
        self.corr_canvas = FigureCanvasTkAgg(self.corr_fig, master=self.corr_tree.master)
        self.corr_canvas.draw()
        self.corr_canvas.get_tk_widget().pack(fill=tk.BOTH, expand=True)

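    # Real-time mode polls SeismicWaveData/realtime_chst_data.csv roughly once per
    # second on a daemon thread, merges new rows by depth, and hands display updates
    # back to the Tk main loop via root.after so widgets are only touched there.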
    def start_rt(self):
        if self.rt_running:
            return
        self.rt_running = True
        self.rt_data = pd.DataFrame(columns=['Depth (m)', 'Elevation (m)', 'P wave time (ms)', 'S wave time (ms)'])
        self.display_rt()
        self.rt_thread = threading.Thread(target=self.rt_acq, daemon=True)
        self.rt_thread.start()
        self.status_var.set("Started real-time.")

    def stop_rt(self):
        self.rt_running = False
        self.status_var.set("Stopped real-time.")

    def rt_acq(self):
        rf = self.data_dir / "realtime_chst_data.csv"
        while self.rt_running:
            try:
                if rf.exists():
                    dn = pd.read_csv(rf)
                    if not dn.empty:
                        self.rt_data = pd.concat([self.rt_data, dn]).drop_duplicates(subset=['Depth (m)']).reset_index(drop=True)
                        self.root.after(0, self.update_rt_display)
                time.sleep(1)
            except Exception as e:
                logging.error(f"Real-time error: {str(e)}")

    def update_rt_display(self):
        self.display_rt()
        self.plot_rt()

    def display_rt(self):
        for i in self.rt_tree.get_children():
            self.rt_tree.delete(i)
        c = list(self.rt_data.columns)
        self.rt_tree["columns"] = c
        for col in c:
            self.rt_tree.heading(col, text=col)
            self.rt_tree.column(col, anchor=tk.CENTER)
        for _, r in self.rt_data.iterrows():
            self.rt_tree.insert("", tk.END, values=[f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r])
        for col in c:
            dl = [len(str(r.get(col, ""))) for _, r in self.rt_data.iterrows()][:100]
            self.rt_tree.column(col, width=max(dl, default=0) * 10, minwidth=150)

    def plot_rt(self):
        if hasattr(self, 'rt_canvas') and self.rt_canvas:
            self.rt_canvas.get_tk_widget().destroy()
        if hasattr(self, 'rt_fig') and self.rt_fig:
            plt.close(self.rt_fig)
        self.rt_plot_label.pack_forget()
        if self.rt_data.empty:
            self.rt_plot_label = ttk.Label(self.rt_tree.master, text="No real-time data.")
            self.rt_plot_label.pack(fill=tk.BOTH, expand=True)
            return
        adf = self.analyze_data(self.rt_data)
        self.rt_fig, ax = plt.subplots(figsize=(10, 5))
        ax.plot(adf['P-wave Velocity (m/s)'], adf['Depth (m)'], 'b-', label='P-wave')
        ax.plot(adf['S-wave Velocity (m/s)'], adf['Depth (m)'], 'r-', label='S-wave')
        ax.set_xlabel('Velocity (m/s)')
        ax.set_ylabel('Depth (m)')
        ax.set_title('Real-Time Velocities')
        ax.invert_yaxis()
        ax.legend()
        ax.grid(True)
        self.rt_canvas = FigureCanvasTkAgg(self.rt_fig, master=self.rt_tree.master)
        self.rt_canvas.draw()
        self.rt_canvas.get_tk_widget().pack(fill=tk.BOTH, expand=True)

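    # analyze_data derives interval velocities and elastic moduli per depth step:
    #   V = Δdepth / Δ(first-arrival time), accepted only between 10 and 10000 m/s;
    #   Poisson's ratio ν = (R² − 2) / (2(R² − 1)) with R = Vp/Vs;
    #   G = ρVs², K = ρ(Vp² − (4/3)Vs²), E = 9KG / (3K + G),
    # using an assumed constant density ρ = 2000 kg/m³.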
    def analyze_data(self, df):
        df = df.copy()
        df['Depth (m)'] = pd.to_numeric(df['Depth (m)'], errors='coerce')
        pc = [c for c in df.columns if "P wave time" in c]
        sc = [c for c in df.columns if "S wave time" in c]
        if not pc or not sc:
            raise ValueError("No P/S wave columns")
        for c in pc + sc:
            df[c] = pd.to_numeric(df[c], errors='coerce')
        df['First P-wave Time (µs)'] = df[pc].min(axis=1) * 1000
        df['First S-wave Time (µs)'] = df[sc].min(axis=1) * 1000
        df['P-wave Time Diff'] = df['First P-wave Time (µs)'].diff()
        df['S-wave Time Diff'] = df['First S-wave Time (µs)'].diff()
        df['P-wave Velocity (m/s)'] = np.nan
        df['S-wave Velocity (m/s)'] = np.nan
        mt = 0.001
        for i in range(1, len(df)):
            dd = df['Depth (m)'].iloc[i] - df['Depth (m)'].iloc[i-1]
            dpt = df['P-wave Time Diff'].iloc[i] / 10**6 if pd.notna(df['P-wave Time Diff'].iloc[i]) else np.nan
            dst = df['S-wave Time Diff'].iloc[i] / 10**6 if pd.notna(df['S-wave Time Diff'].iloc[i]) else np.nan
            if pd.notna(dpt) and abs(dpt) > mt and dd > 0:
                pv = abs(dd / dpt)
                if 10 <= pv <= 10000:
                    df.loc[df.index[i], 'P-wave Velocity (m/s)'] = pv
                else:
                    logging.warning(f"Invalid P-wave velocity {pv:.2f} at depth {df['Depth (m)'].iloc[i]}")
            if pd.notna(dst) and abs(dst) > mt and dd > 0:
                sv = abs(dd / dst)
                if 10 <= sv <= 10000:
                    df.loc[df.index[i], 'S-wave Velocity (m/s)'] = sv
                else:
                    logging.warning(f"Invalid S-wave velocity {sv:.2f} at depth {df['Depth (m)'].iloc[i]}")
        df['Vp/Vs Ratio'] = df['P-wave Velocity (m/s)'] / df['S-wave Velocity (m/s)']
        df['Poisson’s Ratio'] = (df['Vp/Vs Ratio']**2 - 2) / (2 * (df['Vp/Vs Ratio']**2 - 1))
        d = 2000  # assumed bulk density (kg/m^3)
        df['Shear Modulus (MPa)'] = d * df['S-wave Velocity (m/s)']**2 / 10**6
        df['Bulk Modulus (MPa)'] = d * (df['P-wave Velocity (m/s)']**2 - (4/3) * df['S-wave Velocity (m/s)']**2) / 10**6
        # Standard isotropic relation E = 9KG / (3K + G)
        df['Young’s Modulus (MPa)'] = 9 * df['Bulk Modulus (MPa)'] * df['Shear Modulus (MPa)'] / (3 * df['Bulk Modulus (MPa)'] + df['Shear Modulus (MPa)'])
        logging.info(f"Analyzed {len(df)} depths, P-wave NaNs: {df['P-wave Velocity (m/s)'].isna().sum()}, S-wave NaNs: {df['S-wave Velocity (m/s)'].isna().sum()}")
        return df

    def display_ana(self, df):
        for i in self.ana_tree.get_children():
            self.ana_tree.delete(i)
        c = ['Depth (m)', 'P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio', 'Poisson’s Ratio', 'Shear Modulus (MPa)', 'Bulk Modulus (MPa)', 'Young’s Modulus (MPa)'] + \
            [c for c in df.columns if c.endswith('Mean') or c.endswith('CI Lower') or c.endswith('CI Upper')]
        self.ana_tree["columns"] = c
        for col in c:
            self.ana_tree.heading(col, text=col)
            self.ana_tree.column(col, anchor=tk.CENTER)
        for _, r in df.iterrows():
            self.ana_tree.insert("", tk.END, values=[f"{r.get(col, np.nan):.2f}" if isinstance(r.get(col), (int, float)) and pd.notna(r.get(col)) else "" for col in c])
        for col in c:
            dl = [len(str(r.get(col, ""))) for _, r in df.iterrows()][:100]
            self.ana_tree.column(col, width=max(dl, default=0) * 10, minwidth=150)

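    # Layer picking: consecutive depths are grouped into one layer until the P-wave
    # velocity changes by more than 20% between adjacent valid readings, at which
    # point the layer is closed with its average P- and S-wave velocities.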
    def identify_layers(self, df):
        vt = 0.2
        l = []
        cl = {'Start Depth': df['Depth (m)'].iloc[0], 'End Depth': df['Depth (m)'].iloc[0], 'P-wave Velocity': [], 'S-wave Velocity': []}
        for i in range(1, len(df)):
            pv = df['P-wave Velocity (m/s)'].iloc[i]
            sv = df['S-wave Velocity (m/s)'].iloc[i]
            if pd.notna(pv) and pd.notna(sv) and 10 <= pv <= 10000 and 10 <= sv <= 10000:
                cl['P-wave Velocity'].append(pv)
                cl['S-wave Velocity'].append(sv)
                cl['End Depth'] = df['Depth (m)'].iloc[i]
                if i < len(df) - 1:
                    npv = df['P-wave Velocity (m/s)'].iloc[i + 1]
                    if pd.notna(npv) and abs(pv - npv) / pv > vt:
                        ld = {
                            'Start Depth (m)': cl['Start Depth'],
                            'End Depth (m)': cl['End Depth'],
                            'Avg P-wave Velocity (m/s)': np.mean(cl['P-wave Velocity']) if cl['P-wave Velocity'] else np.nan,
                            'Avg S-wave Velocity (m/s)': np.mean(cl['S-wave Velocity']) if cl['S-wave Velocity'] else np.nan
                        }
                        l.append(ld)
                        cl = {'Start Depth': df['Depth (m)'].iloc[i + 1], 'End Depth': df['Depth (m)'].iloc[i + 1], 'P-wave Velocity': [], 'S-wave Velocity': []}
            else:
                logging.warning(f"Skipped layer at depth {df['Depth (m)'].iloc[i]}: P-wave={pv}, S-wave={sv}")
        if cl['P-wave Velocity']:
            ld = {
                'Start Depth (m)': cl['Start Depth'],
                'End Depth (m)': cl['End Depth'],
                'Avg P-wave Velocity (m/s)': np.mean(cl['P-wave Velocity']) if cl['P-wave Velocity'] else np.nan,
                'Avg S-wave Velocity (m/s)': np.mean(cl['S-wave Velocity']) if cl['S-wave Velocity'] else np.nan
            }
            l.append(ld)
        self.layers = pd.DataFrame(l) if l else pd.DataFrame(columns=['Start Depth (m)', 'End Depth (m)', 'Avg P-wave Velocity (m/s)', 'Avg S-wave Velocity (m/s)'])
        logging.info(f"Identified {len(l)} layers, checked depths {df['Depth (m)'].min()} to {df['Depth (m)'].max()}")

    def display_lay(self):
        for i in self.lay_tree.get_children():
            self.lay_tree.delete(i)
        c = list(self.layers.columns)
        self.lay_tree["columns"] = c
        for col in c:
            self.lay_tree.heading(col, text=col)
            self.lay_tree.column(col, anchor=tk.CENTER)
        for _, r in self.layers.iterrows():
            self.lay_tree.insert("", tk.END, values=[f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r])
        for col in c:
            dl = [len(str(r.get(col, ""))) for _, r in self.layers.iterrows()][:100]
            self.lay_tree.column(col, width=max(dl, default=0) * 10, minwidth=200)

  955.     def display_sum(self, df):
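               """Fill the summary Treeview with mean, median, standard deviation, min and max
               for the velocity, ratio and modulus columns of df."""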
  956.         for i in self.sum_tree.get_children():
  957.             self.sum_tree.delete(i)
  958.         p = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio', 'Poisson’s Ratio', 'Shear Modulus (MPa)', 'Bulk Modulus (MPa)', 'Young’s Modulus (MPa)']
  959.         vd = df[p].dropna()
  960.         sd = {
  961.             'Parameter': p,
  962.             'Mean': [vd[c].mean() if not vd[c].empty else np.nan for c in p],
  963.             'Median': [vd[c].median() if not vd[c].empty else np.nan for c in p],
  964.             'Std Dev': [vd[c].std() if not vd[c].empty else np.nan for c in p],
  965.             'Min': [vd[c].min() if not vd[c].empty else np.nan for c in p],
  966.             'Max': [vd[c].max() if not vd[c].empty else np.nan for c in p]
  967.         }
  968.         sdf = pd.DataFrame(sd)
  969.         self.sum_tree["columns"] = list(sdf.columns)
  970.         for col in sdf.columns:
  971.             self.sum_tree.heading(col, text=col)
  972.             self.sum_tree.column(col, anchor=tk.CENTER)
  973.         for _, r in sdf.iterrows():
  974.             self.sum_tree.insert("", tk.END, values=[f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r])
  975.         for col in sdf.columns:
  976.             dl = [len(str(r.get(col, ""))) for _, r in sdf.iterrows()][:100]
  977.             self.sum_tree.column(col, width=max(dl, default=0) * 10, minwidth=150)
  978.         logging.info(f"Summary stats for P-wave Velocity: Mean={sd['Mean'][0]:.2f}, Median={sd['Median'][0]:.2f}, Min={sd['Min'][0]:.2f}, Max={sd['Max'][0]:.2f}")
  979.  
  980.     def plot_viz(self, rdf, adf):
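               """Plot depth profiles for the parameters ticked in self.plot_opts, restricted to the
               depth window given by min_d_var/max_d_var, and embed the figure in the scrollable
               frame; 'Travel Time Deviations' is drawn as a scatter of self.qual_issues."""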
  981.         if hasattr(self, 'plot_canvas') and self.plot_canvas:
  982.             self.plot_canvas.get_tk_widget().destroy()
  983.         if hasattr(self, 'cur_fig') and self.cur_fig:
  984.             plt.close(self.cur_fig)
  985.         self.plot_label.pack_forget()
  986.         if adf.empty:
  987.             self.plot_label = ttk.Label(self.scroll_f, text="No data for viz.")
  988.             self.plot_label.pack(fill=tk.BOTH, expand=True)
  989.             return
  990.         try:
  991.             md = float(self.min_d_var.get()) if self.min_d_var.get().strip() else adf['Depth (m)'].min()
  992.             xd = float(self.max_d_var.get()) if self.max_d_var.get().strip() else adf['Depth (m)'].max()
  993.             if md > xd:
  994.                 md, xd = xd, md
  995.             pdf = adf[(adf['Depth (m)'] >= md) & (adf['Depth (m)'] <= xd)]
  996.         except ValueError:
  997.             pdf = adf
  998.         ap = [p for p, v in self.plot_opts.items() if v.get()]
  999.         if not ap:
  1000.             self.plot_label = ttk.Label(self.scroll_f, text="No params selected.")
  1001.             self.plot_label.pack(fill=tk.BOTH, expand=True)
  1002.             return
  1003.         n_plots = len(ap)  # number of selected parameters; renamed to avoid shadowing the numpy alias np
  1004.         r = (n_plots + 2) // 3
  1005.         self.cur_fig, ax = plt.subplots(r, min(n_plots, 3), figsize=(15, 4 * r))
  1006.         ax = [ax] if n_plots == 1 else ax.flatten()
  1007.         pi = 0
  1008.         for p in ap:
  1009.             if p == "Travel Time Deviations":
  1010.                 if self.qual_issues is not None and not self.qual_issues.empty:
  1011.                     ax[pi].scatter(self.qual_issues['First P-wave Time (µs)'], self.qual_issues['Depth (m)'], c='r', label='P-wave Dev', alpha=0.5)
  1012.                     ax[pi].scatter(self.qual_issues['First S-wave Time (µs)'], self.qual_issues['Depth (m)'], c='b', label='S-wave Dev', alpha=0.5)
  1013.                     ax[pi].set_xlabel('Time (µs)')
  1014.                     ax[pi].set_ylabel('Depth (m)')
  1015.                     ax[pi].set_title('Travel Time Deviations')
  1016.                     ax[pi].invert_yaxis()
  1017.                     ax[pi].legend()
  1018.                     ax[pi].grid(True)
  1019.                     pi += 1
  1020.             else:
  1021.                 if p in pdf.columns:
  1022.                     ax[pi].plot(pdf[p], pdf['Depth (m)'], 'b-')
  1023.                     ax[pi].set_xlabel(p)
  1024.                     ax[pi].set_ylabel('Depth (m)')
  1025.                     ax[pi].set_title(p)
  1026.                     ax[pi].invert_yaxis()
  1027.                     ax[pi].grid(True)
  1028.                     pi += 1
  1029.         for i in range(pi, len(ax)):
  1030.             ax[i].axis('off')
  1031.         plt.tight_layout()
  1032.         self.plot_canvas = FigureCanvasTkAgg(self.cur_fig, master=self.scroll_f)
  1033.         self.plot_canvas.draw()
  1034.         self.plot_canvas.get_tk_widget().pack(fill=tk.BOTH, expand=True)
  1035.  
  1036.     def update_plots(self):
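                """Re-run the analysis on the currently loaded data and redraw the plots."""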
  1037.         if self.cur_data is not None:
  1038.             adf = self.analyze_data(self.cur_data)
  1039.             self.plot_viz(self.cur_data, adf)
  1040.  
  1041.     def save_plots(self):
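                """Save the current figure as a PNG (300 dpi) chosen via a file dialog."""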
  1042.         if self.cur_fig is None:
  1043.             messagebox.showwarning("No Plot", "No plot to save.")
  1044.             return
  1045.         fp = filedialog.asksaveasfilename(defaultextension=".png", filetypes=[("PNG", "*.png"), ("All", "*.*")])
  1046.         if fp:
  1047.             try:
  1048.                 self.cur_fig.savefig(fp, dpi=300)
  1049.                 self.status_var.set(f"Saved plot to {fp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
  1050.             except Exception as e:
  1051.                 messagebox.showerror("Error", f"Save failed: {str(e)}")
  1052.                 logging.error(f"Plot save failed: {str(e)}")
  1053.  
  1054.     def export_raw(self):
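                """Export the raw data to a timestamped CSV inside a timestamped Outputs subfolder."""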
  1055.         if self.raw_data is None:
  1056.             messagebox.showwarning("No Data", "Load file.")
  1057.             return
  1058.         try:
  1059.             ts = datetime.now().strftime('%Y%m%d_%H%M')
  1060.             od = self.out_dir / ts
  1061.             od.mkdir(exist_ok=True)
  1062.             fn = Path(self.cur_file).stem
  1063.             cp = od / f"{fn}_raw_data_{ts}.csv"
  1064.             self.raw_data.to_csv(cp, index=False)
  1065.             self.status_var.set(f"Exported raw to {cp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
  1066.             messagebox.showinfo("Success", f"Raw data to {cp}")
  1067.         except Exception as e:
  1068.             messagebox.showerror("Error", f"Export failed: {str(e)}")
  1069.             logging.error(f"Raw export failed: {str(e)}")
  1070.  
  1071.     def export_qual(self):
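                """Export the quality-check issues to a timestamped CSV inside the Outputs folder."""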
  1072.         if self.qual_issues is None:
  1073.             messagebox.showwarning("No Data", "Load file and check quality.")
  1074.             return
  1075.         try:
  1076.             ts = datetime.now().strftime('%Y%m%d_%H%M')
  1077.             od = self.out_dir / ts
  1078.             od.mkdir(exist_ok=True)
  1079.             fn = Path(self.cur_file).stem
  1080.             cp = od / f"{fn}_quality_check_{ts}.csv"
  1081.             self.qual_issues.to_csv(cp, index=False)
  1082.             self.status_var.set(f"Exported quality to {cp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
  1083.             messagebox.showinfo("Success", f"Quality to {cp}")
  1084.         except Exception as e:
  1085.             messagebox.showerror("Error", f"Export failed: {str(e)}")
  1086.             logging.error(f"Quality export failed: {str(e)}")
  1087.  
  1088.     def export_sum(self):
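                """Recompute summary statistics for the derived parameters and export them to a timestamped CSV."""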
  1089.         if self.cur_data is None:
  1090.             messagebox.showwarning("No Data", "Load file.")
  1091.             return
  1092.         try:
  1093.             adf = self.analyze_data(self.cur_data)
  1094.             p = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio', 'Poisson’s Ratio', 'Shear Modulus (MPa)', 'Bulk Modulus (MPa)', 'Young’s Modulus (MPa)']
  1095.             vd = adf[p].dropna()
  1096.             sd = {
  1097.                 'Parameter': p,
  1098.                 'Mean': [vd[c].mean() if not vd[c].empty else np.nan for c in p],
  1099.                 'Median': [vd[c].median() if not vd[c].empty else np.nan for c in p],
  1100.                 'Std Dev': [vd[c].std() if not vd[c].empty else np.nan for c in p],
  1101.                 'Min': [vd[c].min() if not vd[c].empty else np.nan for c in p],
  1102.                 'Max': [vd[c].max() if not vd[c].empty else np.nan for c in p]
  1103.             }
  1104.             sdf = pd.DataFrame(sd)
  1105.             ts = datetime.now().strftime('%Y%m%d_%H%M')
  1106.             od = self.out_dir / ts
  1107.             od.mkdir(exist_ok=True)
  1108.             fn = Path(self.cur_file).stem
  1109.             cp = od / f"{fn}_summary_{ts}.csv"
  1110.             sdf.to_csv(cp, index=False)
  1111.             self.status_var.set(f"Exported summary to {cp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
  1112.             messagebox.showinfo("Success", f"Summary to {cp}")
  1113.             logging.info(f"Exported summary: Mean P-wave={sd['Mean'][0]:.2f}, Median P-wave={sd['Median'][0]:.2f}")
  1114.         except Exception as e:
  1115.             messagebox.showerror("Error", f"Export failed: {str(e)}")
  1116.             logging.error(f"Summary export failed: {str(e)}")
  1117.  
  1118.     def export_lay(self):
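                """Export the identified layers to a timestamped CSV inside the Outputs folder."""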
  1119.         if self.layers is None:
  1120.             messagebox.showwarning("No Data", "Load file and identify layers.")
  1121.             return
  1122.         try:
  1123.             ts = datetime.now().strftime('%Y%m%d_%H%M')
  1124.             od = self.out_dir / ts
  1125.             od.mkdir(exist_ok=True)
  1126.             fn = Path(self.cur_file).stem
  1127.             cp = od / f"{fn}_layers_{ts}.csv"
  1128.             self.layers.to_csv(cp, index=False)
  1129.             self.status_var.set(f"Exported layers to {cp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
  1130.             messagebox.showinfo("Success", f"Layers to {cp}")
  1131.         except Exception as e:
  1132.             messagebox.showerror("Error", f"Export failed: {str(e)}")
  1133.             logging.error(f"Layers export failed: {str(e)}")
  1134.  
  1135.     def export_corr(self):
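                """Export the correlation results to a timestamped CSV inside the Outputs folder."""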
  1136.         if self.corr_res is None:
  1137.             messagebox.showwarning("No Data", "Run correlation.")
  1138.             return
  1139.         try:
  1140.             ts = datetime.now().strftime('%Y%m%d_%H%M')
  1141.             od = self.out_dir / ts
  1142.             od.mkdir(exist_ok=True)
  1143.             fn = Path(self.cur_file).stem
  1144.             cp = od / f"{fn}_correlation_{ts}.csv"
  1145.             self.corr_res.to_csv(cp, index=False)
  1146.             self.status_var.set(f"Exported correlation to {cp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
  1147.             messagebox.showinfo("Success", f"Correlation to {cp}")
  1148.         except Exception as e:
  1149.             messagebox.showerror("Error", f"Export failed: {str(e)}")
  1150.             logging.error(f"Correlation export failed: {str(e)}")
  1151.  
  1152.     def export_report(self):
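                """Assemble a LaTeX report (analysis table, quality issues, summary statistics,
                layers and correlations) and compile it to PDF with latexmk in a timestamped
                output folder."""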
  1153.         if self.cur_data is None:
  1154.             messagebox.showwarning("No Data", "Load file.")
  1155.             return
  1156.         if not shutil.which('latexmk'):
  1157.             messagebox.showerror("Error", "latexmk not found. Install a LaTeX distribution (e.g., TeX Live or MiKTeX).")
  1158.             return
  1159.         try:
  1160.             adf = self.analyze_data(self.cur_data)
  1161.             ts = datetime.now().strftime('%Y%m%d_%H%M')
  1162.             od = self.out_dir / ts
  1163.             od.mkdir(exist_ok=True)
  1164.             fn = Path(self.cur_file).stem
  1165.             tp = od / f"{fn}_detailed_report_{ts}.tex"
  1166.             lc = r"""\documentclass[a4paper,12pt]{article}
  1167. \usepackage{booktabs,longtable,geometry,amsmath,amsfonts,noto}
  1168. \geometry{margin=1in}
  1169. \begin{document}
  1170. \section*{Detailed Seismic Analysis Report}
  1171. \subsection*{Analysis Data}
  1172. \begin{longtable}{@{}""" + "c" * len(adf.columns) + r"""@{}}
  1173. """
  1174.             lc += r"\toprule " + r" & ".join([f"\\textbf{{{c}}}" for c in adf.columns]) + r"\\\midrule\endhead"
  1175.             for _, r in adf.iterrows():
  1176.                 lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) if pd.notna(v) else "" for v in r]) + r"\\\midrule" + "\n"
  1177.             lc += r"\bottomrule\end{longtable}"
  1178.             if self.qual_issues is not None and not self.qual_issues.empty:
  1179.                 lc += r"\section*{Quality Check Issues}\begin{longtable}{@{}" + "c" * len(self.qual_issues.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in self.qual_issues.columns]) + r"\\\midrule\endhead"
  1180.                 for _, r in self.qual_issues.iterrows():
  1181.                     lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r]) + r"\\\midrule" + "\n"
  1182.                 lc += r"\bottomrule\end{longtable}"
  1183.             p = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio', 'Poisson’s Ratio', 'Shear Modulus (MPa)', 'Bulk Modulus (MPa)', 'Young’s Modulus (MPa)']
  1184.             vd = adf[p].dropna()
  1185.             sd = {
  1186.                 'Parameter': p,
  1187.                 'Mean': [vd[c].mean() if not vd[c].empty else np.nan for c in p],
  1188.                 'Median': [vd[c].median() if not vd[c].empty else np.nan for c in p],
  1189.                 'Std Dev': [vd[c].std() if not vd[c].empty else np.nan for c in p],
  1190.                 'Min': [vd[c].min() if not vd[c].empty else np.nan for c in p],
  1191.                 'Max': [vd[c].max() if not vd[c].empty else np.nan for c in p]
  1192.             }
  1193.             sdf = pd.DataFrame(sd)
  1194.             lc += r"\section*{Summary Statistics}\begin{longtable}{@{}" + "c" * len(sdf.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in sdf.columns]) + r"\\\midrule\endhead"
  1195.             for _, r in sdf.iterrows():
  1196.                 lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r]) + r"\\\midrule" + "\n"
  1197.             lc += r"\bottomrule\end{longtable}"
  1198.             if self.layers is not None and not self.layers.empty:
  1199.                 lc += r"\section*{Identified Layers}\begin{longtable}{@{}" + "c" * len(self.layers.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in self.layers.columns]) + r"\\\midrule\endhead"
  1200.                 for _, r in self.layers.iterrows():
  1201.                     lc += r" & ".join([f"{v:.2f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r]) + r"\\\midrule" + "\n"
  1202.                 lc += r"\bottomrule\end{longtable}"
  1203.             if self.corr_res is not None and not self.corr_res.empty:
  1204.                 lc += r"\section*{Correlation Results}\begin{longtable}{@{}" + "c" * len(self.corr_res.columns) + r"@{}}\toprule" + r" & ".join([f"\\textbf{{{c}}}" for c in self.corr_res.columns]) + r"\\\midrule\endhead"
  1205.                 for _, r in self.corr_res.iterrows():
  1206.                     lc += r" & ".join([f"{v:.3f}" if isinstance(v, (int, float)) and pd.notna(v) else str(v) for v in r]) + r"\\\midrule" + "\n"
  1207.                 lc += r"\bottomrule\end{longtable}"
  1208.             lc += r"\end{document}"
  1209.             with open(tp, 'w', encoding='utf-8') as f:
  1210.                 f.write(lc)
  1211.             subprocess.run(['latexmk', '-pdf', f'-outdir={od}', str(tp)], capture_output=True, text=True, check=True)
  1212.             pp = od / f"{fn}_detailed_report_{ts}.pdf"
  1213.             if pp.exists():
  1214.                 self.status_var.set(f"Exported report to {pp} at {datetime.now(pytz.timezone('Asia/Kolkata')).strftime('%I:%M %p IST')}")
  1215.                 messagebox.showinfo("Success", f"Report to {pp}")
  1216.             else:
  1217.                 raise Exception("PDF failed")
  1218.         except Exception as e:
  1219.             messagebox.showerror("Error", f"Export failed: {str(e)}")
  1220.             logging.error(f"Report export failed: {str(e)}")