Advertisement
johnkris

Untitled

Jul 1st, 2025
535
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 81.21 KB | Software | 0 0
import os
import shutil
import subprocess
import threading
import time
import tkinter as tk
from datetime import datetime
from tkinter import ttk, messagebox, filedialog

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pytz
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from PIL import Image, ImageTk
from scipy.stats import pearsonr
  15.  
  16. class SeismicWaveAnalyzer:
  17.     def __init__(self, root):
  18.         self.root = root
  19.         self.root.withdraw()
  20.  
  21.         # Directories
  22.         self.data_dir = r"C:\SeismicWaveData"
  23.         self.output_base_dir = os.path.join(self.data_dir, "Outputs")
  24.         try:
  25.             os.makedirs(self.data_dir, exist_ok=True)
  26.             os.makedirs(self.output_base_dir, exist_ok=True)
  27.         except Exception as e:
  28.             messagebox.showerror("Error", f"Failed to create directories: {str(e)}")
  29.             self.root.destroy()
  30.             return
  31.  
  32.         self.show_splash_screen()
  33.  
  34.     def show_splash_screen(self):
  35.         splash = tk.Toplevel()
  36.         splash.title("Welcome")
  37.         splash.configure(bg="#ffffff")
  38.         splash.attributes('-fullscreen', True)
  39.         splash.overrideredirect(True)
  40.  
  41.         try:
  42.             img1 = Image.open(r"C:\SeismicWaveData\picture1.png")
  43.             img1 = img1.resize((200, 200), Image.LANCZOS)
  44.             photo1 = ImageTk.PhotoImage(img1)
  45.             img1_label = tk.Label(splash, image=photo1, bg="#ffffff")
  46.             img1_label.image = photo1
  47.             img1_label.place(x=20, y=20)
  48.         except Exception:
  49.             img1_placeholder = tk.Label(splash, text="[Reference Image 1 Placeholder]", width=20, height=10, bg="#d3d3d3")
  50.             img1_placeholder.place(x=20, y=20)
  51.  
  52.         try:
  53.             img2 = Image.open(r"C:\SeismicWaveData\picture2.png")
  54.             img2 = img2.resize((200, 200), Image.LANCZOS)
  55.             photo2 = ImageTk.PhotoImage(img2)
  56.             img2_label = tk.Label(splash, image=photo2, bg="#ffffff")
  57.             img2_label.image = photo2
  58.             img2_label.place(relx=1.0, y=20, anchor="ne")
  59.         except Exception:
  60.             img2_placeholder = tk.Label(splash, text="[Reference Image 2 Placeholder]", width=20, height=10, bg="#d3d3d3")
  61.             img2_placeholder.place(relx=1.0, y=20, anchor="ne")
  62.  
  63.         text_frame = tk.Frame(splash, bg="#ffffff")
  64.         text_frame.place(relx=0.5, rely=0.5, anchor="center")
  65.  
  66.         tk.Label(text_frame, text="Seismic Wave Analysis Tool", font=("Segoe UI", 20, "bold"), bg="#ffffff").pack(pady=10)
  67.         tk.Label(text_frame, text="An FTT Mode Project", font=("Segoe UI", 16), bg="#ffffff").pack(pady=5)
  68.         tk.Label(text_frame, text="Instrumentation & Engineering Geophysics Group", font=("Segoe UI", 14), bg="#ffffff").pack(pady=5)
  69.         tk.Label(text_frame, text="Dr N Satyavani, Project Lead", font=("Segoe UI", 14, "italic"), bg="#ffffff").pack(pady=5)
  70.  
  71.         splash.after(6000, lambda: self.launch_main_window(splash))
  72.  
    def launch_main_window(self, splash):
        """Close the splash screen and initialise the main window.

        Sets the theme palette, resets every widget/data holder, creates
        the per-parameter plot toggles, then builds the GUI and starts the
        status-bar clock. Called once, from the splash screen timer.
        """
        splash.destroy()
        self.root.deiconify()
        self.root.title("Seismic Wave Analysis Tool")
        self.root.geometry("1000x700")
        self.root.state('zoomed')  # start maximised ('zoomed' is Windows Tk — consistent with the C:\ data path)

        # Colour palette consumed by apply_theme().
        self.light_theme = {
            "bg": "#ffffff",
            "fg": "#212121",
            "entry_bg": "#f5f5f5",
            "accent": "#003087",
            "button_fg": "#ffffff",
        }
        self.root.configure(bg=self.light_theme["bg"])

        # Widget handles — filled in by the setup_*_tab methods.
        self.raw_tree = None
        self.quality_tree = None
        self.analysis_tree = None
        self.summary_tree = None
        self.layers_tree = None
        self.correlation_tree = None
        self.realtime_tree = None
        self.plot_label = None
        self.plot_canvas = None
        # Data holders — populated once a file is loaded/analysed.
        self.current_data = None
        self.raw_data = None
        self.current_file = None
        self.deviations = None
        self.quality_issues = None
        self.layers = None
        self.correlation_data = None
        self.realtime_data = None
        self.current_fig = None
        self.selected_rows = {}
        self.raw_data_min_depth_var = tk.StringVar()
        self.raw_data_max_depth_var = tk.StringVar()
        self.realtime_running = False  # toggled by start_realtime/stop_realtime

        # Visualization options: one on/off toggle per plottable parameter,
        # rendered as checkbuttons in the Visualizations tab.
        self.plot_options = {
            "Vp/Vs Ratio": tk.BooleanVar(value=True),
            "Poisson's Ratio": tk.BooleanVar(value=True),
            "Shear Modulus": tk.BooleanVar(value=True),
            "Bulk Modulus": tk.BooleanVar(value=True),
            "Young's Modulus": tk.BooleanVar(value=True),
            "Lame's Lambda": tk.BooleanVar(value=True),
            "Travel Time Deviations": tk.BooleanVar(value=True),
        }
        self.min_depth_var = tk.StringVar()
        self.max_depth_var = tk.StringVar()

        self.setup_gui()
        self.update_clock()
  127.  
    def setup_gui(self):
        """Assemble the main window: menu bar, file controls, tabbed
        notebook, export-button row, and the status/clock bar."""
        # Minimal menu bar: File -> Exit only.
        menubar = tk.Menu(self.root)
        self.root.config(menu=menubar)

        file_menu = tk.Menu(menubar, tearoff=0)
        menubar.add_cascade(label="File", menu=file_menu)
        file_menu.add_command(label="Exit", command=self.root.quit)

        # Top row: upload / template / CSV-to-PDF / refresh, plus the file picker.
        file_frame = ttk.Frame(self.root)
        file_frame.pack(pady=10, padx=10, fill=tk.X)

        ttk.Label(file_frame, text="Upload CHST File:", font=("Segoe UI", 10)).pack(side=tk.LEFT, padx=5)
        upload_button = ttk.Button(file_frame, text="Upload", command=self.upload_file, style="Custom.TButton")
        upload_button.pack(side=tk.LEFT, padx=5)

        ttk.Button(file_frame, text="Create Template File", command=self.create_template_file, style="Custom.TButton").pack(side=tk.LEFT, padx=5)

        ttk.Button(file_frame, text="Import CSV to PDF", command=self.import_csv_to_pdf, style="Custom.TButton").pack(side=tk.LEFT, padx=5)

        refresh_button = ttk.Button(file_frame, text="Refresh", command=self.refresh_file_list, style="Custom.TButton")
        refresh_button.pack(side=tk.LEFT, padx=5)

        ttk.Label(file_frame, text="Select File:", font=("Segoe UI", 10)).pack(side=tk.LEFT, padx=5)
        self.file_var = tk.StringVar()
        self.file_combobox = ttk.Combobox(file_frame, textvariable=self.file_var, state="readonly")
        self.file_combobox.pack(side=tk.LEFT, padx=5)
        # Loading is triggered by dropdown selection, not by the upload itself.
        self.file_combobox.bind("<<ComboboxSelected>>", self.load_file_data)

        # One notebook tab per analysis stage; each setup_*_tab builds its widgets.
        self.notebook = ttk.Notebook(self.root)
        self.notebook.pack(pady=10, padx=10, fill=tk.BOTH, expand=True)

        self.raw_data_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.raw_data_frame, text="Raw Data")
        self.setup_raw_data_tab()

        self.quality_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.quality_frame, text="Quality Check")
        self.setup_quality_tab()

        self.analysis_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.analysis_frame, text="Analysis")
        self.setup_analysis_tab()

        self.summary_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.summary_frame, text="Summary")
        self.setup_summary_tab()

        self.layers_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.layers_frame, text="Layers")
        self.setup_layers_tab()

        self.correlation_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.correlation_frame, text="Correlation Analysis")
        self.setup_correlation_tab()

        self.realtime_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.realtime_frame, text="Real-Time")
        self.setup_realtime_tab()

        self.visualization_frame = ttk.Frame(self.notebook)
        self.notebook.add(self.visualization_frame, text="Visualizations")
        self.setup_visualization_tab()

        # Row of export buttons, one per exportable view.
        export_frame = ttk.Frame(self.root)
        export_frame.pack(pady=5, fill=tk.X)
        ttk.Button(export_frame, text="Export Raw Data to CSV", command=self.export_raw_data, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(export_frame, text="Export Quality Check to CSV", command=self.export_quality_check, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(export_frame, text="Export Analysis to PDF", command=self.export_analysis_to_pdf, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(export_frame, text="Export Summary to CSV", command=self.export_summary, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(export_frame, text="Export Layers to CSV", command=self.export_layers, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(export_frame, text="Export Correlation to CSV", command=self.export_correlation, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
        ttk.Button(export_frame, text="Export Detailed Report", command=self.export_detailed_report, style="Custom.TButton").pack(side=tk.LEFT, padx=5)

        # Bottom bar: status message on the left, live clock on the right.
        self.status_var = tk.StringVar()
        self.clock_var = tk.StringVar()
        status_frame = ttk.Frame(self.root)
        status_frame.pack(side=tk.BOTTOM, fill=tk.X)
        status_bar = tk.Label(status_frame, textvariable=self.status_var, bd=1, relief=tk.SUNKEN, anchor=tk.W, font=("Segoe UI", 10))
        status_bar.pack(side=tk.LEFT, fill=tk.X, expand=True)
        clock_label = tk.Label(status_frame, textvariable=self.clock_var, bd=1, relief=tk.SUNKEN, anchor=tk.E, font=("Segoe UI", 10))
        clock_label.pack(side=tk.RIGHT)

        self.apply_theme()
        self.refresh_file_list()
  212.  
  213.     def apply_theme(self):
  214.         style = ttk.Style()
  215.         style.theme_use('clam')
  216.         style.configure("Custom.TFrame", background=self.light_theme["bg"])
  217.         style.configure("Custom.TButton",
  218.                         background=self.light_theme["accent"],
  219.                         foreground=self.light_theme["button_fg"],
  220.                         font=("Segoe UI", 10),
  221.                         padding=8)
  222.         style.configure("TCombobox", fieldbackground=self.light_theme["entry_bg"], foreground=self.light_theme["fg"])
  223.         style.configure("Treeview", background=self.light_theme["entry_bg"], foreground=self.light_theme["fg"], fieldbackground=self.light_theme["entry_bg"])
  224.         style.configure("Treeview.Heading", background=self.light_theme["bg"], foreground=self.light_theme["fg"])
  225.  
  226.     def update_clock(self):
  227.         ist = pytz.timezone('Asia/Kolkata')
  228.         current_time = datetime.now(ist).strftime("%a, %b %d, %Y %I:%M %p IST")
  229.         self.clock_var.set(current_time)
  230.         self.root.after(1000, self.update_clock)
  231.  
  232.     def setup_raw_data_tab(self):
  233.         selection_frame = ttk.Frame(self.raw_data_frame)
  234.         selection_frame.pack(fill=tk.X, pady=5)
  235.  
  236.         ttk.Label(selection_frame, text="Select Depth Range (m):").pack(side=tk.LEFT, padx=5)
  237.         ttk.Entry(selection_frame, textvariable=self.raw_data_min_depth_var, width=10).pack(side=tk.LEFT, padx=5)
  238.         ttk.Label(selection_frame, text="to").pack(side=tk.LEFT)
  239.         ttk.Entry(selection_frame, textvariable=self.raw_data_max_depth_var, width=10).pack(side=tk.LEFT, padx=5)
  240.         ttk.Button(selection_frame, text="Apply Selection", command=self.apply_selection, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
  241.         ttk.Button(selection_frame, text="Reset Selection", command=self.reset_selection, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
  242.  
  243.         frame = ttk.Frame(self.raw_data_frame, style="Custom.TFrame")
  244.         frame.pack(pady=10, fill=tk.BOTH, expand=True)
  245.  
  246.         canvas = tk.Canvas(frame)
  247.         scrollbar_y = ttk.Scrollbar(frame, orient=tk.VERTICAL, command=canvas.yview)
  248.         scrollbar_x = ttk.Scrollbar(frame, orient=tk.HORIZONTAL, command=canvas.xview)
  249.         scrollable_frame = ttk.Frame(canvas)
  250.  
  251.         scrollable_frame.bind(
  252.             "<Configure>",
  253.             lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
  254.         )
  255.  
  256.         canvas.configure(yscrollcommand=scrollbar_y.set, xscrollcommand=scrollbar_x.set)
  257.  
  258.         scrollbar_y.pack(side=tk.RIGHT, fill=tk.Y)
  259.         scrollbar_x.pack(side=tk.BOTTOM, fill=tk.X)
  260.         canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
  261.         canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
  262.  
  263.         self.raw_tree = ttk.Treeview(scrollable_frame, show="headings")
  264.         self.raw_tree.pack(fill=tk.BOTH, expand=True)
  265.  
  266.     def setup_quality_tab(self):
  267.         frame = ttk.Frame(self.quality_frame, style="Custom.TFrame")
  268.         frame.pack(pady=10, fill=tk.BOTH, expand=True)
  269.  
  270.         canvas = tk.Canvas(frame)
  271.         scrollbar_y = ttk.Scrollbar(frame, orient=tk.VERTICAL, command=canvas.yview)
  272.         scrollbar_x = ttk.Scrollbar(frame, orient=tk.HORIZONTAL, command=canvas.xview)
  273.         scrollable_frame = ttk.Frame(canvas)
  274.  
  275.         scrollable_frame.bind(
  276.             "<Configure>",
  277.             lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
  278.         )
  279.  
  280.         canvas.configure(yscrollcommand=scrollbar_y.set, xscrollcommand=scrollbar_x.set)
  281.  
  282.         scrollbar_y.pack(side=tk.RIGHT, fill=tk.Y)
  283.         scrollbar_x.pack(side=tk.BOTTOM, fill=tk.X)
  284.         canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
  285.         canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
  286.  
  287.         self.quality_tree = ttk.Treeview(scrollable_frame, show="headings")
  288.         self.quality_tree.pack(fill=tk.BOTH, expand=True)
  289.  
  290.     def setup_analysis_tab(self):
  291.         frame = ttk.Frame(self.analysis_frame, style="Custom.TFrame")
  292.         frame.pack(pady=10, fill=tk.BOTH, expand=True)
  293.  
  294.         control_frame = ttk.Frame(frame)
  295.         control_frame.pack(fill=tk.X, pady=5)
  296.         ttk.Button(control_frame, text="Run Monte Carlo Simulation", command=self.run_monte_carlo, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
  297.  
  298.         canvas = tk.Canvas(frame)
  299.         scrollbar_y = ttk.Scrollbar(frame, orient=tk.VERTICAL, command=canvas.yview)
  300.         scrollbar_x = ttk.Scrollbar(frame, orient=tk.HORIZONTAL, command=canvas.xview)
  301.         scrollable_frame = ttk.Frame(canvas)
  302.  
  303.         scrollable_frame.bind(
  304.             "<Configure>",
  305.             lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
  306.         )
  307.  
  308.         canvas.configure(yscrollcommand=scrollbar_y.set, xscrollcommand=scrollbar_x.set)
  309.  
  310.         scrollbar_y.pack(side=tk.RIGHT, fill=tk.Y)
  311.         scrollbar_x.pack(side=tk.BOTTOM, fill=tk.X)
  312.         canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
  313.         canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
  314.  
  315.         self.analysis_tree = ttk.Treeview(scrollable_frame, show="headings")
  316.         self.analysis_tree.pack(fill=tk.BOTH, expand=True)
  317.  
  318.     def setup_summary_tab(self):
  319.         frame = ttk.Frame(self.summary_frame, style="Custom.TFrame")
  320.         frame.pack(pady=10, fill=tk.BOTH, expand=True)
  321.  
  322.         canvas = tk.Canvas(frame)
  323.         scrollbar_y = ttk.Scrollbar(frame, orient=tk.VERTICAL, command=canvas.yview)
  324.         scrollbar_x = ttk.Scrollbar(frame, orient=tk.HORIZONTAL, command=canvas.xview)
  325.         scrollable_frame = ttk.Frame(canvas)
  326.  
  327.         scrollable_frame.bind(
  328.             "<Configure>",
  329.             lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
  330.         )
  331.  
  332.         canvas.configure(yscrollcommand=scrollbar_y.set, xscrollcommand=scrollbar_x.set)
  333.  
  334.         scrollbar_y.pack(side=tk.RIGHT, fill=tk.Y)
  335.         scrollbar_x.pack(side=tk.BOTTOM, fill=tk.X)
  336.         canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
  337.         canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
  338.  
  339.         self.summary_tree = ttk.Treeview(scrollable_frame, show="headings")
  340.         self.summary_tree.pack(fill=tk.BOTH, expand=True)
  341.  
  342.     def setup_layers_tab(self):
  343.         frame = ttk.Frame(self.layers_frame, style="Custom.TFrame")
  344.         frame.pack(pady=10, fill=tk.BOTH, expand=True)
  345.  
  346.         canvas = tk.Canvas(frame)
  347.         scrollbar_y = ttk.Scrollbar(frame, orient=tk.VERTICAL, command=canvas.yview)
  348.         scrollbar_x = ttk.Scrollbar(frame, orient=tk.HORIZONTAL, command=canvas.xview)
  349.         scrollable_frame = ttk.Frame(canvas)
  350.  
  351.         scrollable_frame.bind(
  352.             "<Configure>",
  353.             lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
  354.         )
  355.  
  356.         canvas.configure(yscrollcommand=scrollbar_y.set, xscrollcommand=scrollbar_x.set)
  357.  
  358.         scrollbar_y.pack(side=tk.RIGHT, fill=tk.Y)
  359.         scrollbar_x.pack(side=tk.BOTTOM, fill=tk.X)
  360.         canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
  361.         canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
  362.  
  363.         self.layers_tree = ttk.Treeview(scrollable_frame, show="headings")
  364.         self.layers_tree.pack(fill=tk.BOTH, expand=True)
  365.  
  366.     def setup_correlation_tab(self):
  367.         frame = ttk.Frame(self.correlation_frame, style="Custom.TFrame")
  368.         frame.pack(pady=10, fill=tk.BOTH, expand=True)
  369.  
  370.         control_frame = ttk.Frame(frame)
  371.         control_frame.pack(fill=tk.X, pady=5)
  372.         ttk.Button(control_frame, text="Upload SPT/Sonic Log File", command=self.upload_correlation_file, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
  373.         ttk.Button(control_frame, text="Run Correlation Analysis", command=self.run_correlation_analysis, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
  374.  
  375.         canvas = tk.Canvas(frame)
  376.         scrollbar_y = ttk.Scrollbar(frame, orient=tk.VERTICAL, command=canvas.yview)
  377.         scrollbar_x = ttk.Scrollbar(frame, orient=tk.HORIZONTAL, command=canvas.xview)
  378.         scrollable_frame = ttk.Frame(canvas)
  379.  
  380.         scrollable_frame.bind(
  381.             "<Configure>",
  382.             lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
  383.         )
  384.  
  385.         canvas.configure(yscrollcommand=scrollbar_y.set, xscrollcommand=scrollbar_x.set)
  386.  
  387.         scrollbar_y.pack(side=tk.RIGHT, fill=tk.Y)
  388.         scrollbar_x.pack(side=tk.BOTTOM, fill=tk.X)
  389.         canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
  390.         canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
  391.  
  392.         self.correlation_tree = ttk.Treeview(scrollable_frame, show="headings")
  393.         self.correlation_tree.pack(fill=tk.BOTH, expand=True)
  394.  
  395.         self.correlation_plot_label = ttk.Label(scrollable_frame, text="Upload SPT/Sonic Log file to view correlations.")
  396.         self.correlation_plot_label.pack(fill=tk.BOTH, expand=True)
  397.  
  398.     def setup_realtime_tab(self):
  399.         frame = ttk.Frame(self.realtime_frame, style="Custom.TFrame")
  400.         frame.pack(pady=10, fill=tk.BOTH, expand=True)
  401.  
  402.         control_frame = ttk.Frame(frame)
  403.         control_frame.pack(fill=tk.X, pady=5)
  404.         ttk.Button(control_frame, text="Start Real-Time Acquisition", command=self.start_realtime, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
  405.         ttk.Button(control_frame, text="Stop Real-Time Acquisition", command=self.stop_realtime, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
  406.  
  407.         canvas = tk.Canvas(frame)
  408.         scrollbar_y = ttk.Scrollbar(frame, orient=tk.VERTICAL, command=canvas.yview)
  409.         scrollbar_x = ttk.Scrollbar(frame, orient=tk.HORIZONTAL, command=canvas.xview)
  410.         scrollable_frame = ttk.Frame(canvas)
  411.  
  412.         scrollable_frame.bind(
  413.             "<Configure>",
  414.             lambda e: canvas.configure(scrollregion=canvas.bbox("all"))
  415.         )
  416.  
  417.         canvas.configure(yscrollcommand=scrollbar_y.set, xscrollcommand=scrollbar_x.set)
  418.  
  419.         scrollbar_y.pack(side=tk.RIGHT, fill=tk.Y)
  420.         scrollbar_x.pack(side=tk.BOTTOM, fill=tk.X)
  421.         canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
  422.         canvas.create_window((0, 0), window=scrollable_frame, anchor="nw")
  423.  
  424.         self.realtime_tree = ttk.Treeview(scrollable_frame, show="headings")
  425.         self.realtime_tree.pack(fill=tk.BOTH, expand=True)
  426.  
  427.         self.realtime_plot_label = ttk.Label(scrollable_frame, text="Start real-time acquisition to view data.")
  428.         self.realtime_plot_label.pack(fill=tk.BOTH, expand=True)
  429.  
  430.     def setup_visualization_tab(self):
  431.         frame = ttk.Frame(self.visualization_frame, style="Custom.TFrame")
  432.         frame.pack(pady=10, fill=tk.BOTH, expand=True)
  433.  
  434.         options_frame = ttk.Frame(frame)
  435.         options_frame.pack(fill=tk.X, pady=5)
  436.  
  437.         ttk.Label(options_frame, text="Select Parameters to Plot:", font=("Segoe UI", 10)).pack(side=tk.LEFT, padx=5)
  438.         for param, var in self.plot_options.items():
  439.             ttk.Checkbutton(options_frame, text=param, variable=var, command=self.update_plots).pack(side=tk.LEFT, padx=5)
  440.  
  441.         depth_frame = ttk.Frame(options_frame)
  442.         depth_frame.pack(side=tk.LEFT, padx=10)
  443.         ttk.Label(depth_frame, text="Depth Range (m):").pack(side=tk.LEFT)
  444.         ttk.Entry(depth_frame, textvariable=self.min_depth_var, width=10).pack(side=tk.LEFT, padx=5)
  445.         ttk.Label(depth_frame, text="to").pack(side=tk.LEFT)
  446.         ttk.Entry(depth_frame, textvariable=self.max_depth_var, width=10).pack(side=tk.LEFT, padx=5)
  447.         ttk.Button(depth_frame, text="Update Plot", command=self.update_plots, style="Custom.TButton").pack(side=tk.LEFT, padx=5)
  448.  
  449.         ttk.Button(options_frame, text="Save Plots", command=self.save_plots, style="Custom.TButton").pack(side=tk.RIGHT, padx=5)
  450.  
  451.         self.canvas = tk.Canvas(frame)
  452.         scrollbar_y = ttk.Scrollbar(frame, orient=tk.VERTICAL, command=self.canvas.yview)
  453.         scrollbar_x = ttk.Scrollbar(frame, orient=tk.HORIZONTAL, command=self.canvas.xview)
  454.         self.scrollable_frame = ttk.Frame(self.canvas)
  455.  
  456.         self.scrollable_frame.bind(
  457.             "<Configure>",
  458.             lambda e: self.canvas.configure(scrollregion=self.canvas.bbox("all"))
  459.         )
  460.         self.canvas.configure(yscrollcommand=scrollbar_y.set, xscrollcommand=scrollbar_x.set)
  461.  
  462.         scrollbar_y.pack(side=tk.RIGHT, fill=tk.Y)
  463.         scrollbar_x.pack(side=tk.BOTTOM, fill=tk.X)
  464.         self.canvas.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
  465.         self.canvas.create_window((0, 0), window=self.scrollable_frame, anchor="nw")
  466.  
  467.         self.plot_label = ttk.Label(self.scrollable_frame, text="Select a file to view visualizations.")
  468.         self.plot_label.pack(fill=tk.BOTH, expand=True)
  469.  
  470.     def import_csv_to_pdf(self):
  471.         file_path = filedialog.askopenfilename(filetypes=[("CSV Files", "*.csv"), ("All Files", "*.*")])
  472.         if not file_path:
  473.             return
  474.  
  475.         try:
  476.             df = pd.read_csv(file_path)
  477.             timestamp = datetime.now().strftime('%Y%m%d_%H%M')
  478.             output_dir = os.path.join(self.output_base_dir, timestamp)
  479.             os.makedirs(output_dir, exist_ok=True)
  480.             input_filename = os.path.splitext(os.path.basename(file_path))[0]
  481.             tex_filename = f"{input_filename}_data_{timestamp}.tex"
  482.             tex_path = os.path.join(output_dir, tex_filename)
  483.             pdf_filename = f"{input_filename}_data_{timestamp}.pdf"
  484.             pdf_path = os.path.join(output_dir, pdf_filename)
  485.  
  486.             latex_content = r"""
  487. \documentclass[a4paper,12pt]{article}
  488. \usepackage{booktabs}
  489. \usepackage{longtable}
  490. \usepackage[margin=1in]{geometry}
  491. \usepackage{amsmath}
  492. \usepackage{amsfonts}
  493. \usepackage{noto}
  494.  
  495. \begin{document}
  496.  
  497. \section*{Seismic Data Table}
  498.  
  499. \begin{longtable}{@{}""" + "c" * len(df.columns) + r"""@{}}
  500. \toprule
  501. """ + " & ".join([f"\\textbf{{{col}}}" for col in df.columns]) + r""" \\
  502. \midrule
  503. \endhead
  504. """
  505.  
  506.             for _, row in df.iterrows():
  507.                 formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
  508.                 latex_content += " & ".join(formatted_row) + r" \\ \midrule" + "\n"
  509.  
  510.             latex_content += r"""
  511. \bottomrule
  512. \end{longtable}
  513.  
  514. \end{document}
  515. """
  516.  
  517.             with open(tex_path, 'w', encoding='utf-8') as f:
  518.                 f.write(latex_content)
  519.  
  520.             os.system(f'latexmk -pdf -outdir="{output_dir}" "{tex_path}"')
  521.  
  522.             if os.path.exists(pdf_path):
  523.                 ist = pytz.timezone('Asia/Kolkata')
  524.                 timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  525.                 self.status_var.set(f"Imported CSV and generated PDF: {pdf_path} at {timestamp}")
  526.                 messagebox.showinfo("Success", f"CSV imported and PDF generated successfully at {pdf_path}.")
  527.             else:
  528.                 raise Exception("PDF compilation failed.")
  529.         except Exception as e:
  530.             messagebox.showerror("Error", f"Failed to import CSV or generate PDF: {str(e)}")
  531.  
  532.     def export_analysis_to_pdf(self):
  533.         if self.current_data is None:
  534.             messagebox.showwarning("No Data", "Please load a valid file before exporting analysis.")
  535.             return
  536.         try:
  537.             analysis_df = self.analyze_data(self.current_data)
  538.             timestamp = datetime.now().strftime('%Y%m%d_%H%M')
  539.             output_dir = os.path.join(self.output_base_dir, timestamp)
  540.             os.makedirs(output_dir, exist_ok=True)
  541.             input_filename = os.path.splitext(self.current_file)[0]
  542.             tex_filename = f"{input_filename}_analysis_{timestamp}.tex"
  543.             tex_path = os.path.join(output_dir, tex_filename)
  544.             pdf_filename = f"{input_filename}_analysis_{timestamp}.pdf"
  545.             pdf_path = os.path.join(output_dir, pdf_filename)
  546.  
  547.             latex_content = r"""
  548. \documentclass[a4paper,12pt]{article}
  549. \usepackage{booktabs}
  550. \usepackage{longtable}
  551. \usepackage[margin=1in]{geometry}
  552. \usepackage{amsmath}
  553. \usepackage{amsfonts}
  554. \usepackage{noto}
  555.  
  556. \begin{document}
  557.  
  558. \section*{Seismic Analysis Report}
  559.  
  560. \begin{longtable}{@{}""" + "c" * len(analysis_df.columns) + r"""@{}}
  561. \toprule
  562. """ + " & ".join([f"\\textbf{{{col}}}" for col in analysis_df.columns]) + r""" \\
  563. \midrule
  564. \endhead
  565. """
  566.  
  567.             for _, row in analysis_df.iterrows():
  568.                 formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
  569.                 latex_content += " & ".join(formatted_row) + r" \\ \midrule" + "\n"
  570.  
  571.             latex_content += r"""
  572. \bottomrule
  573. \end{longtable}
  574.  
  575. \end{document}
  576. """
  577.  
  578.             with open(tex_path, 'w', encoding='utf-8') as f:
  579.                 f.write(latex_content)
  580.  
  581.             os.system(f'latexmk -pdf -outdir="{output_dir}" "{tex_path}"')
  582.  
  583.             if os.path.exists(pdf_path):
  584.                 ist = pytz.timezone('Asia/Kolkata')
  585.                 timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  586.                 self.status_var.set(f"Exported analysis to PDF: {pdf_path} at {timestamp}")
  587.                 messagebox.showinfo("Success", f"Analysis exported successfully to {pdf_path}.")
  588.             else:
  589.                 raise Exception("PDF compilation failed.")
  590.         except Exception as e:
  591.             messagebox.showerror("Error", f"Failed to export analysis to PDF: {str(e)}")
  592.  
  593.     def refresh_file_list(self):
  594.         files = [f for f in os.listdir(self.data_dir) if f.endswith(('.xlsx', '.csv'))]
  595.         self.file_combobox['values'] = files
  596.         self.file_var.set("")
  597.         self.clear_tabs()
  598.         self.status_var.set("Please select a file from the dropdown.")
  599.  
  600.     def upload_file(self):
  601.         file_path = filedialog.askopenfilename(filetypes=[
  602.             ("Excel and CSV Files", "*.xlsx *.csv"),
  603.             ("Excel Files", "*.xlsx"),
  604.             ("CSV Files", "*.csv"),
  605.             ("All Files", "*.*")
  606.         ])
  607.         if file_path:
  608.             try:
  609.                 dest_path = os.path.join(self.data_dir, os.path.basename(file_path))
  610.                 shutil.copy(file_path, dest_path)
  611.                 self.refresh_file_list()
  612.                 ist = pytz.timezone('Asia/Kolkata')
  613.                 timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  614.                 self.status_var.set(f"Uploaded: {os.path.basename(file_path)} at {timestamp}")
  615.             except Exception as e:
  616.                 messagebox.showerror("Error", f"Failed to upload file: {str(e)}")
  617.  
  618.     def create_template_file(self):
  619.         try:
  620.             columns = ["Depth (m)", "Elevation (m)", "P wave time (ms)", "S wave time (ms)"]
  621.             data_rows = [
  622.                 [3.00, 100.00, 5.00, 10.00],
  623.                 [4.50, 98.50, 5.50, 11.00],
  624.                 [6.00, 97.00, 6.00, 12.00]
  625.             ]
  626.  
  627.             df_template = pd.DataFrame(data_rows, columns=columns)
  628.             template_path = os.path.join(self.data_dir, "template_seismic_data.xlsx")
  629.             df_template.to_excel(template_path, index=False)
  630.  
  631.             self.refresh_file_list()
  632.             ist = pytz.timezone('Asia/Kolkata')
  633.             timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  634.             self.status_var.set(f"Created template file: template_seismic_data.xlsx at {timestamp}")
  635.             messagebox.showinfo("Success", "Template file created successfully: template_seismic_data.xlsx")
  636.         except Exception as e:
  637.             messagebox.showerror("Error", f"Failed to create template file: {str(e)}")
  638.  
  639.     def find_header_row(self, df):
  640.         for idx, row in df.iterrows():
  641.             row_values = [str(val).strip() if pd.notna(val) else "" for val in row.values]
  642.             if "Depth" in row_values:
  643.                 depth_col_idx = row_values.index("Depth")
  644.                 if idx + 1 < len(df):
  645.                     next_row = df.iloc[idx + 1]
  646.                     next_row_values = [str(val).strip() if pd.notna(val) else "" for val in next_row.values]
  647.                     if depth_col_idx < len(next_row_values) and next_row_values[depth_col_idx] == "(m)":
  648.                         return idx
  649.         return None
  650.  
    def load_file_data(self, event=None):
        """Load the selected Excel/CSV file, normalise its columns, and refresh every tab.

        Expects a two-row header: a label row containing "Depth" with "(m)"
        directly beneath it (located via ``find_header_row``).  On success,
        ``self.raw_data`` and ``self.current_data`` hold the cleaned table and
        all analysis/visualisation tabs are repopulated.  Any failure shows an
        error dialog and clears the tabs.
        """
        selected_file = self.file_var.get()
        if not selected_file:
            return

        self.clear_tabs()

        self.current_file = selected_file
        file_path = os.path.join(self.data_dir, selected_file)
        try:
            if selected_file.endswith('.xlsx'):
                # Scan every sheet until one contains the two-row header.
                xl = pd.ExcelFile(file_path)
                sheet_names = xl.sheet_names
                header_row = None
                target_sheet = None
                for sheet in sheet_names:
                    df_sheet = pd.read_excel(file_path, sheet_name=sheet, header=None)
                    header_row = self.find_header_row(df_sheet)
                    if header_row is not None:
                        target_sheet = sheet
                        break

                if header_row is None:
                    raise ValueError("Could not find 'Depth' followed by '(m)' in any sheet of the file.")

                # Re-read with the two header rows combined into a MultiIndex.
                df = pd.read_excel(file_path, sheet_name=target_sheet, header=[header_row, header_row + 1])
                # Flatten label/unit header pairs into single names, suffixing
                # duplicates with a running counter ("name 1", "name 2", ...).
                col_names = []
                col_counts = {}
                for col in df.columns:
                    col_name = f"{col[0]} {col[1]}".strip() if col[1] and pd.notna(col[1]) else col[0]
                    if col_name in col_counts:
                        col_counts[col_name] += 1
                        col_names.append(f"{col_name} {col_counts[col_name]}")
                    else:
                        col_counts[col_name] = 0
                        col_names.append(col_name)
                df.columns = col_names
            elif selected_file.endswith('.csv'):
                # Same two-row-header handling for CSV input.
                df_temp = pd.read_csv(file_path, header=None)
                header_row = self.find_header_row(df_temp)
                if header_row is None:
                    raise ValueError("Could not find 'Depth' followed by '(m)' in the CSV file.")

                df = pd.read_csv(file_path, header=[header_row, header_row + 1])
                col_names = []
                col_counts = {}
                for col in df.columns:
                    col_name = f"{col[0]} {col[1]}".strip() if col[1] and pd.notna(col[1]) else col[0]
                    if col_name in col_counts:
                        col_counts[col_name] += 1
                        col_names.append(f"{col_name} {col_counts[col_name]}")
                    else:
                        col_counts[col_name] = 0
                        col_names.append(col_name)
                df.columns = col_names
            else:
                raise ValueError("Unsupported file format. Please upload an Excel (.xlsx) or CSV (.csv) file.")

            # Normalise header text: drop embedded newlines, canonicalise depth column name.
            df.columns = [col.replace('\n', ' ') for col in df.columns]
            df.columns = ['Depth (m)' if col.startswith('Depth') else col for col in df.columns]

            # Keep the first elevation column that actually has data; drop the rest.
            elevation_cols = [col for col in df.columns if col.startswith('Elevation')]
            if elevation_cols:
                valid_elevation_col = None
                for col in elevation_cols:
                    if df[col].notna().any():
                        valid_elevation_col = col
                        break
                if valid_elevation_col:
                    df.rename(columns={valid_elevation_col: 'Elevation (m)'}, inplace=True)
                    other_elevation_cols = [col for col in elevation_cols if col != valid_elevation_col]
                    df.drop(columns=other_elevation_cols, inplace=True)

            # Discard fully-empty columns, then rows empty apart from depth,
            # then rows with no depth at all.
            df = df.dropna(axis=1, how='all')
            df = df.loc[:, (df.notna().any()) | (df.columns == 'Depth (m)')].dropna(subset=df.columns.difference(['Depth (m)']), how='all')
            df_raw = df[df['Depth (m)'].notna()]
            self.raw_data = df_raw.copy()

            self.current_data = self.raw_data.copy()

            required_columns = ['Depth (m)', 'Elevation (m)']
            missing_columns = [col for col in required_columns if col not in df.columns]
            if missing_columns:
                raise ValueError(f"Missing required columns: {', '.join(missing_columns)}")

            # Repopulate every tab from the freshly loaded table.
            self.display_raw_data(self.raw_data)
            analysis_df = self.analyze_data(self.current_data)
            self.display_analysis(analysis_df)
            self.check_data_quality(self.current_data)
            self.display_quality_check()
            self.display_summary(analysis_df)
            self.identify_layers(analysis_df)
            self.display_layers()
            self.plot_visualizations(self.current_data, analysis_df)

            ist = pytz.timezone('Asia/Kolkata')
            timestamp = datetime.now(ist).strftime("%I:%M %p IST")
            self.status_var.set(f"Loaded: {selected_file} at {timestamp}")
        except Exception as e:
            messagebox.showerror("Error", f"Failed to load file: {str(e)}")
            self.clear_tabs()
  752.  
  753.     def on_select_row(self, event):
  754.         item = self.raw_tree.identify_row(event.y)
  755.         if not item:
  756.             return
  757.         idx = int(self.raw_tree.index(item))
  758.         current_value = self.selected_rows.get(idx, False)
  759.         self.selected_rows[idx] = not current_value
  760.         self.raw_tree.set(item, "Select", "✔" if self.selected_rows[idx] else "")
  761.  
  762.     def display_raw_data(self, df):
  763.         for item in self.raw_tree.get_children():
  764.             self.raw_tree.delete(item)
  765.  
  766.         full_df = self.raw_data
  767.         columns = ["Select"] + list(full_df.columns)
  768.         self.raw_tree["columns"] = columns
  769.         for col in columns:
  770.             self.raw_tree.heading(col, text=col)
  771.             self.raw_tree.column(col, anchor=tk.CENTER, stretch=True)
  772.  
  773.         self.selected_rows = {}
  774.         for idx, row in full_df.iterrows():
  775.             formatted_row = [""] + [str(val) if pd.notna(val) else "" for val in row]
  776.             self.raw_tree.insert("", tk.END, values=formatted_row)
  777.             self.selected_rows[idx] = False
  778.  
  779.         self.raw_tree.column("Select", width=50, minwidth=50)
  780.         self.raw_tree.bind("<Button-1>", self.on_select_row)
  781.  
  782.         for col in columns[1:]:
  783.             data_lengths = [len(str(row.get(col, ""))) if pd.notna(row.get(col)) else 0 for _, row in full_df.iterrows()]
  784.             data_max = max(data_lengths, default=0)
  785.             max_length = max(len(str(col)), data_max)
  786.             self.raw_tree.column(col, width=max_length * 10, minwidth=150)
  787.  
  788.     def apply_selection(self):
  789.         if self.raw_data is None:
  790.             return
  791.  
  792.         df = self.raw_data.copy()
  793.  
  794.         try:
  795.             min_depth = float(self.raw_data_min_depth_var.get()) if self.raw_data_min_depth_var.get() else df['Depth (m)'].min()
  796.             max_depth = float(self.raw_data_max_depth_var.get()) if self.raw_data_max_depth_var.get() else df['Depth (m)'].max()
  797.             if min_depth > max_depth:
  798.                 min_depth, max_depth = max_depth, min_depth
  799.             df = df[(df['Depth (m)'] >= min_depth) & (df['Depth (m)'] <= max_depth)]
  800.         except ValueError:
  801.             pass
  802.  
  803.         selected_indices = [idx for idx, selected in self.selected_rows.items() if selected]
  804.         if selected_indices:
  805.             df = df.iloc[selected_indices]
  806.  
  807.         self.current_data = df
  808.         analysis_df = self.analyze_data(self.current_data)
  809.         self.display_analysis(analysis_df)
  810.         self.check_data_quality(self.current_data)
  811.         self.display_quality_check()
  812.         self.display_summary(analysis_df)
  813.         self.identify_layers(analysis_df)
  814.         self.display_layers()
  815.         self.plot_visualizations(self.current_data, analysis_df)
  816.  
  817.         ist = pytz.timezone('Asia/Kolkata')
  818.         timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  819.         self.status_var.set(f"Applied selection: {len(self.current_data)} rows at {timestamp}")
  820.  
  821.     def reset_selection(self):
  822.         if self.raw_data is None:
  823.             return
  824.  
  825.         self.current_data = self.raw_data.copy()
  826.         self.raw_data_min_depth_var.set("")
  827.         self.raw_data_max_depth_var.set("")
  828.         self.selected_rows = {idx: False for idx in range(len(self.raw_data))}
  829.         for item in self.raw_tree.get_children():
  830.             self.raw_tree.set(item, "Select", "")
  831.  
  832.         analysis_df = self.analyze_data(self.current_data)
  833.         self.display_analysis(analysis_df)
  834.         self.check_data_quality(self.current_data)
  835.         self.display_quality_check()
  836.         self.display_summary(analysis_df)
  837.         self.identify_layers(analysis_df)
  838.         self.display_layers()
  839.         self.plot_visualizations(self.current_data, analysis_df)
  840.  
  841.         ist = pytz.timezone('Asia/Kolkata')
  842.         timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  843.         self.status_var.set(f"Reset selection: {len(self.current_data)} rows at {timestamp}")
  844.  
  845.     def check_data_quality(self, df):
  846.         p_wave_cols = [col for col in df.columns if "P wave time" in col]
  847.         s_wave_cols = [col for col in df.columns if "S wave time" in col]
  848.  
  849.         if not p_wave_cols or not s_wave_cols:
  850.             self.quality_issues = pd.DataFrame(columns=['Depth (m)', 'First P-wave Time (µs)', 'First S-wave Time (µs)', 'Flag Reason'])
  851.             return
  852.  
  853.         df['First P-wave Time (µs)'] = df[p_wave_cols].min(axis=1) * 1000
  854.         df['First S-wave Time (µs)'] = df[s_wave_cols].min(axis=1) * 1000
  855.  
  856.         p_median = df['First P-wave Time (µs)'].median()
  857.         p_std = df['First P-wave Time (µs)'].std()
  858.         s_median = df['First S-wave Time (µs)'].median()
  859.         s_std = df['First S-wave Time (µs)'].std()
  860.  
  861.         depth_diff = df['Depth (m)'].diff()
  862.         non_monotonic_indices = depth_diff[depth_diff <= 0].index
  863.  
  864.         quality_issues = []
  865.         for idx, row in df.iterrows():
  866.             p_time = row['First P-wave Time (µs)']
  867.             s_time = row['First S-wave Time (µs)']
  868.             depth = row['Depth (m)']
  869.             flags = []
  870.  
  871.             if pd.notna(p_time):
  872.                 if p_time < 0.5:
  873.                     flags.append("P-wave time too small (< 0.5 µs)")
  874.                 if abs(p_time - p_median) > 2 * p_std:
  875.                     flags.append("P-wave time outlier")
  876.             if pd.notna(s_time):
  877.                 if s_time < 0.5:
  878.                     flags.append("S-wave time too small (< 0.5 µs)")
  879.                 if abs(s_time - s_median) > 2 * s_std:
  880.                     flags.append("S-wave time outlier")
  881.  
  882.             if idx in non_monotonic_indices and idx > 0:
  883.                 flags.append("Depth not monotonically increasing")
  884.  
  885.             if flags:
  886.                 quality_issues.append([depth, p_time, s_time, "; ".join(flags)])
  887.  
  888.         if quality_issues:
  889.             self.quality_issues = pd.DataFrame(quality_issues, columns=['Depth (m)', 'First P-wave Time (µs)', 'First S-wave Time (µs)', 'Flag Reason'])
  890.         else:
  891.             self.quality_issues = pd.DataFrame(columns=['Depth (m)', 'First P-wave Time (µs)', 'First S-wave Time (µs)', 'Flag Reason'])
  892.  
  893.     def display_quality_check(self):
  894.         for item in self.quality_tree.get_children():
  895.             self.quality_tree.delete(item)
  896.  
  897.         columns = list(self.quality_issues.columns)
  898.         self.quality_tree["columns"] = columns
  899.         for col in columns:
  900.             self.quality_tree.heading(col, text=col)
  901.             self.quality_tree.column(col, anchor=tk.CENTER)
  902.  
  903.         for idx, row in self.quality_issues.iterrows():
  904.             formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) for val in row]
  905.             self.quality_tree.insert("", tk.END, values=formatted_row)
  906.  
  907.         for col in columns:
  908.             data_lengths = [len(str(row.get(col, ""))) if pd.notna(row.get(col)) else 0 for _, row in self.quality_issues.iterrows()]
  909.             data_max = max(data_lengths, default=0)
  910.             max_length = max(len(str(col)), data_max)
  911.             min_width = 300 if col == "Flag Reason" else 150
  912.             self.quality_tree.column(col, width=max_length * 10, minwidth=min_width)
  913.  
  914.     def run_monte_carlo(self):
  915.         if self.current_data is None:
  916.             messagebox.showwarning("No Data", "Please load a valid file before running Monte Carlo simulation.")
  917.             return
  918.  
  919.         try:
  920.             df = self.current_data.copy()
  921.             n_iterations = 1000
  922.             time_noise_std = 0.01  # 1% noise in travel times
  923.  
  924.             p_wave_cols = [col for col in df.columns if "P wave time" in col]
  925.             s_wave_cols = [col for col in df.columns if "S wave time" in col]
  926.             if not p_wave_cols or not s_wave_cols:
  927.                 raise ValueError("P-wave or S-wave time columns not found.")
  928.  
  929.             results = {
  930.                 'P-wave Velocity (m/s)': [],
  931.                 'S-wave Velocity (m/s)': [],
  932.                 'Vp/Vs Ratio': []
  933.             }
  934.  
  935.             for _ in range(n_iterations):
  936.                 df_sim = df.copy()
  937.                 for col in p_wave_cols:
  938.                     df_sim[col] = df_sim[col] * (1 + np.random.normal(0, time_noise_std, len(df_sim)))
  939.                 for col in s_wave_cols:
  940.                     df_sim[col] = df_sim[col] * (1 + np.random.normal(0, time_noise_std, len(df_sim)))
  941.  
  942.                 analysis_sim = self.analyze_data(df_sim)
  943.                 for param in results.keys():
  944.                     results[param].append(analysis_sim[param].values)
  945.  
  946.             for param in results:
  947.                 results[param] = np.array(results[param])
  948.                 mean = np.mean(results[param], axis=0)
  949.                 ci_lower = np.percentile(results[param], 2.5, axis=0)
  950.                 ci_upper = np.percentile(results[param], 97.5, axis=0)
  951.                 df[f'{param} Mean'] = mean
  952.                 df[f'{param} CI Lower'] = ci_lower
  953.                 df[f'{param} CI Upper'] = ci_upper
  954.  
  955.             self.current_data = df
  956.             self.display_analysis(self.current_data)
  957.             ist = pytz.timezone('Asia/Kolkata')
  958.             timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  959.             self.status_var.set(f"Monte Carlo simulation completed at {timestamp}")
  960.         except Exception as e:
  961.             messagebox.showerror("Error", f"Failed to run Monte Carlo simulation: {str(e)}")
  962.  
  963.     def upload_correlation_file(self):
  964.         file_path = filedialog.askopenfilename(filetypes=[
  965.             ("Excel and CSV Files", "*.xlsx *.csv"),
  966.             ("Excel Files", "*.xlsx"),
  967.             ("CSV Files", "*.csv"),
  968.             ("All Files", "*.*")
  969.         ])
  970.         if file_path:
  971.             try:
  972.                 if file_path.endswith('.xlsx'):
  973.                     df = pd.read_excel(file_path)
  974.                 elif file_path.endswith('.csv'):
  975.                     df = pd.read_csv(file_path)
  976.                 else:
  977.                     raise ValueError("Unsupported file format.")
  978.  
  979.                 required_cols = ['Depth (m)']
  980.                 if not all(col in df.columns for col in required_cols):
  981.                     raise ValueError("SPT/Sonic Log file must contain 'Depth (m)' column.")
  982.  
  983.                 self.correlation_data = df
  984.                 ist = pytz.timezone('Asia/Kolkata')
  985.                 timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  986.                 self.status_var.set(f"Uploaded SPT/Sonic Log file: {os.path.basename(file_path)} at {timestamp}")
  987.             except Exception as e:
  988.                 messagebox.showerror("Error", f"Failed to upload SPT/Sonic Log file: {str(e)}")
  989.  
  990.     def run_correlation_analysis(self):
  991.         if self.current_data is None or self.correlation_data is None:
  992.             messagebox.showwarning("No Data", "Please load both CHST and SPT/Sonic Log files.")
  993.             return
  994.  
  995.         try:
  996.             seismic_df = self.analyze_data(self.current_data)
  997.             correlation_df = self.correlation_data
  998.  
  999.             merged_df = pd.merge(seismic_df, correlation_df, on='Depth (m)', how='inner')
  1000.             if merged_df.empty:
  1001.                 raise ValueError("No matching depths found between CHST and SPT/Sonic Log data.")
  1002.  
  1003.             correlation_results = []
  1004.             seismic_params = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio']
  1005.             correlation_params = [col for col in correlation_df.columns if col != 'Depth (m)']
  1006.  
  1007.             for s_param in seismic_params:
  1008.                 for c_param in correlation_params:
  1009.                     if merged_df[s_param].notna().sum() > 1 and merged_df[c_param].notna().sum() > 1:
  1010.                         corr, p_value = pearsonr(merged_df[s_param].dropna(), merged_df[c_param].dropna())
  1011.                         correlation_results.append([s_param, c_param, corr, p_value])
  1012.  
  1013.             self.correlation_results = pd.DataFrame(correlation_results, columns=['Seismic Parameter', 'Correlation Parameter', 'Pearson Correlation', 'P-Value'])
  1014.  
  1015.             self.display_correlation()
  1016.             self.plot_correlation(merged_df)
  1017.             ist = pytz.timezone('Asia/Kolkata')
  1018.             timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  1019.             self.status_var.set(f"Correlation analysis completed at {timestamp}")
  1020.         except Exception as e:
  1021.             messagebox.showerror("Error", f"Failed to run correlation analysis: {str(e)}")
  1022.  
  1023.     def display_correlation(self):
  1024.         for item in self.correlation_tree.get_children():
  1025.             self.correlation_tree.delete(item)
  1026.  
  1027.         columns = list(self.correlation_results.columns)
  1028.         self.correlation_tree["columns"] = columns
  1029.         for col in columns:
  1030.             self.correlation_tree.heading(col, text=col)
  1031.             self.correlation_tree.column(col, anchor=tk.CENTER)
  1032.  
  1033.         for idx, row in self.correlation_results.iterrows():
  1034.             formatted_row = [f"{val:.3f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) for val in row]
  1035.             self.correlation_tree.insert("", tk.END, values=formatted_row)
  1036.  
  1037.         for col in columns:
  1038.             data_lengths = [len(str(row.get(col, ""))) if pd.notna(row.get(col)) else 0 for _, row in self.correlation_results.iterrows()]
  1039.             data_max = max(data_lengths, default=0)
  1040.             max_length = max(len(str(col)), data_max)
  1041.             self.correlation_tree.column(col, width=max_length * 10, minwidth=150)
  1042.  
  1043.     def plot_correlation(self, merged_df):
  1044.         if hasattr(self, 'correlation_canvas') and self.correlation_canvas:
  1045.             self.correlation_canvas.get_tk_widget().destroy()
  1046.         if hasattr(self, 'correlation_fig') and self.correlation_fig:
  1047.             plt.close(self.correlation_fig)
  1048.         self.correlation_plot_label.pack_forget()
  1049.  
  1050.         seismic_params = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)']
  1051.         correlation_params = [col for col in merged_df.columns if col not in seismic_params + ['Depth (m)']]
  1052.  
  1053.         if not correlation_params:
  1054.             self.correlation_plot_label = ttk.Label(self.correlation_tree.master, text="No valid correlation parameters found.")
  1055.             self.correlation_plot_label.pack(fill=tk.BOTH, expand=True)
  1056.             return
  1057.  
  1058.         n_plots = len(seismic_params) * len(correlation_params)
  1059.         rows = (n_plots + 2) // 3
  1060.         self.correlation_fig, axes = plt.subplots(rows, 3, figsize=(15, 4 * rows))
  1061.         axes = axes.flatten()
  1062.  
  1063.         plot_idx = 0
  1064.         for s_param in seismic_params:
  1065.             for c_param in correlation_params:
  1066.                 axes[plot_idx].scatter(merged_df[s_param], merged_df[c_param], c='b', alpha=0.5)
  1067.                 axes[plot_idx].set_xlabel(s_param)
  1068.                 axes[plot_idx].set_ylabel(c_param)
  1069.                 axes[plot_idx].set_title(f'{s_param} vs {c_param}')
  1070.                 axes[plot_idx].grid(True)
  1071.                 plot_idx += 1
  1072.  
  1073.         for i in range(plot_idx, len(axes)):
  1074.             axes[i].axis('off')
  1075.  
  1076.         plt.tight_layout()
  1077.         self.correlation_canvas = FigureCanvasTkAgg(self.correlation_fig, master=self.correlation_tree.master)
  1078.         self.correlation_canvas.draw()
  1079.         self.correlation_canvas.get_tk_widget().pack(fill=tk.BOTH, expand=True)
  1080.  
  1081.     def start_realtime(self):
  1082.         if self.realtime_running:
  1083.             return
  1084.  
  1085.         self.realtime_running = True
  1086.         self.realtime_data = pd.DataFrame(columns=['Depth (m)', 'Elevation (m)', 'P wave time (ms)', 'S wave time (ms)'])
  1087.         self.display_realtime()
  1088.  
  1089.         self.realtime_thread = threading.Thread(target=self.realtime_acquisition, daemon=True)
  1090.         self.realtime_thread.start()
  1091.         self.status_var.set("Started real-time acquisition.")
  1092.  
  1093.     def stop_realtime(self):
  1094.         self.realtime_running = False
  1095.         self.status_var.set("Stopped real-time acquisition.")
  1096.  
    def realtime_acquisition(self):
        """Background polling loop: once per second, merge any rows found in
        ``realtime_chst_data.csv`` into ``self.realtime_data`` (deduplicated
        on depth) and schedule a UI refresh on the Tk main loop.

        Runs on the daemon thread started by ``start_realtime``; exits when
        ``stop_realtime`` clears ``self.realtime_running``.

        NOTE(review): ``self.realtime_data`` is rebound here on the worker
        thread while Tk callbacks read it on the main thread — presumably
        fine since rebinding is a single assignment, but confirm no reader
        iterates a half-updated frame.
        """
        realtime_file = os.path.join(self.data_dir, "realtime_chst_data.csv")
        while self.realtime_running:
            try:
                if os.path.exists(realtime_file):
                    df_new = pd.read_csv(realtime_file)
                    if not df_new.empty:
                        # Deduplicate on depth so re-reading the same file does not grow the table.
                        self.realtime_data = pd.concat([self.realtime_data, df_new]).drop_duplicates(subset=['Depth (m)']).reset_index(drop=True)
                        # Marshal the widget update onto the Tk main loop.
                        self.root.after(0, self.update_realtime_display)
            except Exception as e:
                print(f"Real-time error: {str(e)}")
            time.sleep(1)
  1109.  
    def update_realtime_display(self):
        """Refresh the real-time table and plot (invoked on the Tk main thread via ``after``)."""
        self.display_realtime()
        self.plot_realtime()
  1113.  
  1114.     def display_realtime(self):
  1115.         for item in self.realtime_tree.get_children():
  1116.             self.realtime_tree.delete(item)
  1117.  
  1118.         columns = list(self.realtime_data.columns)
  1119.         self.realtime_tree["columns"] = columns
  1120.         for col in columns:
  1121.             self.realtime_tree.heading(col, text=col)
  1122.             self.realtime_tree.column(col, anchor=tk.CENTER)
  1123.  
  1124.         for idx, row in self.realtime_data.iterrows():
  1125.             formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) for val in row]
  1126.             self.realtime_tree.insert("", tk.END, values=formatted_row)
  1127.  
  1128.         for col in columns:
  1129.             data_lengths = [len(str(row.get(col, ""))) if pd.notna(row.get(col)) else 0 for _, row in self.realtime_data.iterrows()]
  1130.             data_max = max(data_lengths, default=0)
  1131.             max_length = max(len(str(col)), data_max)
  1132.             self.realtime_tree.column(col, width=max_length * 10, minwidth=150)
  1133.  
  1134.     def plot_realtime(self):
  1135.         if hasattr(self, 'realtime_canvas') and self.realtime_canvas:
  1136.             self.realtime_canvas.get_tk_widget().destroy()
  1137.         if hasattr(self, 'realtime_fig') and self.realtime_fig:
  1138.             plt.close(self.realtime_fig)
  1139.         self.realtime_plot_label.pack_forget()
  1140.  
  1141.         if self.realtime_data.empty:
  1142.             self.realtime_plot_label = ttk.Label(self.realtime_tree.master, text="No real-time data available.")
  1143.             self.realtime_plot_label.pack(fill=tk.BOTH, expand=True)
  1144.             return
  1145.  
  1146.         analysis_df = self.analyze_data(self.realtime_data)
  1147.         self.realtime_fig, ax = plt.subplots(figsize=(10, 5))
  1148.         ax.plot(analysis_df['Depth (m)'], analysis_df['P-wave Velocity (m/s)'], 'b-', label='P-wave Velocity')
  1149.         ax.plot(analysis_df['Depth (m)'], analysis_df['S-wave Velocity (m/s)'], 'r-', label='S-wave Velocity')
  1150.         ax.set_xlabel('Depth (m)')
  1151.         ax.set_ylabel('Velocity (m/s)')
  1152.         ax.set_title('Real-Time Seismic Velocities')
  1153.         ax.legend()
  1154.         ax.grid(True)
  1155.  
  1156.         self.realtime_canvas = FigureCanvasTkAgg(self.realtime_fig, master=self.realtime_tree.master)
  1157.         self.realtime_canvas.draw()
  1158.         self.realtime_canvas.get_tk_widget().pack(fill=tk.BOTH, expand=True)
  1159.  
  1160.     def analyze_data(self, df):
  1161.         df = df.copy()
  1162.         df['Depth (m)'] = pd.to_numeric(df['Depth (m)'], errors='coerce')
  1163.        
  1164.         p_wave_cols = [col for col in df.columns if "P wave time" in col]
  1165.         s_wave_cols = [col for col in df.columns if "S wave time" in col]
  1166.  
  1167.         if not p_wave_cols or not s_wave_cols:
  1168.             raise ValueError("P-wave or S-wave time columns not found in the data.")
  1169.  
  1170.         for col in p_wave_cols + s_wave_cols:
  1171.             df[col] = pd.to_numeric(df[col], errors='coerce')
  1172.  
  1173.         df['First P-wave Time (µs)'] = df[p_wave_cols].min(axis=1) * 1000
  1174.         df['First S-wave Time (µs)'] = df[s_wave_cols].min(axis=1) * 1000
  1175.  
  1176.         df['P-wave Time Diff'] = df['First P-wave Time (µs)'].diff()
  1177.         df['S-wave Time Diff'] = df['First S-wave Time (µs)'].diff()
  1178.        
  1179.         p_mean_diff = df['P-wave Time Diff'].mean()
  1180.         p_std_diff = df['P-wave Time Diff'].std()
  1181.         s_mean_diff = df['S-wave Time Diff'].mean()
  1182.         s_std_diff = df['S-wave Time Diff'].std()
  1183.        
  1184.         df['P-wave Deviation'] = (df['P-wave Time Diff'] < 0) | (abs(df['P-wave Time Diff'] - p_mean_diff) > 2 * p_std_diff)
  1185.         df['S-wave Deviation'] = (df['S-wave Time Diff'] < 0) | (abs(df['S-wave Time Diff'] - s_mean_diff) > 2 * s_std_diff)
  1186.        
  1187.         self.deviations = df[df['P-wave Deviation'] | df['S-wave Deviation']][['Depth (m)', 'First P-wave Time (µs)', 'First S-wave Time (µs)']]
  1188.  
  1189.         df['P-wave Velocity (m/s)'] = 0.0
  1190.         df['S-wave Velocity (m/s)'] = 0.0
  1191.         for i in range(1, len(df)):
  1192.             delta_depth = df['Depth (m)'].iloc[i] - df['Depth (m)'].iloc[i-1]
  1193.             delta_p_time = (df['First P-wave Time (µs)'].iloc[i] - df['First P-wave Time (µs)'].iloc[i-1]) / 10**6 if pd.notna(df['First P-wave Time (µs)'].iloc[i]) and pd.notna(df['First P-wave Time (µs)'].iloc[i-1]) else 0
  1194.             delta_s_time = (df['First S-wave Time (µs)'].iloc[i] - df['First S-wave Time (µs)'].iloc[i-1]) / 10**6 if pd.notna(df['First S-wave Time (µs)'].iloc[i]) and pd.notna(df['First S-wave Time (µs)'].iloc[i-1]) else 0
  1195.             if delta_depth > 0 and delta_p_time > 0:
  1196.                 df.loc[df.index[i], 'P-wave Velocity (m/s)'] = delta_depth / delta_p_time
  1197.             if delta_depth > 0 and delta_s_time > 0:
  1198.                 df.loc[df.index[i], 'S-wave Velocity (m/s)'] = delta_depth / delta_s_time
  1199.  
  1200.         df['P-wave Velocity (m/s)'] = df['P-wave Velocity (m/s)'].replace([np.inf, -np.inf], 0).fillna(0)
  1201.         df['S-wave Velocity (m/s)'] = df['S-wave Velocity (m/s)'].replace([np.inf, -np.inf], 0).fillna(0)
  1202.  
  1203.         df['Vp/Vs Ratio'] = df['P-wave Velocity (m/s)'] / df['S-wave Velocity (m/s)']
  1204.         df['Vp/Vs Ratio'] = df['Vp/Vs Ratio'].replace([np.inf, -np.inf], 0).fillna(0)
  1205.  
  1206.         vp_vs = df['Vp/Vs Ratio']
  1207.         df['Poisson\'s Ratio'] = ((vp_vs**2 - 2) / (2 * (vp_vs**2 - 1))).replace([np.inf, -np.inf], 0).fillna(0)
  1208.  
  1209.         rho = 2500
  1210.         df['Shear Modulus (GPa)'] = (rho * (df['S-wave Velocity (m/s)']**2)) / 1e9
  1211.         df['Shear Modulus (GPa)'] = df['Shear Modulus (GPa)'].replace([np.inf, -np.inf], 0).fillna(0)
  1212.  
  1213.         df['Bulk Modulus (GPa)'] = (rho * (df['P-wave Velocity (m/s)']**2 - (4/3) * df['S-wave Velocity (m/s)']**2)) / 1e9
  1214.         df['Bulk Modulus (GPa)'] = df['Bulk Modulus (GPa)'].replace([np.inf, -np.inf], 0).fillna(0)
  1215.  
  1216.         num = 3 * df['P-wave Velocity (m/s)']**2 - 4 * df['S-wave Velocity (m/s)']**2
  1217.         denom = df['P-wave Velocity (m/s)']**2 - df['S-wave Velocity (m/s)']**2
  1218.         df['Young\'s Modulus (GPa)'] = (rho * df['S-wave Velocity (m/s)']**2 * num / denom) / 1e9
  1219.         df['Young\'s Modulus (GPa)'] = df['Young\'s Modulus (GPa)'].replace([np.inf, -np.inf], 0).fillna(0)
  1220.  
  1221.         df['Lame\'s Lambda (GPa)'] = df['Bulk Modulus (GPa)'] - (2/3) * df['Shear Modulus (GPa)']
  1222.         df['Lame\'s Lambda (GPa)'] = df['Lame\'s Lambda (GPa)'].replace([np.inf, -np.inf], 0).fillna(0)
  1223.  
  1224.         params = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio',
  1225.                   'Poisson\'s Ratio', 'Shear Modulus (GPa)', 'Bulk Modulus (GPa)',
  1226.                   'Young\'s Modulus (GPa)', 'Lame\'s Lambda (GPa)']
  1227.        
  1228.         analysis_df = df.copy()
  1229.         for param in params:
  1230.             analysis_df[f"{param} +10%"] = analysis_df[param] * 1.10
  1231.             analysis_df[f"{param} -10%"] = analysis_df[param] * 0.90
  1232.  
  1233.         analysis_cols = ['Depth (m)', 'Elevation (m)', 'First P-wave Time (µs)', 'First S-wave Time (µs)'] + \
  1234.                         [col for col in analysis_df.columns if col.startswith(tuple(params)) or col in params]
  1235.         return analysis_df[analysis_cols]
  1236.  
  1237.     def identify_layers(self, analysis_df):
  1238.         layers = []
  1239.         threshold = 0.20
  1240.         for i in range(1, len(analysis_df)):
  1241.             prev_p_velocity = analysis_df['P-wave Velocity (m/s)'].iloc[i-1]
  1242.             curr_p_velocity = analysis_df['P-wave Velocity (m/s)'].iloc[i]
  1243.             prev_s_velocity = analysis_df['S-wave Velocity (m/s)'].iloc[i-1]
  1244.             curr_s_velocity = analysis_df['S-wave Velocity (m/s)'].iloc[i]
  1245.             depth = analysis_df['Depth (m)'].iloc[i]
  1246.  
  1247.             p_change = abs(curr_p_velocity - prev_p_velocity) / prev_p_velocity if prev_p_velocity != 0 else 0
  1248.             s_change = abs(curr_s_velocity - prev_s_velocity) / prev_s_velocity if prev_s_velocity != 0 else 0
  1249.  
  1250.             if p_change > threshold or s_change > threshold:
  1251.                 reason = []
  1252.                 if p_change > threshold:
  1253.                     reason.append(f"P-wave velocity change: {p_change:.2%}")
  1254.                 if s_change > threshold:
  1255.                     reason.append(f"S-wave velocity change: {s_change:.2%}")
  1256.                 layers.append([depth, curr_p_velocity, curr_s_velocity, "; ".join(reason)])
  1257.  
  1258.         if layers:
  1259.             self.layers = pd.DataFrame(layers, columns=['Depth (m)', 'P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Reason'])
  1260.         else:
  1261.             self.layers = pd.DataFrame(columns=['Depth (m)', 'P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Reason'])
  1262.  
  1263.     def display_layers(self):
  1264.         for item in self.layers_tree.get_children():
  1265.             self.layers_tree.delete(item)
  1266.  
  1267.         columns = list(self.layers.columns)
  1268.         self.layers_tree["columns"] = columns
  1269.         for col in columns:
  1270.             self.layers_tree.heading(col, text=col)
  1271.             self.layers_tree.column(col, anchor=tk.CENTER, stretch=True)
  1272.  
  1273.         for idx, row in self.layers.iterrows():
  1274.             formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) for val in row]
  1275.             self.layers_tree.insert("", tk.END, values=formatted_row)
  1276.  
  1277.         for col in columns:
  1278.             data_lengths = [len(str(row.get(col, ""))) if pd.notna(row.get(col)) else 0 for _, row in self.layers.iterrows()]
  1279.             data_max = max(data_lengths, default=0)
  1280.             max_length = max(len(str(col)), data_max)
  1281.             min_width = 1200 if col == "Reason" else 150
  1282.             self.layers_tree.column(col, width=max(max_length * 20, min_width), minwidth=min_width, stretch=True)
  1283.  
  1284.     def display_analysis(self, analysis_df):
  1285.         for item in self.analysis_tree.get_children():
  1286.             self.analysis_tree.delete(item)
  1287.  
  1288.         columns = list(analysis_df.columns)
  1289.         self.analysis_tree["columns"] = columns
  1290.         for col in columns:
  1291.             self.analysis_tree.heading(col, text=col)
  1292.             self.analysis_tree.column(col, anchor=tk.CENTER)
  1293.  
  1294.         for idx, row in analysis_df.iterrows():
  1295.             formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
  1296.             self.analysis_tree.insert("", tk.END, values=formatted_row)
  1297.  
  1298.         for col in columns:
  1299.             data_lengths = [len(str(row.get(col, ""))) if pd.notna(row.get(col)) else 0 for _, row in analysis_df.iterrows()]
  1300.             data_max = max(data_lengths, default=0)
  1301.             max_length = max(len(str(col)), data_max)
  1302.             self.analysis_tree.column(col, width=max_length * 10, minwidth=150)
  1303.  
  1304.     def compute_summary_stats(self, analysis_df):
  1305.         params = ['P-wave Velocity (m/s)', 'S-wave Velocity (m/s)', 'Vp/Vs Ratio',
  1306.                   'Poisson\'s Ratio', 'Shear Modulus (GPa)', 'Bulk Modulus (GPa)',
  1307.                   'Young\'s Modulus (GPa)', 'Lame\'s Lambda (GPa)']
  1308.        
  1309.         summary_data = []
  1310.         for param in params:
  1311.             data = analysis_df[param]
  1312.             summary_data.append([
  1313.                 param,
  1314.                 data.mean(),
  1315.                 data.median(),
  1316.                 data.std(),
  1317.                 data.min(),
  1318.                 data.max()
  1319.             ])
  1320.  
  1321.         return pd.DataFrame(summary_data, columns=['Parameter', 'Mean', 'Median', 'Std Dev', 'Min', 'Max'])
  1322.  
  1323.     def display_summary(self, analysis_df):
  1324.         summary_df = self.compute_summary_stats(analysis_df)
  1325.        
  1326.         for item in self.summary_tree.get_children():
  1327.             self.summary_tree.delete(item)
  1328.  
  1329.         columns = list(summary_df.columns)
  1330.         self.summary_tree["columns"] = columns
  1331.         for col in columns:
  1332.             self.summary_tree.heading(col, text=col)
  1333.             self.summary_tree.column(col, anchor=tk.CENTER)
  1334.  
  1335.         for idx, row in summary_df.iterrows():
  1336.             formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) for val in row]
  1337.             self.summary_tree.insert("", tk.END, values=formatted_row)
  1338.  
  1339.         for col in columns:
  1340.             data_lengths = [len(str(row.get(col, ""))) if pd.notna(row.get(col)) else 0 for _, row in summary_df.iterrows()]
  1341.             data_max = max(data_lengths, default=0)
  1342.             max_length = max(len(str(col)), data_max)
  1343.             self.summary_tree.column(col, width=max_length * 10, minwidth=150)
  1344.  
    def plot_visualizations(self, df, analysis_df):
        """Redraw the selected parameter-vs-depth plots in the scrollable frame.

        Parameters:
            df: raw data frame (not used in this method's body; kept so the
                callers' two-frame signature stays stable).
            analysis_df: derived-parameter table; must contain 'Depth (m)' and
                the parameter columns referenced in ``plot_configs``.

        Side effects: destroys the previous canvas/figure and stores the new
        ones on ``self.current_fig`` / ``self.plot_canvas``.
        """
        # Tear down the previous rendering so widgets and figures don't
        # accumulate across redraws.
        if self.plot_canvas:
            self.plot_canvas.get_tk_widget().destroy()
        if self.current_fig:
            plt.close(self.current_fig)
        self.plot_label.pack_forget()

        # Depth window from the UI entry fields; blank or non-numeric input
        # falls back to the full data range, and a reversed window is swapped.
        try:
            min_depth = float(self.min_depth_var.get()) if self.min_depth_var.get() else analysis_df['Depth (m)'].min()
            max_depth = float(self.max_depth_var.get()) if self.max_depth_var.get() else analysis_df['Depth (m)'].max()
            if min_depth > max_depth:
                min_depth, max_depth = max_depth, min_depth
        except ValueError:
            min_depth = analysis_df['Depth (m)'].min()
            max_depth = analysis_df['Depth (m)'].max()

        # Restrict both the analysis table and the deviation table to the window.
        plot_df = analysis_df[(analysis_df['Depth (m)'] >= min_depth) & (analysis_df['Depth (m)'] <= max_depth)]
        plot_deviations = self.deviations
        if plot_deviations is not None and not plot_deviations.empty:
            plot_deviations = plot_deviations[(plot_deviations['Depth (m)'] >= min_depth) & (plot_deviations['Depth (m)'] <= max_depth)]

        # Only parameters whose checkbox variable is set get plotted.
        plots_to_show = [param for param, var in self.plot_options.items() if var.get()]
        if not plots_to_show:
            self.plot_label = ttk.Label(self.scrollable_frame, text="Select at least one parameter to plot.")
            self.plot_label.pack(fill=tk.BOTH, expand=True)
            return

        # Fixed 3-column grid; row count grows with the number of plots.
        num_plots = len(plots_to_show)
        rows = (num_plots + 2) // 3
        fig, axes = plt.subplots(rows, 3, figsize=(15, 4 * rows))
        axes = axes.flatten()

        plot_index = 0
        # (selection key, matplotlib line style, data column / y-label, title).
        # The deviation entry has no style/column: it is drawn specially below.
        plot_configs = [
            ("Vp/Vs Ratio", 'g-', 'Vp/Vs Ratio', 'Vp/Vs Ratio vs Depth'),
            ("Poisson's Ratio", 'b-', 'Poisson\'s Ratio', 'Poisson\'s Ratio vs Depth'),
            ("Shear Modulus", 'r-', 'Shear Modulus (GPa)', 'Shear Modulus vs Depth'),
            ("Bulk Modulus", 'c-', 'Bulk Modulus (GPa)', 'Bulk Modulus vs Depth'),
            ("Young's Modulus", 'm-', 'Young\'s Modulus (GPa)', 'Young\'s Modulus vs Depth'),
            ("Lame's Lambda", 'y-', 'Lame\'s Lambda (GPa)', 'Lame\'s Lambda vs Depth'),
            ("Travel Time Deviations", None, None, 'Travel Time Deviations')
        ]

        for param, color, y_label, title in plot_configs:
            if param not in plots_to_show:
                continue

            if param == "Travel Time Deviations":
                # Deviations are plotted from the separate deviation table; if
                # none exist, show a placeholder panel instead.
                if plot_deviations is not None and not plot_deviations.empty:
                    axes[plot_index].plot(plot_deviations['Depth (m)'], plot_deviations['First P-wave Time (µs)'], 'r-', label='P-wave Deviations')
                    axes[plot_index].plot(plot_deviations['Depth (m)'], plot_deviations['First S-wave Time (µs)'], 'b-', label='S-wave Deviations')
                    axes[plot_index].set_xlabel('Depth (m)', fontsize=12)
                    axes[plot_index].set_ylabel('Travel Time (µs)', fontsize=12)
                    axes[plot_index].set_title('Travel Time Deviations', fontsize=14)
                    axes[plot_index].grid(True)
                    axes[plot_index].legend()
                else:
                    axes[plot_index].text(0.5, 0.5, 'No Deviations Detected', horizontalalignment='center', verticalalignment='center')
                    axes[plot_index].set_title('Travel Time Deviations', fontsize=14)
                    axes[plot_index].grid(True)
            else:
                axes[plot_index].plot(plot_df['Depth (m)'], plot_df[y_label], color, label=y_label)
                axes[plot_index].set_xlabel('Depth (m)', fontsize=12)
                axes[plot_index].set_ylabel(y_label, fontsize=12)
                axes[plot_index].set_title(title, fontsize=14)
                axes[plot_index].grid(True)
                axes[plot_index].legend()

            plot_index += 1

        # Blank out the unused cells of the 3-column grid.
        for i in range(plot_index, len(axes)):
            axes[i].axis('off')

        plt.tight_layout()

        # Keep references so update/save/teardown can reach the live objects.
        self.current_fig = fig
        self.plot_canvas = FigureCanvasTkAgg(fig, master=self.scrollable_frame)
        self.plot_canvas.draw()
        self.plot_canvas.get_tk_widget().pack(fill=tk.BOTH, expand=True)
  1424.  
  1425.     def update_plots(self):
  1426.         if self.current_data is None:
  1427.             return
  1428.         analysis_df = self.analyze_data(self.current_data)
  1429.         self.plot_visualizations(self.current_data, analysis_df)
  1430.  
  1431.     def save_plots(self):
  1432.         if self.current_fig is None:
  1433.             messagebox.showwarning("No Plot", "No plots available to save.")
  1434.             return
  1435.  
  1436.         file_path = filedialog.asksaveasfilename(defaultextension=".png", filetypes=[("PNG Files", "*.png"), ("All Files", "*.*")])
  1437.         if file_path:
  1438.             try:
  1439.                 self.current_fig.savefig(file_path, dpi=300, bbox_inches='tight')
  1440.                 ist = pytz.timezone('Asia/Kolkata')
  1441.                 timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  1442.                 self.status_var.set(f"Saved plot to {file_path} at {timestamp}")
  1443.             except Exception as e:
  1444.                 messagebox.showerror("Error", f"Failed to save plot: {str(e)}")
  1445.  
  1446.     def export_raw_data(self):
  1447.         if self.current_data is None:
  1448.             messagebox.showwarning("No Data", "Please load a valid file before exporting raw data.")
  1449.             return
  1450.         try:
  1451.             timestamp = datetime.now().strftime('%Y%m%d_%H%M')
  1452.             output_dir = os.path.join(self.output_base_dir, timestamp)
  1453.             os.makedirs(output_dir, exist_ok=True)
  1454.             input_filename = os.path.splitext(self.current_file)[0]
  1455.             csv_filename = f"{input_filename}_raw_data_{timestamp}.csv"
  1456.             csv_path = os.path.join(output_dir, csv_filename)
  1457.             self.current_data.to_csv(csv_path, index=False)
  1458.             ist = pytz.timezone('Asia/Kolkata')
  1459.             timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  1460.             self.status_var.set(f"Exported raw data to {csv_path} at {timestamp}")
  1461.             messagebox.showinfo("Success", f"Raw data exported successfully to {csv_path}.")
  1462.         except Exception as e:
  1463.             messagebox.showerror("Error", f"Failed to export raw data: {str(e)}")
  1464.  
  1465.     def export_quality_check(self):
  1466.         if self.quality_issues is None or self.quality_issues.empty:
  1467.             messagebox.showwarning("No Data", "No quality check data available to export.")
  1468.             return
  1469.         try:
  1470.             timestamp = datetime.now().strftime('%Y%m%d_%H%M')
  1471.             output_dir = os.path.join(self.output_base_dir, timestamp)
  1472.             os.makedirs(output_dir, exist_ok=True)
  1473.             input_filename = os.path.splitext(self.current_file)[0]
  1474.             csv_filename = f"{input_filename}_quality_check_{timestamp}.csv"
  1475.             csv_path = os.path.join(output_dir, csv_filename)
  1476.             self.quality_issues.to_csv(csv_path, index=False)
  1477.             ist = pytz.timezone('Asia/Kolkata')
  1478.             timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  1479.             self.status_var.set(f"Exported quality check to {csv_path} at {timestamp}")
  1480.             messagebox.showinfo("Success", f"Quality check exported successfully to {csv_path}.")
  1481.         except Exception as e:
  1482.             messagebox.showerror("Error", f"Failed to export quality check: {str(e)}")
  1483.  
  1484.     def export_summary(self):
  1485.         if self.current_data is None:
  1486.             messagebox.showwarning("No Data", "Please load a valid file before exporting summary.")
  1487.             return
  1488.         try:
  1489.             analysis_df = self.analyze_data(self.current_data)
  1490.             summary_df = self.compute_summary_stats(analysis_df)
  1491.             timestamp = datetime.now().strftime('%Y%m%d_%H%M')
  1492.             output_dir = os.path.join(self.output_base_dir, timestamp)
  1493.             os.makedirs(output_dir, exist_ok=True)
  1494.             input_filename = os.path.splitext(self.current_file)[0]
  1495.             csv_filename = f"{input_filename}_summary_{timestamp}.csv"
  1496.             csv_path = os.path.join(output_dir, csv_filename)
  1497.             summary_df.to_csv(csv_path, index=False)
  1498.             ist = pytz.timezone('Asia/Kolkata')
  1499.             timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  1500.             self.status_var.set(f"Exported summary to {csv_path} at {timestamp}")
  1501.             messagebox.showinfo("Success", f"Summary exported successfully to {csv_path}.")
  1502.         except Exception as e:
  1503.             messagebox.showerror("Error", f"Failed to export summary: {str(e)}")
  1504.  
  1505.     def export_layers(self):
  1506.         if self.layers is None or self.layers.empty:
  1507.             messagebox.showwarning("No Data", "No layer data available to export.")
  1508.             return
  1509.         try:
  1510.             timestamp = datetime.now().strftime('%Y%m%d_%H%M')
  1511.             output_dir = os.path.join(self.output_base_dir, timestamp)
  1512.             os.makedirs(output_dir, exist_ok=True)
  1513.             input_filename = os.path.splitext(self.current_file)[0]
  1514.             csv_filename = f"{input_filename}_layers_{timestamp}.csv"
  1515.             csv_path = os.path.join(output_dir, csv_filename)
  1516.             self.layers.to_csv(csv_path, index=False)
  1517.             ist = pytz.timezone('Asia/Kolkata')
  1518.             timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  1519.             self.status_var.set(f"Exported layers to {csv_path} at {timestamp}")
  1520.             messagebox.showinfo("Success", f"Layers exported successfully to {csv_path}.")
  1521.         except Exception as e:
  1522.             messagebox.showerror("Error", f"Failed to export layers: {str(e)}")
  1523.  
  1524.     def export_correlation(self):
  1525.         if self.correlation_results is None or self.correlation_results.empty:
  1526.             messagebox.showwarning("No Data", "No correlation data available to export.")
  1527.             return
  1528.         try:
  1529.             timestamp = datetime.now().strftime('%Y%m%d_%H%M')
  1530.             output_dir = os.path.join(self.output_base_dir, timestamp)
  1531.             os.makedirs(output_dir, exist_ok=True)
  1532.             input_filename = os.path.splitext(self.current_file)[0]
  1533.             csv_filename = f"{input_filename}_correlation_{timestamp}.csv"
  1534.             csv_path = os.path.join(output_dir, csv_filename)
  1535.             self.correlation_results.to_csv(csv_path, index=False)
  1536.             ist = pytz.timezone('Asia/Kolkata')
  1537.             timestamp = datetime.now(ist).strftime("%I:%M %p IST")
  1538.             self.status_var.set(f"Exported correlation data to {csv_path} at {timestamp}")
  1539.             messagebox.showinfo("Success", f"Correlation data exported successfully to {csv_path}.")
  1540.         except Exception as e:
  1541.             messagebox.showerror("Error", f"Failed to export correlation data: {str(e)}")
  1542.  
    def export_detailed_report(self):
        """Build a LaTeX report of all tables and compile it to PDF.

        Assembles raw data, quality check, analysis, summary, layers, and
        correlation tables into one .tex document, writes it to a timestamped
        output folder, and shells out to ``latexmk`` for PDF compilation.

        NOTE(review): requires latexmk (and the listed LaTeX packages) on PATH;
        otherwise the existence check fails and an error box is shown.
        NOTE(review): cell values and column names are inserted without LaTeX
        escaping — special characters (&, %, _, #) in data would break
        compilation. Confirm whether input data can contain them.
        """
        if self.current_data is None:
            messagebox.showwarning("No Data", "Please load a valid file before exporting detailed report.")
            return
        try:
            # Recompute the derived tables from the loaded data.
            analysis_df = self.analyze_data(self.current_data)
            summary_df = self.compute_summary_stats(analysis_df)
            timestamp = datetime.now().strftime('%Y%m%d_%H%M')
            output_dir = os.path.join(self.output_base_dir, timestamp)
            os.makedirs(output_dir, exist_ok=True)
            input_filename = os.path.splitext(self.current_file)[0]
            tex_filename = f"{input_filename}_detailed_report_{timestamp}.tex"
            tex_path = os.path.join(output_dir, tex_filename)
            pdf_filename = f"{input_filename}_detailed_report_{timestamp}.pdf"
            pdf_path = os.path.join(output_dir, pdf_filename)

            # Preamble plus the Raw Data longtable header; one centered column
            # per DataFrame column, headers in bold.
            latex_content = r"""
\documentclass[a4paper,12pt]{article}
\usepackage{booktabs}
\usepackage{longtable}
\usepackage[margin=1in]{geometry}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{noto}

\begin{document}

\section*{Detailed Seismic Analysis Report}

\subsection*{Raw Data}
\begin{longtable}{@{}""" + "c" * len(self.current_data.columns) + r"""@{}}
\toprule
""" + " & ".join([f"\\textbf{{{col}}}" for col in self.current_data.columns]) + r""" \\
\midrule
\endhead
"""
            # One table row per data row: numbers to two decimals, NaN blank.
            for _, row in self.current_data.iterrows():
                formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
                latex_content += " & ".join(formatted_row) + r" \\ \midrule" + "\n"
            latex_content += r"""
\bottomrule
\end{longtable}

\subsection*{Quality Check}
"""
            # Quality-check table, or a short note when there is nothing to show.
            if self.quality_issues is not None and not self.quality_issues.empty:
                latex_content += r"""
\begin{longtable}{@{}""" + "c" * len(self.quality_issues.columns) + r"""@{}}
\toprule
""" + " & ".join([f"\\textbf{{{col}}}" for col in self.quality_issues.columns]) + r""" \\
\midrule
\endhead
"""
                for _, row in self.quality_issues.iterrows():
                    formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
                    latex_content += " & ".join(formatted_row) + r" \\ \midrule" + "\n"
                latex_content += r"""
\bottomrule
\end{longtable}
"""
            else:
                latex_content += r"No quality issues detected."

            # Derived-parameter analysis table.
            latex_content += r"""
\subsection*{Analysis}
\begin{longtable}{@{}""" + "c" * len(analysis_df.columns) + r"""@{}}
\toprule
""" + " & ".join([f"\\textbf{{{col}}}" for col in analysis_df.columns]) + r""" \\
\midrule
\endhead
"""
            for _, row in analysis_df.iterrows():
                formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
                latex_content += " & ".join(formatted_row) + r" \\ \midrule" + "\n"
            latex_content += r"""
\bottomrule
\end{longtable}

\subsection*{Summary}
\begin{longtable}{@{}""" + "c" * len(summary_df.columns) + r"""@{}}
\toprule
""" + " & ".join([f"\\textbf{{{col}}}" for col in summary_df.columns]) + r""" \\
\midrule
\endhead
"""
            # Summary-statistics table.
            for _, row in summary_df.iterrows():
                formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
                latex_content += " & ".join(formatted_row) + r" \\ \midrule" + "\n"
            latex_content += r"""
\bottomrule
\end{longtable}

\subsection*{Identified Layers}
"""
            # Layer-boundary table, or a note when none were detected.
            if self.layers is not None and not self.layers.empty:
                latex_content += r"""
\begin{longtable}{@{}""" + "c" * len(self.layers.columns) + r"""@{}}
\toprule
""" + " & ".join([f"\\textbf{{{col}}}" for col in self.layers.columns]) + r""" \\
\midrule
\endhead
"""
                for _, row in self.layers.iterrows():
                    formatted_row = [f"{val:.2f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
                    latex_content += " & ".join(formatted_row) + r" \\ \midrule" + "\n"
                latex_content += r"""
\bottomrule
\end{longtable}
"""
            else:
                latex_content += r"No significant layer boundaries detected."

            latex_content += r"""
\subsection*{Correlation Analysis}
"""
            # Correlation table (three decimals), or a note when absent.
            if self.correlation_results is not None and not self.correlation_results.empty:
                latex_content += r"""
\begin{longtable}{@{}""" + "c" * len(self.correlation_results.columns) + r"""@{}}
\toprule
""" + " & ".join([f"\\textbf{{{col}}}" for col in self.correlation_results.columns]) + r""" \\
\midrule
\endhead
"""
                for _, row in self.correlation_results.iterrows():
                    formatted_row = [f"{val:.3f}" if isinstance(val, (int, float)) and pd.notna(val) else str(val) if pd.notna(val) else "" for val in row]
                    latex_content += " & ".join(formatted_row) + r" \\ \midrule" + "\n"
                latex_content += r"""
\bottomrule
\end{longtable}
"""
            else:
                latex_content += r"No correlation analysis data available."

            latex_content += r"""
\end{document}
"""

            with open(tex_path, 'w', encoding='utf-8') as f:
                f.write(latex_content)

            # Compile via latexmk; quoting handles spaces in the paths.
            os.system(f'latexmk -pdf -outdir="{output_dir}" "{tex_path}"')

            # latexmk's exit status is not checked directly; success is judged
            # by whether the expected PDF appeared.
            if os.path.exists(pdf_path):
                ist = pytz.timezone('Asia/Kolkata')
                timestamp = datetime.now(ist).strftime("%I:%M %p IST")
                self.status_var.set(f"Exported detailed report to {pdf_path} at {timestamp}")
                messagebox.showinfo("Success", f"Detailed report exported successfully to {pdf_path}.")
            else:
                raise Exception("PDF compilation failed.")
        except Exception as e:
            messagebox.showerror("Error", f"Failed to export detailed report: {str(e)}")
  1694.  
    def clear_tabs(self):
        """Reset every tab to its empty state and drop all loaded data.

        Clears all tree views, destroys live matplotlib canvases/figures,
        restores the placeholder labels, and nulls out the cached DataFrames.
        """
        # Empty every tree view that has been created.
        for tree in [self.raw_tree, self.quality_tree, self.analysis_tree, self.summary_tree, self.layers_tree, self.correlation_tree, self.realtime_tree]:
            if tree:
                for item in tree.get_children():
                    tree.delete(item)
        # Main plot area: destroy the canvas widget and close the figure so
        # matplotlib releases its resources.
        if self.plot_canvas:
            self.plot_canvas.get_tk_widget().destroy()
            self.plot_canvas = None
        if self.current_fig:
            plt.close(self.current_fig)
            self.current_fig = None
        self.plot_label = ttk.Label(self.scrollable_frame, text="Select a file to view visualizations.")
        self.plot_label.pack(fill=tk.BOTH, expand=True)
        # Correlation tab: hasattr() guards attributes that may not exist yet.
        if hasattr(self, 'correlation_canvas') and self.correlation_canvas:
            self.correlation_canvas.get_tk_widget().destroy()
            self.correlation_canvas = None
        if hasattr(self, 'correlation_fig') and self.correlation_fig:
            plt.close(self.correlation_fig)
            self.correlation_fig = None
        self.correlation_plot_label = ttk.Label(self.correlation_tree.master, text="Upload SPT/Sonic Log file to view correlations.")
        self.correlation_plot_label.pack(fill=tk.BOTH, expand=True)
        # Real-time tab: same guarded teardown and placeholder restore.
        if hasattr(self, 'realtime_canvas') and self.realtime_canvas:
            self.realtime_canvas.get_tk_widget().destroy()
            self.realtime_canvas = None
        if hasattr(self, 'realtime_fig') and self.realtime_fig:
            plt.close(self.realtime_fig)
            self.realtime_fig = None
        self.realtime_plot_label = ttk.Label(self.realtime_tree.master, text="Start real-time acquisition to view data.")
        self.realtime_plot_label.pack(fill=tk.BOTH, expand=True)
        # Drop all cached data so subsequent actions see a clean slate.
        self.current_data = None
        self.raw_data = None
        self.quality_issues = None
        self.layers = None
        self.correlation_results = None
        self.realtime_data = None
        self.selected_rows = {}
  1731.  
  1732. if __name__ == "__main__":
  1733.     root = tk.Tk()
  1734.     app = SeismicWaveAnalyzer(root)
  1735.     root.mainloop()
  1736.  
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement