# NOTE(review): removed non-Python paste artifacts ("Spaces:", "Runtime error" x2)
# that preceded the module source and made the file unparseable.
# backend/app/main.py
import os
import io
import json
import math
import base64
import uuid
import datetime
from typing import List, Dict, Any, Optional

from fastapi import FastAPI, HTTPException, UploadFile, File, Form, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse, FileResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
from reportlab.lib.pagesizes import A4
from reportlab.lib import colors
from reportlab.lib.units import mm
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, Image as RLImage
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
import numpy as np
import matplotlib
matplotlib.use("Agg")  # headless backend: the server has no display
import matplotlib.pyplot as plt
from PIL import Image
import pytesseract
import requests

# Optional heavy dependencies -- the app degrades gracefully without them.
try:
    from groq import Groq
    GROQ_AVAILABLE = True
except Exception:
    GROQ_AVAILABLE = False

try:
    import ee
    EE_AVAILABLE = True
except Exception:
    EE_AVAILABLE = False

# --- Configuration from env/secrets ---
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")          # Groq API key
SERVICE_ACCOUNT = os.environ.get("SERVICE_ACCOUNT")    # EE service account email
EARTH_ENGINE_KEY = os.environ.get("EARTH_ENGINE_KEY")  # EE JSON key content (full JSON text)

# --- In-memory site store (session) ---
# site_id -> site dict; each site holds every tracked field (see _new_site_template).
SITES: Dict[str, Dict[str, Any]] = {}
MAX_SITES = 4  # maximum sites allowed per session

# --- Initialize Groq client if key present ---
groq_client = None
if GROQ_AVAILABLE and GROQ_API_KEY:
    try:
        groq_client = Groq(api_key=GROQ_API_KEY)
    except Exception:
        groq_client = None

# --- Initialize Earth Engine if possible ---
EE_READY = False
if EE_AVAILABLE and SERVICE_ACCOUNT and EARTH_ENGINE_KEY:
    try:
        # The EE SDK authenticates from a key *file*, so persist the JSON
        # secret to /tmp before building the credentials.
        key_path = "/tmp/ee_service_account_key.json"
        with open(key_path, "w") as key_file:
            key_file.write(EARTH_ENGINE_KEY)
        ee.Initialize(ee.ServiceAccountCredentials(SERVICE_ACCOUNT, key_path))
        EE_READY = True
    except Exception:
        EE_READY = False

# --- App init ---
app = FastAPI(title="GeoMate V3 Backend")
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # adjust in prod
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Serve the frontend build (copied into /app/frontend_build by the Dockerfile).
# NOTE(review): mounting StaticFiles at "/" can shadow API routes registered
# afterwards -- confirm route registration order in the full application.
if os.path.isdir("/app/frontend_build"):
    app.mount("/", StaticFiles(directory="/app/frontend_build", html=True), name="frontend")

# ----------------------------
# Utility functions
# ----------------------------
def _new_site_template(name: str) -> Dict[str, Any]:
    """Return the default (all-empty) site record for a newly created site."""
    site: Dict[str, Any] = {
        "id": str(uuid.uuid4()),
        "Site Name": name,
        "Site Coordinates": "",
        "lat": None,
        "lon": None,
    }
    # Geotechnical attributes start unset until the user supplies data.
    for field in ("Load Bearing Capacity", "Skin Shear Strength",
                  "Relative Compaction", "Rate of Consolidation",
                  "Nature of Construction", "Soil Profile", "Flood Data",
                  "Seismic Data", "Topography", "GSD", "USCS", "AASHTO", "GI"):
        site[field] = None
    # Classifier / chat / report bookkeeping.
    site["classifier_inputs"] = {}
    site["classifier_decision_path"] = ""
    site["chat_history"] = []
    site["report_convo_state"] = 0
    site["map_snapshot"] = None
    site["laboratory_results"] = []
    site["created_at"] = datetime.datetime.utcnow().isoformat()
    return site
| def _interp_D(diameters: List[float], passing: List[float], target_percent: float) -> Optional[float]: | |
| """Interpolate D value (mm) for a given % passing using log-scale interpolation. | |
| diameters: list of diameters (mm) | |
| passing: list of % passing (same length) | |
| target_percent: e.g., 10 for D10 | |
| """ | |
| try: | |
| d = np.array(diameters, dtype=float) | |
| p = np.array(passing, dtype=float) | |
| # require strictly monotonic p vs d: rearrange by increasing diameter | |
| order = np.argsort(d) | |
| d_sorted = d[order] | |
| p_sorted = p[order] | |
| # If p_sorted is increasing with diameter? We expect passing to decrease with diameter. | |
| # For interpolation, we invert axes: percent passing vs log(diameter) | |
| logd = np.log10(d_sorted) | |
| # check bounds | |
| if target_percent < p_sorted.min() or target_percent > p_sorted.max(): | |
| # out of interpolation bounds -> return None | |
| return None | |
| val = np.interp(target_percent, p_sorted[::-1], 10 ** np.interp(target_percent, p_sorted[::-1], logd[::-1])) | |
| # But the above double interpolation is messy; simpler approach: | |
| # Use linear interpolation on logd vs p_sorted reversed so percent decreases with logd | |
| x = p_sorted[::-1] | |
| y = logd[::-1] | |
| logd_target = np.interp(target_percent, x, y) | |
| return float(10 ** logd_target) | |
| except Exception: | |
| return None | |
def _compute_gsd_properties(diameters: List[float], passing: List[float]) -> Dict[str, Any]:
    """Compute D10/D30/D60, Cu, Cc and render the GSD curve.

    Args:
        diameters: particle diameters in mm (descending input is tolerated;
            _interp_D sorts internally).
        passing: percent passing for each diameter.

    Returns:
        Dict with the characteristic diameters, coefficient of uniformity
        (Cu), coefficient of curvature (Cc), and the plot as a base64-encoded
        PNG under "plot_png"; on any failure returns {"error": ...}.
    """
    try:
        D10 = _interp_D(diameters, passing, 10)
        D30 = _interp_D(diameters, passing, 30)
        D60 = _interp_D(diameters, passing, 60)
        # Cu/Cc are only defined when the needed characteristic diameters exist.
        Cu = (D60 / D10) if (D10 and D60 and D10 > 0) else None
        Cc = ((D30 ** 2) / (D10 * D60)) if (D10 and D30 and D60 and D10 > 0 and D60 > 0) else None

        # plot GSD
        fig, ax = plt.subplots(figsize=(6, 3.5))
        ax.semilogx(diameters, passing, marker='o', linestyle='-')  # log x already set here
        ax.set_xlabel("Particle size (mm)")
        ax.set_ylabel("% Passing")
        ax.grid(which='both', linestyle='--', linewidth=0.5, alpha=0.8)
        # Fix: dropped the redundant ax.set_xscale('log') -- semilogx above
        # has already put the axis in log scale.
        ax.invert_xaxis()  # GSD convention: coarse sizes on the left

        # Annotate D10/D30/D60 on the curve.  Hoisted the loop-invariant
        # log10(diameters) out of the loop (it was recomputed per marker).
        # NOTE(review): np.interp assumes ascending x here, i.e. ascending
        # diameters -- matches the original behavior; confirm input order.
        logd = np.log10(diameters)
        for dval, lab in [(D10, 'D10'), (D30, 'D30'), (D60, 'D60')]:
            if dval:
                pass_at = np.interp(np.log10(dval), logd, passing)
                ax.plot(dval, pass_at, 'ro')
                ax.annotate(f"{lab}={dval:.3g}", xy=(dval, pass_at), xytext=(5, -15),
                            textcoords='offset points', color='red', fontsize=8)

        buf = io.BytesIO()
        plt.tight_layout()
        fig.savefig(buf, format='png', dpi=130)
        plt.close(fig)
        png_bytes = buf.getvalue()
        buf.close()
        return {
            "D10": D10, "D30": D30, "D60": D60,
            "Cu": Cu, "Cc": Cc,
            "plot_png": base64.b64encode(png_bytes).decode("ascii")
        }
    except Exception as e:
        return {"error": str(e)}
# ----------------------------
# Classifier logic (verbatim translation of your CLI code)
# ----------------------------
def uscs_aashto_logic(inputs: Dict[str, Any]) -> Dict[str, Any]:
    """Classify a soil under both USCS and AASHTO from index properties.

    Returns a dict with keys:
      uscs, uscs_expl, aashto, GI, decision_text, Cu, Cc,
      engineering_characteristics

    Expected keys in ``inputs`` (all optional, defaults shown below):
      opt            -- 'y' flags organic soil (classified as peat, Pt)
      P2, P4         -- percent-passing values used for the coarse/fine and
                        gravel/sand splits (0-100)
      D10, D30, D60  -- characteristic grain diameters (mm)
      LL, PL         -- Atterberg liquid and plastic limits
      nDS, nDIL, nTG -- integer field-test codes (presumably dry strength,
                        dilatancy, toughness -- TODO confirm), consulted only
                        for fine-grained soils
    """
    # read inputs with defaults; non-numeric P2/P4 silently fall back to 0.0
    opt = str(inputs.get("opt", "n")).lower()  # 'y' for organic
    try:
        P2 = float(inputs.get("P2", 0.0))
    except Exception:
        P2 = 0.0
    try:
        P4 = float(inputs.get("P4", 0.0))
    except Exception:
        P4 = 0.0
    # "or 0.0" maps an explicit None to 0.0 before conversion
    D60 = float(inputs.get("D60", 0.0) or 0.0)
    D30 = float(inputs.get("D30", 0.0) or 0.0)
    D10 = float(inputs.get("D10", 0.0) or 0.0)
    LL = float(inputs.get("LL", 0.0) or 0.0)
    PL = float(inputs.get("PL", 0.0) or 0.0)
    PI = LL - PL  # plasticity index
    # engineering characteristics (detailed mapping) keyed by soil family
    ENGINEERING_CHARACTERISTICS = {
        "Gravel": {
            "Settlement": "None",
            "Quicksand": "Impossible",
            "Frost-heaving": "None",
            "Groundwater_lowering": "Possible",
            "Cement_grouting": "Possible",
            "Silicate_bitumen_injections": "Unsuitable",
            "Compressed_air": "Possible (see notes)"
        },
        "Coarse sand": {
            "Settlement": "None",
            "Quicksand": "Impossible",
            "Frost-heaving": "None",
            "Groundwater_lowering": "Possible",
            "Cement_grouting": "Possible only if very coarse",
            "Silicate_bitumen_injections": "Suitable",
            "Compressed_air": "Suitable"
        },
        "Medium sand": {
            "Settlement": "None",
            "Quicksand": "Unlikely",
            "Frost-heaving": "None",
            "Groundwater_lowering": "Suitable",
            "Cement_grouting": "Impossible",
            "Silicate_bitumen_injections": "Suitable",
            "Compressed_air": "Suitable"
        },
        "Fine sand": {
            "Settlement": "None",
            "Quicksand": "Liable",
            "Frost-heaving": "None",
            "Groundwater_lowering": "Suitable",
            "Cement_grouting": "Impossible",
            "Silicate_bitumen_injections": "Not possible in very fine sands",
            "Compressed_air": "Suitable"
        },
        "Silt": {
            "Settlement": "Occurs",
            "Quicksand": "Liable (very coarse silts may behave differently)",
            "Frost-heaving": "Occurs",
            "Groundwater_lowering": "Generally not suitable (electro-osmosis possible)",
            "Cement_grouting": "Impossible",
            "Silicate_bitumen_injections": "Impossible",
            "Compressed_air": "Suitable"
        },
        "Clay": {
            "Settlement": "Occurs",
            "Quicksand": "Impossible",
            "Frost-heaving": "None",
            "Groundwater_lowering": "Impossible (generally)",
            "Cement_grouting": "Only in stiff fissured clay",
            "Silicate_bitumen_injections": "Impossible",
            "Compressed_air": "Used for support only in special cases"
        }
    }
    # Compute Cu (uniformity) and Cc (curvature) only when all three
    # characteristic diameters are known and positive.
    if D10 > 0 and D30 > 0 and D60 > 0:
        Cu = D60 / D10
        Cc = (D30 ** 2) / (D10 * D60)
    else:
        Cu = None
        Cc = None
    # USCS logic (verbatim)
    uscs = "Unknown"
    uscs_expl = ""
    if opt == 'y':
        # Organic soils short-circuit everything else.
        uscs = "Pt"
        uscs_expl = "Peat / organic soil β compressible, high organic content; poor engineering properties for load-bearing without special treatment."
    else:
        if P2 <= 50:
            # coarse-grained
            if P4 <= 50:
                # gravels category (from your script)
                if Cu and Cc:
                    if Cu >= 4 and 1 <= Cc <= 3:
                        uscs = "GW"
                        uscs_expl = "Well-graded gravel (good engineering properties, high strength, good drainage)."
                    else:
                        uscs = "GP"
                        uscs_expl = "Poorly-graded gravel (uniform/grading issues)."
                else:
                    # NOTE(review): this branch uses strict '<' on the A-line
                    # test while the sand branch below uses '<=' -- confirm
                    # the intended boundary behavior.
                    if PI < 4 or PI < 0.73 * (LL - 20):
                        uscs = "GM"
                        uscs_expl = "Silty gravel (fines; lower permeability)."
                    elif PI > 7 and PI > 0.73 * (LL - 20):
                        uscs = "GC"
                        uscs_expl = "Clayey gravel (clayey fines; higher plasticity)."
                    else:
                        uscs = "GM-GC"
                        uscs_expl = "Gravel with mixed silt/clay fines."
            else:
                # sands
                if Cu and Cc:
                    if Cu >= 6 and 1 <= Cc <= 3:
                        uscs = "SW"
                        uscs_expl = "Well-graded sand (good compaction, drainage)."
                    else:
                        uscs = "SP"
                        uscs_expl = "Poorly-graded sand."
                else:
                    if PI < 4 or PI <= 0.73 * (LL - 20):
                        uscs = "SM"
                        uscs_expl = "Silty sand (low-plasticity fines)."
                    elif PI > 7 and PI > 0.73 * (LL - 20):
                        uscs = "SC"
                        uscs_expl = "Clayey sand (higher-plasticity fines)."
                    else:
                        uscs = "SM-SC"
                        uscs_expl = "Transition between silty and clayey sand."
        else:
            # fine-grained soils: field-test codes only matter here
            nDS = int(inputs.get("nDS", 5))
            nDIL = int(inputs.get("nDIL", 6))
            nTG = int(inputs.get("nTG", 6))
            if LL < 50:
                if 20 <= LL < 50 and PI <= 0.73 * (LL - 20):
                    # NOTE(review): the ML and OL tests share
                    # 'nDIL == 3 or nTG == 3', so OL is only reachable via
                    # nDS == 3 -- confirm these code values.
                    if nDS == 1 or nDIL == 3 or nTG == 3:
                        uscs = "ML"
                        uscs_expl = "Silt (low plasticity)"
                    elif nDS == 3 or nDIL == 3 or nTG == 3:
                        uscs = "OL"
                        uscs_expl = "Organic silt (low plasticity)"
                    else:
                        uscs = "ML-OL"
                        uscs_expl = "Mixed silt/organic silt"
                # NOTE(review): 0.72 below differs from the 0.73 A-line
                # factor used everywhere else -- confirm not a typo.
                elif 10 <= LL <= 30 and 4 <= PI <= 7 and PI > 0.72 * (LL - 20):
                    if nDS == 1 or nDIL == 1 or nTG == 1:
                        uscs = "ML"
                        uscs_expl = "Silt"
                    elif nDS == 2 or nDIL == 2 or nTG == 2:
                        uscs = "CL"
                        uscs_expl = "Clay (low plasticity)"
                    else:
                        uscs = "ML-CL"
                        uscs_expl = "Mixed silt/clay"
                else:
                    uscs = "CL"
                    uscs_expl = "Clay (low plasticity)"
            else:
                if PI < 0.73 * (LL - 20):
                    # NOTE(review): MH and OH both test nTG == 4, so the OH
                    # branch can never be selected via nTG -- confirm.
                    if nDS == 3 or nDIL == 4 or nTG == 4:
                        uscs = "MH"
                        uscs_expl = "High plasticity silt"
                    elif nDS == 2 or nDIL == 2 or nTG == 4:
                        uscs = "OH"
                        uscs_expl = "Organic silt/clay (high plasticity)"
                    else:
                        uscs = "MH-OH"
                        uscs_expl = "Mixed high-plasticity silt/organic"
                else:
                    uscs = "CH"
                    uscs_expl = "Clay (high plasticity)"
    # AASHTO logic
    if P2 <= 35:
        # granular materials
        if P2 <= 15 and P4 <= 30 and PI <= 6:
            aashto = "A-1-a"
        elif P2 <= 25 and P4 <= 50 and PI <= 6:
            aashto = "A-1-b"
        # NOTE(review): 'P2 <= 35' is always true inside this branch; the
        # effective test is just 'P4 > 0'.
        elif P2 <= 35 and P4 > 0:
            if LL <= 40 and PI <= 10:
                aashto = "A-2-4"
            elif LL >= 41 and PI <= 10:
                aashto = "A-2-5"
            elif LL <= 40 and PI >= 11:
                aashto = "A-2-6"
            elif LL >= 41 and PI >= 11:
                aashto = "A-2-7"
            else:
                aashto = "A-2"
        else:
            aashto = "A-3"
    else:
        # silt-clay materials
        if LL <= 40 and PI <= 10:
            aashto = "A-4"
        elif LL >= 41 and PI <= 10:
            aashto = "A-5"
        elif LL <= 40 and PI >= 11:
            aashto = "A-6"
        else:
            aashto = "A-7-5" if PI <= (LL - 30) else "A-7-6"
    # Group Index (GI): each term clamped to its standard range before the
    # weighted sum GI = 0.2a + 0.005ac + 0.01bd.
    a = P2 - 35
    a = 0 if a < 0 else (40 if a > 40 else a)
    b = P2 - 15
    b = 0 if b < 0 else (40 if b > 40 else b)
    c = LL - 40
    c = 0 if c < 0 else (20 if c > 20 else c)
    d = PI - 10
    d = 0 if d < 0 else (20 if d > 20 else d)
    GI = math.floor(0.2 * a + 0.005 * a * c + 0.01 * b * d)
    aashto_expl = f"{aashto} (Group Index = {GI})"
    # engineering characteristics guess: all gravels/sands are approximated
    # by the "Coarse sand" row; everything else falls back to "Silt".
    if uscs.startswith("G") or uscs.startswith("S"):
        eng_char = ENGINEERING_CHARACTERISTICS.get("Coarse sand")  # approximate
    elif uscs.startswith(("M","C","O","H")):
        eng_char = ENGINEERING_CHARACTERISTICS.get("Silt")
    else:
        eng_char = ENGINEERING_CHARACTERISTICS.get("Silt")
    # decision text summarizing applied rules (Cu/Cc of None print as N/A;
    # Cu == 0 cannot occur because Cu is only set when D10 > 0)
    dt_lines = [
        f"USCS classification: {uscs} β {uscs_expl}",
        f"AASHTO classification: {aashto_expl}",
        f"Cu = {Cu:.3f}" if Cu else "Cu = N/A",
        f"Cc = {Cc:.3f}" if Cc else "Cc = N/A"
    ]
    decision_text = "\n".join(dt_lines)
    return {
        "uscs": uscs,
        "uscs_expl": uscs_expl,
        "aashto": aashto,
        "GI": GI,
        "decision_text": decision_text,
        "Cu": Cu,
        "Cc": Cc,
        "engineering_characteristics": eng_char
    }
# ----------------------------
# API Models
# ----------------------------
class ClassifyInput(BaseModel):
    """Request body for the soil-classification endpoint.

    Mirrors the keys consumed by ``uscs_aashto_logic``; every field is
    optional and defaults to that function's neutral value.
    """
    opt: Optional[str] = "n"    # 'y' marks organic soil (classified as Pt)
    P2: Optional[float] = 0.0   # percent passing (coarse/fine split)
    P4: Optional[float] = 0.0   # percent passing (gravel/sand split)
    D60: Optional[float] = 0.0  # characteristic grain diameters (mm)
    D30: Optional[float] = 0.0
    D10: Optional[float] = 0.0
    LL: Optional[float] = 0.0   # liquid limit
    PL: Optional[float] = 0.0   # plastic limit
    nDS: Optional[int] = 5      # field-test codes used for fine-grained soils
    nDIL: Optional[int] = 6     # (presumably dry strength / dilatancy /
    nTG: Optional[int] = 6      # toughness -- TODO confirm code meanings)
# ----------------------------
# API Endpoints
# ----------------------------
# NOTE(review): no FastAPI route decorators are visible in this file; confirm
# these coroutines are registered with `app` elsewhere (e.g. a router module).
async def health():
    """Report service liveness plus optional-integration status."""
    payload = {"status": "ok"}
    payload["groq_available"] = groq_client is not None
    payload["earth_engine_ready"] = EE_READY
    return payload
async def list_sites():
    """Return every site record currently held in the in-memory store."""
    return {"sites": [site for site in SITES.values()]}
async def create_site(name: str = Form(...)):
    """Create a new empty site, enforcing the session-wide site cap."""
    if len(SITES) >= MAX_SITES:
        raise HTTPException(status_code=400, detail=f"Max {MAX_SITES} sites allowed.")
    new_site = _new_site_template(name)
    SITES[new_site["id"]] = new_site
    return {"site": new_site}
async def update_site(site_id: str, payload: Dict[str, Any]):
    """Shallow-merge ``payload`` into an existing site record."""
    site = SITES.get(site_id)
    if site is None:
        raise HTTPException(status_code=404, detail="Site not found")
    site.update(payload)
    return {"site": site}
async def get_site(site_id: str):
    """Fetch a single site record by id."""
    try:
        return {"site": SITES[site_id]}
    except KeyError:
        raise HTTPException(status_code=404, detail="Site not found")
async def delete_site(site_id: str):
    """Remove a site if it exists; deletion is idempotent."""
    SITES.pop(site_id, None)
    return {"deleted": site_id}
async def classify_endpoint(payload: ClassifyInput):
    """Run the USCS/AASHTO classifier on the posted parameters."""
    # NOTE(review): .dict() is the pydantic v1 spelling (v2 prefers
    # .model_dump()) -- confirm the pinned pydantic version.
    params = payload.dict()
    # The caller is responsible for persisting the result onto a real site.
    return {"result": uscs_aashto_logic(params)}
async def gsd_endpoint(payload: Dict[str, Any]):
    """Compute GSD properties. Payload: { 'diameters': [..], 'passing': [..] }"""
    diameters = payload.get("diameters", [])
    percents = payload.get("passing", [])
    if not diameters or not percents:
        raise HTTPException(status_code=400, detail="Provide diameters and passing arrays.")
    return {"gsd": _compute_gsd_properties(diameters, percents)}
def _parse_geotech_text(text: str) -> Dict[str, Any]:
    """Heuristically extract common lab values (LL, PL, D10/D30/D60, sample
    percentages) from OCR'd text.

    Fixes over the original inline version: the ``import re`` buried
    mid-function is hoisted to the top; the LL/PL duplication is collapsed
    into one loop; the dead guard ``"passing_sample" not in parsed`` (which
    never matched the actual key "passing_percent_sample") is removed.
    """
    import re
    parsed: Dict[str, Any] = {}
    # Atterberg limits: "<KEY>: <number>" / "<KEY>= <number>"
    for key in ("LL", "PL"):
        m = re.search(rf"{key}[:=\s]*([0-9]{{1,3}}\.?\d*)", text, re.IGNORECASE)
        if m:
            parsed[key] = float(m.group(1))
    # Characteristic diameters D10/D30/D60
    for key in ("D10", "D30", "D60"):
        m = re.search(rf"{key}[:=\s]*([0-9]*\.?[0-9]+)", text, re.IGNORECASE)
        if m:
            parsed[key] = float(m.group(1))
    # Attach the first few "NN%" figures so the user can inspect them.
    pct_matches = re.findall(r"([0-9]{1,3}\.?\d*)\s*%", text)
    if pct_matches:
        parsed["passing_percent_sample"] = pct_matches[:10]
    return parsed

async def ocr_endpoint(file: UploadFile = File(...)):
    """OCR an uploaded image and heuristically parse lab values from it.

    Raises:
        HTTPException(400): when the upload cannot be decoded as an image.
    """
    raw = await file.read()
    try:
        img = Image.open(io.BytesIO(raw)).convert("RGB")
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Cannot open image: {e}")
    text = pytesseract.image_to_string(img)
    return {"text": text, "parsed": _parse_geotech_text(text)}
async def locator_endpoint(payload: Dict[str, Any]):
    """
    Accepts:
      - { "lat": float, "lon": float } OR
      - { "geojson": {...} }
    Returns:
      - topography (elevation)
      - seismic events (USGS past 20 years within radius)
      - basic flood indicator (based on precipitation trend via CHIRPS if EE enabled)
      - NDVI / landcover sample (if EE enabled)
    """
    lat = payload.get("lat")
    lon = payload.get("lon")
    geojson = payload.get("geojson")
    # Fix: compare against None explicitly -- lat=0 or lon=0 (equator /
    # prime meridian) are valid coordinates but falsy, so the original
    # `not (lat and lon)` rejected them.
    if (lat is None or lon is None) and not geojson:
        raise HTTPException(status_code=400, detail="Provide lat/lon or geojson.")
    # Fix: compute the 20-year window *outside* the USGS try block -- the
    # CHIRPS query further down also uses start/end and would otherwise hit
    # a NameError if the USGS section failed early.
    end = datetime.date.today()
    start = end - datetime.timedelta(days=365 * 20)
    # get seismic data via USGS API (past 20 years)
    try:
        usgs_url = "https://earthquake.usgs.gov/fdsnws/event/1/query"
        params = {
            "format": "geojson",
            "starttime": start.isoformat(),
            "endtime": end.isoformat(),
            "latitude": lat,
            "longitude": lon,
            "maxradiuskm": 100  # 100 km radius; you can adjust
        }
        r = requests.get(usgs_url, params=params, timeout=30)
        events = r.json().get("features", [])
        quake_count = len(events)
        max_mag = max([ev["properties"].get("mag") or 0 for ev in events]) if events else 0
    except Exception:
        # Best-effort: a failed USGS call degrades to "unknown seismicity".
        quake_count = None
        max_mag = None
    # Elevation / NDVI / flood sampling only when Earth Engine is configured.
    topo = None
    ndvi_summary = None
    flood_indicator = None
    if EE_READY:
        try:
            point = ee.Geometry.Point([lon, lat])
            srtm = ee.Image("USGS/SRTMGL1_003")
            elev = srtm.sample(point, 30).first().get("elevation").getInfo()
            topo = {"elevation_m": elev}
            # NDVI: one-year MODIS mean sampled at the point
            try:
                collection = ee.ImageCollection("MODIS/006/MOD13A1").select("NDVI").filterDate((datetime.date.today() - datetime.timedelta(days=365)), datetime.date.today())
                mean_ndvi = collection.mean().sample(point, 30).first().get("NDVI").getInfo()
                ndvi_summary = {"mean_ndvi": mean_ndvi}
            except Exception:
                ndvi_summary = None
            # flood estimation: use annual rainfall statistics from CHIRPS (example)
            try:
                chirps = ee.ImageCollection("UCSB-CHG/CHIRPS/DAILY").filterDate(start.isoformat(), end.isoformat())
                annual_sum = chirps.reduce(ee.Reducer.sum())
                sample = annual_sum.sample(point, 1000).first().get("precipitation_sum").getInfo()
                flood_indicator = {"annual_precip_sum": sample}
            except Exception:
                flood_indicator = None
        except Exception:
            topo = None
    return {
        "seismic": {"count_last_20yrs": quake_count, "max_magnitude": max_mag},
        "topography": topo,
        "ndvi": ndvi_summary,
        "flood_indicator": flood_indicator
    }
async def chat_endpoint(payload: Dict[str, Any]):
    """
    Basic chat endpoint that forwards to Groq if available.
    payload: { "site_id": str, "message": str, "history": [ {role, content}, ... ] }
    """
    message = payload.get("message", "")
    site_id = payload.get("site_id")
    history = payload.get("history", [])

    def _record(role: str, text: str) -> None:
        # Append a turn to the site's chat history when a known site is given.
        if site_id and site_id in SITES:
            SITES[site_id]["chat_history"].append(
                {"role": role, "text": text, "ts": datetime.datetime.utcnow().isoformat()})

    _record("user", message)
    if groq_client:
        try:
            # system prompt + prior turns + the new user message
            messages = [{"role": "system", "content": "You are GeoMate, a friendly professional geotechnical engineer assistant."}]
            messages.extend(
                {"role": turn.get("role", "user"), "content": turn.get("content", "")}
                for turn in history
            )
            messages.append({"role": "user", "content": message})
            completion = groq_client.chat.completions.create(
                model="meta-llama/llama-4-maverick-17b-128e-instruct",
                messages=messages,
                temperature=0.2
            )
            reply = completion.choices[0].message.content
        except Exception as e:
            reply = f"[Groq error] {e}"
    else:
        # Fallback simple rule-based reply
        reply = f"GeoMate (local fallback): Received your question: '{message[:200]}'. Groq is not configured on the server."
    _record("assistant", reply)
    # Auto-extraction of parameters from the reply is intentionally left as
    # a placeholder (update_site_description_from_chat would hook in here).
    return {"reply": reply}
# ----------------------------
# PDF report generator
# ----------------------------
def _build_pdf_report_bytes(site: Dict[str, Any], include_classification_only: bool = False) -> bytes:
    """Render a site report PDF with ReportLab and return it as bytes.

    Args:
        site: in-memory site record to report on.
        include_classification_only: when True, emit only the abridged
            classification section instead of the full report.

    Improvements: the duplicated build/serialize tail and the duplicated
    GSD-image decoding of the original are factored into local helpers.
    """
    buf = io.BytesIO()
    doc = SimpleDocTemplate(buf, pagesize=A4, leftMargin=18*mm, rightMargin=18*mm, topMargin=18*mm, bottomMargin=18*mm)
    styles = getSampleStyleSheet()
    elems = []
    title_style = ParagraphStyle("title", parent=styles["Title"], fontSize=20, alignment=1, textColor=colors.HexColor("#FF7A00"))
    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14, textColor=colors.HexColor("#1F4E79"))
    body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10)

    def _gsd_image() -> Optional[RLImage]:
        # Decode the stored base64 GSD plot, if any, into a flowable image.
        gsd = site.get("GSD")
        if gsd and isinstance(gsd, dict) and gsd.get("plot_png"):
            return RLImage(io.BytesIO(base64.b64decode(gsd["plot_png"])), width=160*mm, height=80*mm)
        return None

    def _finish() -> bytes:
        # Shared tail: lay out the document and hand back the PDF bytes.
        doc.build(elems)
        pdf_bytes = buf.getvalue()
        buf.close()
        return pdf_bytes

    # Cover
    elems.append(Paragraph("GEOTECHNICAL INVESTIGATION REPORT", title_style))
    elems.append(Spacer(1, 6))
    elems.append(Paragraph(f"Project: {site.get('Site Name', 'β')}", body))
    elems.append(Paragraph(f"Generated: {datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC')}", body))
    elems.append(Spacer(1, 8))
    # Summary
    elems.append(Paragraph("SUMMARY", h1))
    summary_lines = [
        f"Site name: {site.get('Site Name', 'β')}",
        f"Coordinates: {site.get('Site Coordinates', '')} (lat:{site.get('lat')}, lon:{site.get('lon')})",
        f"Available data: Laboratory results: {len(site.get('laboratory_results', []))} samples",
    ]
    for ln in summary_lines:
        elems.append(Paragraph(ln, body))
    elems.append(Spacer(1, 8))

    # Abridged, classification-only report ends here.
    if include_classification_only:
        elems.append(Paragraph("CLASSIFICATION (Abridged)", h1))
        elems.append(Paragraph(site.get("classifier_decision_path", "Decision path not available."), body))
        img = _gsd_image()
        if img is not None:
            elems.append(img)
        return _finish()

    # Full report sections
    elems.append(Paragraph("1.0 INTRODUCTION", h1))
    elems.append(Paragraph("This report was prepared by GeoMate AI as a Phase 1 geotechnical investigation summary.", body))
    elems.append(Paragraph("2.0 SITE DESCRIPTION AND GEOLOGY", h1))
    elems.append(Paragraph(site.get("Soil Profile", "Site description not provided."), body))
    # Field and lab data
    elems.append(Paragraph("3.0 FIELD INVESTIGATION & LABORATORY TESTING", h1))
    lab_rows = site.get("laboratory_results", [])
    if lab_rows:
        headers = ["Sample ID", "Material", "LL", "PI", "LS", "%Clay", "%Silt", "%Sand", "%Gravel"]
        data = [headers]
        for r in lab_rows:
            data.append([r.get("sampleId", "β"), r.get("material", "β"), r.get("liquidLimit", "β"), r.get("plasticityIndex", "β"),
                         r.get("linearShrinkage", "β"), r.get("percentClay", "β"), r.get("percentSilt", "β"),
                         r.get("percentSand", "β"), r.get("percentGravel", "β")])
        t = Table(data, repeatRows=1, colWidths=[40*mm, 40*mm, 15*mm, 15*mm, 12*mm, 12*mm, 12*mm, 12*mm, 12*mm])
        t.setStyle(TableStyle([
            ("GRID", (0, 0), (-1, -1), 0.5, colors.grey),
            ("BACKGROUND", (0, 0), (-1, 0), colors.HexColor("#1F4E79")),
            ("TEXTCOLOR", (0, 0), (-1, 0), colors.white),
        ]))
        elems.append(t)
    else:
        elems.append(Paragraph("No laboratory results provided.", body))
    # GSD plot include
    img = _gsd_image()
    if img is not None:
        elems.append(Spacer(1, 8))
        elems.append(Paragraph("GSD Curve", h1))
        elems.append(img)
    # maps snapshot
    if site.get("map_snapshot"):
        elems.append(Spacer(1, 8))
        elems.append(Paragraph("Map snapshot", h1))
        try:
            elems.append(RLImage(io.BytesIO(base64.b64decode(site["map_snapshot"])), width=160*mm, height=100*mm))
        except Exception:
            elems.append(Paragraph("Map snapshot could not be decoded.", body))
    # Evaluation & recommendations
    elems.append(Spacer(1, 8))
    elems.append(Paragraph("4.0 EVALUATION OF GEOTECHNICAL PROPERTIES", h1))
    if site.get("classifier_decision_path"):
        elems.append(Paragraph(site["classifier_decision_path"], body))
    else:
        elems.append(Paragraph("Classification details not provided.", body))
    elems.append(Spacer(1, 8))
    elems.append(Paragraph("5.0 RECOMMENDATIONS", h1))
    recs = [
        "Ensure positive surface drainage away from structures.",
        "Remove unsuitable fill from beneath foundation footprints.",
        "For lightly loaded structures, an allowable bearing pressure of 100 - 150 kPa may be used after further assessment.",
        "For heavily loaded structures consider piled foundations if poor soils extend beyond 2 m."
    ]
    for rec in recs:
        elems.append(Paragraph(rec, body))
    return _finish()
async def report_endpoint(site_id: str = Form(...), classification_only: bool = Form(False)):
    """Generate the PDF report for a site and return it as a file download."""
    try:
        site = SITES[site_id]
    except KeyError:
        raise HTTPException(status_code=404, detail="Site not found")
    pdf_bytes = _build_pdf_report_bytes(site, include_classification_only=classification_only)
    # Spool the PDF to /tmp so FileResponse can stream it from disk.
    out_path = f"/tmp/geomate_report_{site_id}.pdf"
    with open(out_path, "wb") as pdf_file:
        pdf_file.write(pdf_bytes)
    return FileResponse(out_path, media_type="application/pdf", filename=f"{site.get('Site Name','report')}.pdf")
# End of backend file