Update backend/app/main.py
backend/app/main.py (CHANGED, +266 -1)
@@ -563,4 +563,269 @@ async def ocr_endpoint(file: UploadFile = File(...)):
         if m:
             parsed[key] = float(m.group(1))
     # % passing pattern
-    pct_matches = re.findall(r"([0-9]{1,3}\.?\
+    pct_matches = re.findall(r"([0-9]{1,3}\.?\d*)\s*%", text)
+    if pct_matches and "passing_sample" not in parsed:
+        # just attach the first few percentages for user to inspect
+        parsed["passing_percent_sample"] = pct_matches[:10]
+    return {"text": text, "parsed": parsed}
+
+
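For reference, a quick standalone check of what the new percent-passing pattern captures; the input string below is illustrative and not part of the commit:

import re

text = "Passing 4.75 mm sieve: 92.5 %  Passing 0.075 mm sieve: 12%"
print(re.findall(r"([0-9]{1,3}\.?\d*)\s*%", text))
# -> ['92.5', '12']  (only numbers directly followed by a percent sign are captured)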
+@app.post("/api/locator")
+async def locator_endpoint(payload: Dict[str, Any]):
+    """
+    Accepts:
+      - { "lat": float, "lon": float } OR
+      - { "geojson": {...} }
+    Returns:
+      - topography (elevation)
+      - seismic events (USGS, past 20 years within radius)
+      - basic flood indicator (CHIRPS precipitation, if EE enabled)
+      - NDVI / landcover sample (if EE enabled)
+    """
+    lat = payload.get("lat")
+    lon = payload.get("lon")
+    geojson = payload.get("geojson")
+    if (lat is None or lon is None) and not geojson:
+        raise HTTPException(status_code=400, detail="Provide lat/lon or geojson.")
+    # NOTE: the lookups below use lat/lon; a geojson-only payload currently returns null fields.
+
+    # Seismic data via the USGS event API (past 20 years)
+    end = datetime.date.today()
+    start = end - datetime.timedelta(days=365 * 20)
+    try:
+        usgs_url = "https://earthquake.usgs.gov/fdsnws/event/1/query"
+        params = {
+            "format": "geojson",
+            "starttime": start.isoformat(),
+            "endtime": end.isoformat(),
+            "latitude": lat,
+            "longitude": lon,
+            "maxradiuskm": 100,  # 100 km radius; adjust as needed
+        }
+        r = requests.get(usgs_url, params=params, timeout=30)
+        usgs = r.json()
+        events = usgs.get("features", [])
+        quake_count = len(events)
+        max_mag = max([ev["properties"].get("mag") or 0 for ev in events]) if events else 0
+    except Exception:
+        quake_count = None
+        max_mag = None
+
+    # Elevation via Earth Engine / SRTM if EE is ready
+    topo = None
+    ndvi_summary = None
+    flood_indicator = None
+    if EE_READY:
+        try:
+            point = ee.Geometry.Point([lon, lat])
+            srtm = ee.Image("USGS/SRTMGL1_003")
+            elev = srtm.sample(point, 30).first().get("elevation").getInfo()
+            topo = {"elevation_m": elev}
+            # NDVI: sample MODIS over the last year
+            try:
+                collection = ee.ImageCollection("MODIS/006/MOD13A1").select("NDVI").filterDate(
+                    (end - datetime.timedelta(days=365)).isoformat(), end.isoformat())
+                mean_ndvi = collection.mean().sample(point, 30).first().get("NDVI").getInfo()
+                ndvi_summary = {"mean_ndvi": mean_ndvi}
+            except Exception:
+                ndvi_summary = None
+            # Flood indicator: annual rainfall total from CHIRPS (simple heuristic)
+            try:
+                chirps = ee.ImageCollection("UCSB-CHG/CHIRPS/DAILY").filterDate(
+                    (end - datetime.timedelta(days=365)).isoformat(), end.isoformat())
+                annual_sum = chirps.reduce(ee.Reducer.sum())
+                sample = annual_sum.sample(point, 1000).first().get("precipitation_sum").getInfo()
+                flood_indicator = {"annual_precip_sum": sample}
+            except Exception:
+                flood_indicator = None
+        except Exception:
+            topo = None
+
+    return {
+        "seismic": {"count_last_20yrs": quake_count, "max_magnitude": max_mag},
+        "topography": topo,
+        "ndvi": ndvi_summary,
+        "flood_indicator": flood_indicator
+    }
+
+
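For clarity, a minimal client-side sketch of how this endpoint is intended to be called; the base URL and coordinates below are illustrative assumptions, not part of the commit:

import requests

resp = requests.post(
    "http://localhost:8000/api/locator",   # assumed local dev URL
    json={"lat": 6.5244, "lon": 3.3792},   # any point of interest; {"geojson": {...}} is also accepted
    timeout=60,
)
print(resp.json())
# -> {"seismic": {...}, "topography": {...}, "ndvi": {...}, "flood_indicator": {...}}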
+@app.post("/api/chat")
+async def chat_endpoint(payload: Dict[str, Any]):
+    """
+    Basic chat endpoint that forwards to Groq if available.
+    payload: { "site_id": str, "message": str, "history": [ {role, content}, ... ] }
+    """
+    message = payload.get("message", "")
+    site_id = payload.get("site_id")
+    history = payload.get("history", [])
+
+    # Save user message into site chat history if provided
+    if site_id and site_id in SITES:
+        SITES[site_id]["chat_history"].append({"role": "user", "text": message, "ts": datetime.datetime.utcnow().isoformat()})
+
+    # If Groq is configured
+    if groq_client:
+        try:
+            # format messages: system + history + user
+            messages = [{"role": "system", "content": "You are GeoMate, a friendly professional geotechnical engineer assistant."}]
+            for m in history:
+                messages.append({"role": m.get("role", "user"), "content": m.get("content", "")})
+            messages.append({"role": "user", "content": message})
+            completion = groq_client.chat.completions.create(
+                model="meta-llama/llama-4-maverick-17b-128e-instruct",
+                messages=messages,
+                temperature=0.2
+            )
+            reply = completion.choices[0].message.content
+        except Exception as e:
+            reply = f"[Groq error] {e}"
+    else:
+        # Fallback: simple canned reply when Groq is not configured
+        reply = f"GeoMate (local fallback): Received your question: '{message[:200]}'. Groq is not configured on the server."
+
+    # Save assistant reply
+    if site_id and site_id in SITES:
+        SITES[site_id]["chat_history"].append({"role": "assistant", "text": reply, "ts": datetime.datetime.utcnow().isoformat()})
+    # attempt to auto-extract parameters from reply (placeholder)
+    # update_site_description_from_chat logic could be implemented here
+    return {"reply": reply}
+
+
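A minimal sketch of the expected request shape for this endpoint; the URL and site_id are illustrative, and the site must already exist in SITES for the chat history to be stored:

import requests

payload = {
    "site_id": "demo-site",   # optional; only logged if the id exists in SITES
    "message": "What does a plasticity index of 18 suggest for a shallow foundation?",
    "history": [],            # previous turns as {"role": ..., "content": ...} dicts
}
resp = requests.post("http://localhost:8000/api/chat", json=payload, timeout=60)
print(resp.json()["reply"])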
+# ----------------------------
+# PDF report generator
+# ----------------------------
+def _build_pdf_report_bytes(site: Dict[str, Any], include_classification_only: bool = False) -> bytes:
+    """
+    Build a professional PDF as bytes. Uses ReportLab.
+    If include_classification_only is True -> generate a classification-only report.
+    """
+    buf = io.BytesIO()
+    doc = SimpleDocTemplate(buf, pagesize=A4, leftMargin=18*mm, rightMargin=18*mm, topMargin=18*mm, bottomMargin=18*mm)
+    styles = getSampleStyleSheet()
+    elems = []
+
+    title_style = ParagraphStyle("title", parent=styles["Title"], fontSize=20, alignment=1, textColor=colors.HexColor("#FF7A00"))
+    h1 = ParagraphStyle("h1", parent=styles["Heading1"], fontSize=14, textColor=colors.HexColor("#1F4E79"))
+    body = ParagraphStyle("body", parent=styles["BodyText"], fontSize=10)
+
+    # Cover
+    elems.append(Paragraph("GEOTECHNICAL INVESTIGATION REPORT", title_style))
+    elems.append(Spacer(1, 6))
+    elems.append(Paragraph(f"Project: {site.get('Site Name', '-')}", body))
+    elems.append(Paragraph(f"Generated: {datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC')}", body))
+    elems.append(Spacer(1, 8))
+
+    # Summary
+    elems.append(Paragraph("SUMMARY", h1))
+    summary_lines = [
+        f"Site name: {site.get('Site Name', '-')}",
+        f"Coordinates: {site.get('Site Coordinates', '')} (lat:{site.get('lat')}, lon:{site.get('lon')})",
+        f"Available data: Laboratory results: {len(site.get('laboratory_results', []))} samples",
+    ]
+    for ln in summary_lines:
+        elems.append(Paragraph(ln, body))
+    elems.append(Spacer(1, 8))
+
+    # Classification-only (abridged) report: build and return early
+    if include_classification_only:
+        elems.append(Paragraph("CLASSIFICATION (Abridged)", h1))
+        elems.append(Paragraph(site.get("classifier_decision_path", "Decision path not available."), body))
+        # attach GSD image if present
+        gsd = site.get("GSD")
+        if gsd and isinstance(gsd, dict) and gsd.get("plot_png"):
+            pngdata = base64.b64decode(gsd["plot_png"])
+            img = io.BytesIO(pngdata)
+            rlimg = RLImage(img, width=160*mm, height=80*mm)
+            elems.append(rlimg)
+        doc.build(elems)
+        pdf_bytes = buf.getvalue()
+        buf.close()
+        return pdf_bytes
+
+    # Full report sections
+    elems.append(Paragraph("1.0 INTRODUCTION", h1))
+    elems.append(Paragraph("This report was prepared by GeoMate AI as a Phase 1 geotechnical investigation summary.", body))
+
+    elems.append(Paragraph("2.0 SITE DESCRIPTION AND GEOLOGY", h1))
+    elems.append(Paragraph(site.get("Soil Profile", "Site description not provided."), body))
+
+    # Field and lab data
+    elems.append(Paragraph("3.0 FIELD INVESTIGATION & LABORATORY TESTING", h1))
+    # lab table if present
+    lab_rows = site.get("laboratory_results", [])
+    if lab_rows:
+        headers = ["Sample ID", "Material", "LL", "PI", "LS", "%Clay", "%Silt", "%Sand", "%Gravel"]
+        data = [headers]
+        for r in lab_rows:
+            data.append([r.get("sampleId", "-"), r.get("material", "-"), r.get("liquidLimit", "-"), r.get("plasticityIndex", "-"),
+                         r.get("linearShrinkage", "-"), r.get("percentClay", "-"), r.get("percentSilt", "-"),
+                         r.get("percentSand", "-"), r.get("percentGravel", "-")])
+        t = Table(data, repeatRows=1, colWidths=[40*mm, 40*mm, 15*mm, 15*mm, 12*mm, 12*mm, 12*mm, 12*mm, 12*mm])
+        t.setStyle(TableStyle([
+            ("GRID", (0, 0), (-1, -1), 0.5, colors.grey),
+            ("BACKGROUND", (0, 0), (-1, 0), colors.HexColor("#1F4E79")),
+            ("TEXTCOLOR", (0, 0), (-1, 0), colors.white),
+        ]))
+        elems.append(t)
+    else:
+        elems.append(Paragraph("No laboratory results provided.", body))
+
+    # GSD plot, if present
+    gsd = site.get("GSD")
+    if gsd and isinstance(gsd, dict) and gsd.get("plot_png"):
+        elems.append(Spacer(1, 8))
+        elems.append(Paragraph("GSD Curve", h1))
+        pngdata = base64.b64decode(gsd["plot_png"])
+        img = io.BytesIO(pngdata)
+        rlimg = RLImage(img, width=160*mm, height=80*mm)
+        elems.append(rlimg)
+
+    # Map snapshot, if present
+    if site.get("map_snapshot"):
+        elems.append(Spacer(1, 8))
+        elems.append(Paragraph("Map snapshot", h1))
+        try:
+            pngdata = base64.b64decode(site["map_snapshot"])
+            img = io.BytesIO(pngdata)
+            rlimg = RLImage(img, width=160*mm, height=100*mm)
+            elems.append(rlimg)
+        except Exception:
+            elems.append(Paragraph("Map snapshot could not be decoded.", body))
+
+    # Evaluation & recommendations
+    elems.append(Spacer(1, 8))
+    elems.append(Paragraph("4.0 EVALUATION OF GEOTECHNICAL PROPERTIES", h1))
+    # Use classifier decision if present
+    if site.get("classifier_decision_path"):
+        elems.append(Paragraph(site["classifier_decision_path"], body))
+    else:
+        elems.append(Paragraph("Classification details not provided.", body))
+
+    elems.append(Spacer(1, 8))
+    elems.append(Paragraph("5.0 RECOMMENDATIONS", h1))
+    recs = [
+        "Ensure positive surface drainage away from structures.",
+        "Remove unsuitable fill from beneath foundation footprints.",
+        "For lightly loaded structures, an allowable bearing pressure of 100 - 150 kPa may be used after further assessment.",
+        "For heavily loaded structures consider piled foundations if poor soils extend beyond 2 m."
+    ]
+    for r in recs:
+        elems.append(Paragraph(r, body))
+
+    doc.build(elems)
+    pdf_bytes = buf.getvalue()
+    buf.close()
+    return pdf_bytes
+
+
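As a sanity check on the builder itself, a minimal sketch that calls it directly (e.g. from a session with the backend module imported); the keys mirror those the function reads above, and all values are made up for illustration:

demo_site = {
    "Site Name": "Demo Site",
    "Site Coordinates": "6.5244, 3.3792",
    "lat": 6.5244,
    "lon": 3.3792,
    "Soil Profile": "Firm reddish-brown sandy clay over weathered rock (illustrative).",
    "laboratory_results": [],
    "classifier_decision_path": "Example decision path text.",
}
with open("/tmp/demo_report.pdf", "wb") as f:
    f.write(_build_pdf_report_bytes(demo_site))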
+@app.post("/api/report")
+async def report_endpoint(site_id: str = Form(...), classification_only: bool = Form(False)):
+    if site_id not in SITES:
+        raise HTTPException(status_code=404, detail="Site not found")
+    site = SITES[site_id]
+    pdf_bytes = _build_pdf_report_bytes(site, include_classification_only=classification_only)
+    # write file to /tmp
+    out_path = f"/tmp/geomate_report_{site_id}.pdf"
+    with open(out_path, "wb") as f:
+        f.write(pdf_bytes)
+    return FileResponse(out_path, media_type="application/pdf", filename=f"{site.get('Site Name','report')}.pdf")
+
+
+# End of backend file
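Finally, a minimal client-side sketch for the report endpoint; note that it declares Form(...) parameters, so the request must be form-encoded rather than JSON (the URL and site_id below are illustrative):

import requests

resp = requests.post(
    "http://localhost:8000/api/report",
    data={"site_id": "demo-site", "classification_only": "false"},  # form fields, not a JSON body
    timeout=120,
)
with open("geomate_report.pdf", "wb") as f:
    f.write(resp.content)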