summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
-rw-r--r--  core/views/prospect.py |   2
-rw-r--r--  parsers/imports.py     |  23
-rw-r--r--  parsers/locations.py   | 245
-rw-r--r--  parsers/survex.py      | 232
4 files changed, 257 insertions, 245 deletions
diff --git a/core/views/prospect.py b/core/views/prospect.py
index 4021f5e..a9380c8 100644
--- a/core/views/prospect.py
+++ b/core/views/prospect.py
@@ -9,7 +9,7 @@ from django.shortcuts import render
import troggle.settings as settings
from troggle.core.models.caves import Area, Cave, Entrance, SurvexStation
from troggle.core.views.caves import caveKey
-from troggle.parsers.survex import MapLocations
+from troggle.parsers.locations import MapLocations
# from pathlib import Path
diff --git a/parsers/imports.py b/parsers/imports.py
index c1de034..50b45cf 100644
--- a/parsers/imports.py
+++ b/parsers/imports.py
@@ -4,6 +4,7 @@ from django.db import transaction
import troggle.parsers.caves
import troggle.parsers.drawings
+import troggle.parsers.locations
import troggle.parsers.logbooks
import troggle.parsers.people
import troggle.parsers.QMs
@@ -14,44 +15,37 @@ import troggle.settings
Used only by databaseReset.py and online controlpanel.
"""
-
def import_caves():
print("-- Importing Caves to ", end="")
print(django.db.connections.databases["default"]["NAME"])
troggle.parsers.caves.readcaves()
-
def import_people():
print("-- Importing People (folk.csv) to ", end="")
print(django.db.connections.databases["default"]["NAME"])
with transaction.atomic():
troggle.parsers.people.load_people_expos()
-
def import_surveyscans():
print("-- Importing Survey Scans")
with transaction.atomic():
troggle.parsers.scans.load_all_scans()
-
def import_logbooks():
print("-- Importing Logbooks")
with transaction.atomic():
troggle.parsers.logbooks.LoadLogbooks()
-
def import_logbook(year=2022):
print(f"-- Importing Logbook {year}")
with transaction.atomic():
troggle.parsers.logbooks.LoadLogbook(year)
-
def import_QMs():
print("-- Importing old QMs for 161, 204, 234 from CSV files")
with transaction.atomic():
troggle.parsers.QMs.Load_QMs()
-
def import_survex():
# when this import is moved to the top with the rest it all crashes horribly
print("-- Importing Survex and Entrance Positions")
@@ -62,26 +56,17 @@ def import_survex():
troggle.parsers.survex.LoadSurvexBlocks()
print(" - Survex entrances x/y/z Positions")
with transaction.atomic():
- troggle.parsers.survex.LoadPositions()
-
+ troggle.parsers.locations.LoadPositions()
def import_ents():
- # when this import is moved to the top with the rest it all crashes horribly
print(" - Survex entrances x/y/z Positions")
with transaction.atomic():
- import troggle.parsers.survex
-
- troggle.parsers.survex.LoadPositions()
-
+ troggle.parsers.locations.LoadPositions()
def import_loadpos():
- # when this import is moved to the top with the rest it all crashes horribly
- import troggle.parsers.survex
-
print(" - Survex entrances x/y/z Positions")
with transaction.atomic():
- troggle.parsers.survex.LoadPositions()
-
+ troggle.parsers.locations.LoadPositions()
def import_drawingsfiles():
print("-- Importing Drawings files")
diff --git a/parsers/locations.py b/parsers/locations.py
new file mode 100644
index 0000000..15fc841
--- /dev/null
+++ b/parsers/locations.py
@@ -0,0 +1,245 @@
+import copy
+import os
+import re
+import subprocess
+import sys
+import time
+from datetime import datetime, timezone
+from pathlib import Path
+
+
+import troggle.settings as settings
+from troggle.core.models.caves import Cave, Entrance
+from troggle.core.models.logbooks import QM
+from troggle.core.models.survex import SurvexBlock, SurvexDirectory, SurvexFile, SurvexPersonRole, SurvexStation, Wallet
+from troggle.core.models.troggle import DataIssue, Expedition
+from troggle.core.utils import chaosmonkey, get_process_memory
+from troggle.parsers.logbooks import GetCaveLookup
+from troggle.parsers.people import GetPersonExpeditionNameLookup, known_foreigner
+from troggle.parsers.survex import stash_data_issue, store_data_issues, ROOTBLOCK
+
+"""Uses the imported data to find the locations of the survey stations labelled as
+entrances
+"""
+
+todo = """
+- Pending a complete revision of how we handle GPS coordinates of entrances.
+"""
+
+class MapLocations(object):
+ """Class used only for identifying the entrance locations"""
+
+ p = [
+ ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),
+ ("226-96", "BZkn", "Reference", "Bräuning Zinken trig point"),
+ ("vd1", "VD1", "Reference", "VD1 survey point"),
+ ("laser.kt114_96", "HSK", "Reference", "Hinterer Schwarzmooskogel trig point"),
+ ("2000", "Nipple", "Reference", "Nipple (Weiße Warze)"),
+ ("3000", "VSK", "Reference", "Vorderer Schwarzmooskogel summit"),
+ ("topcamp", "OTC", "Reference", "Old Top Camp"),
+ ("laser.0", "LSR0", "Reference", "Laser Point 0"),
+ ("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"),
+ ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
+ ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
+ ("225-96", "BAlm", "Reference", "Bräuning Alm trig point"),
+ ]
+
+ def points(self):
+ for ent in Entrance.objects.all():
+ if ent.best_station():
+ # print(f"{ent.filename}", end=", ")
+ try:
+ k = ent.caveandentrance_set.all()[0].cave
+ except:
+ message = f" ! Failed to get Cave linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()} {ent.caveandentrance_set.all()}"
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+ continue # skip this entrance
+ try:
+ areaName = k.getArea().short_name
+ except:
+ message = f" ! Failed to get Area on cave '{k}' linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()}"
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+ raise
+ self.p.append((ent.best_station(), f"{areaName}-{str(ent)[5:]}", ent.needs_surface_work(), str(ent)))
+ message = f" - {len(self.p)} entrances linked to caves."
+ print(message)
+ return self.p
+
+ def __str__(self):
+ return f"{len(self.p)} map locations"
+
+poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
+
+
+def LoadPositions():
+ """First load the survex stations for entrances and fixed points (about 600) into the database.
+ Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
+ all survey point positions. Then lookup each position by name to see if we have it in the database
+ and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
+ If we don't have it in the database, print an error message and discard it.
+ """
+ svx_t = 0
+ d3d_t = 0
+
+ def runcavern3d():
+ outputdir = Path(str(f"{topdata}.svx")).parent
+
+ # print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
+ # format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))
+
+ file3d = Path(f"{topdata}.3d")
+ try:
+ sp = subprocess.run(
+ [settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
+ capture_output=True,
+ check=False,
+ text=True,
+ ) # check=False means exception not raised
+ if sp.returncode != 0:
+ message = f" ! Error: cavern: creating {file3d} in runcavern3()"
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+
+ # find the errors in the 1623.log file
+ sp = subprocess.run(
+ ["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
+ ) # check=False means exception not raised
+ message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+
+ except:
+ message = f" ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+
+ if file3d.is_file():
+ message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+
+ if file3d.is_file(): # might be an old one though
+ try:
+ # print(" - Regenerating {} {}.3d in {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
+ sp = subprocess.run(
+ [settings.SURVEXPORT, "--pos", f"{file3d}"],
+ cwd=settings.SURVEX_DATA,
+ capture_output=True,
+ check=False,
+ text=True,
+ )
+ if sp.returncode != 0:
+ print(
+ f" ! Error: survexport creating {topdata}.pos in runcavern3().\n\n"
+ + str(sp.stdout)
+ + "\n\nreturn code: "
+ + str(sp.returncode)
+ )
+ except:
+ message = f" ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+ else:
+ message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+
+ topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
+ print(f" - Generating a list of Pos from {topdata}.svx and then loading...")
+
+ found = 0
+ print("\n") # extra line because cavern overwrites the text buffer somehow
+ # cavern defaults to using same cwd as supplied input file
+
+ completed_process = subprocess.run(["which", f"{settings.CAVERN}"], capture_output=True, check=True, text=True)
+ cav_t = os.path.getmtime(completed_process.stdout.strip())
+
+ svxpath = topdata + ".svx"
+ d3dpath = topdata + ".3d"
+ pospath = topdata + ".pos"
+
+ svx_t = os.path.getmtime(svxpath)
+
+ if os.path.isfile(d3dpath):
+ # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
+ d3d_t = os.path.getmtime(d3dpath)
+
+ now = time.time()
+ if not os.path.isfile(pospath):
+ runcavern3d()
+ if not os.path.isfile(d3dpath):
+ runcavern3d()
+ elif d3d_t - svx_t > 0: # stale, 3d older than svx file
+ runcavern3d()
+ elif now - d3d_t > 60 * 24 * 60 * 60: # >60 days old, re-run anyway
+ runcavern3d()
+ elif cav_t - d3d_t > 0: # new version of cavern
+ runcavern3d()
+
+ mappoints = {}
+ for pt in MapLocations().points():
+ svxid, number, point_type, label = pt
+ mappoints[svxid] = True
+
+ if not Path(pospath).is_file():
+ message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
+ stash_data_issue(parser="entrances", message=message)
+ print(message)
+ return
+
+ posfile = open(pospath)
+ posfile.readline() # Drop header
+ try:
+ survexblockroot = SurvexBlock.objects.get(name=ROOTBLOCK)
+ except:
+ try:
+ survexblockroot = SurvexBlock.objects.get(id=1)
+ except:
+ message = " ! FAILED to find root SurvexBlock"
+ print(message)
+ stash_data_issue(parser="entrances", message=message)
+ raise
+ for line in posfile.readlines():
+ r = poslineregex.match(line)
+ if r:
+ x, y, z, id = r.groups()
+ for sid in mappoints:
+ if id.endswith(sid):
+ blockpath = "." + id[: -len(sid)].strip(".")
+ # But why are we doing this? Why do we need the survexblock id for each of these ?
+ # ..because mostly they don't actually appear in any SVX file. We should match them up
+ # via the cave data, not by this half-arsed syntactic match which almost never works. PMS.
+ if False:
+ try:
+ sbqs = SurvexBlock.objects.filter(survexpath=blockpath)
+ if len(sbqs) == 1:
+ sbqs[0]
+ if len(sbqs) > 1:
+ message = f" ! MULTIPLE SurvexBlocks {len(sbqs):3} matching Entrance point {blockpath} {sid} '{id}'"
+ print(message)
+ stash_data_issue(parser="entrances", message=message)
+ sbqs[0]
+ elif len(sbqs) <= 0:
+ message = f" ! ZERO SurvexBlocks matching Entrance point {blockpath} {sid} '{id}'"
+ print(message)
+ stash_data_issue(parser="entrances", message=message)
+ except:
+ message = f" ! FAIL in getting SurvexBlock matching Entrance point {blockpath} {sid}"
+ print(message)
+ stash_data_issue(parser="entrances", message=message)
+ try:
+ ss = SurvexStation(name=id, block=survexblockroot)
+ ss.x = float(x)
+ ss.y = float(y)
+ ss.z = float(z)
+ ss.save()
+ found += 1
+ except:
+ message = f" ! FAIL to create SurvexStation Entrance point {blockpath} {sid}"
+ print(message)
+ stash_data_issue(parser="entrances", message=message)
+ raise
+ print(f" - {found} SurvexStation entrances found.")
+ store_data_issues()
diff --git a/parsers/survex.py b/parsers/survex.py
index 43a2639..cce3905 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -47,6 +47,13 @@ debugprinttrigger = "!"
dataissues = []
+class SurvexLeg:
+ """No longer a models.Model subclass, so no longer a database table"""
+
+ tape = 0.0
+ compass = 0.0
+ clino = 0.0
+
def stash_data_issue(parser=None, message=None, url=None, sb=None):
"""Avoid hitting the database for error messages until the end of the import"""
global dataissues
@@ -64,57 +71,6 @@ def store_data_issues():
url = get_offending_filename(sb)
DataIssue.objects.create(parser=parser, message=message, url=url)
dataissues = [] # in database now, so empty cache
-
-class MapLocations(object):
- """Class used only for identifying the entrance locations"""
-
- p = [
- ("laser.0_7", "BNase", "Reference", "Br&auml;uning Nase laser point"),
- ("226-96", "BZkn", "Reference", "Br&auml;uning Zinken trig point"),
- ("vd1", "VD1", "Reference", "VD1 survey point"),
- ("laser.kt114_96", "HSK", "Reference", "Hinterer Schwarzmooskogel trig point"),
- ("2000", "Nipple", "Reference", "Nipple (Wei&szlig;e Warze)"),
- ("3000", "VSK", "Reference", "Vorderer Schwarzmooskogel summit"),
- ("topcamp", "OTC", "Reference", "Old Top Camp"),
- ("laser.0", "LSR0", "Reference", "Laser Point 0"),
- ("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"),
- ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
- ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
- ("225-96", "BAlm", "Reference", "Br&auml;uning Alm trig point"),
- ]
-
- def points(self):
- for ent in Entrance.objects.all():
- if ent.best_station():
- # print(f"{ent.filename}", end=", ")
- try:
- k = ent.caveandentrance_set.all()[0].cave
- except:
- message = f" ! Failed to get Cave linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()} {ent.caveandentrance_set.all()}"
- stash_data_issue(parser="entrances", message=message)
- print(message)
- continue # skip this entrance
- try:
- areaName = k.getArea().short_name
- except:
- message = f" ! Failed to get Area on cave '{k}' linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()}"
- stash_data_issue(parser="entrances", message=message)
- print(message)
- raise
- self.p.append((ent.best_station(), f"{areaName}-{str(ent)[5:]}", ent.needs_surface_work(), str(ent)))
- message = f" - {len(self.p)} entrances linked to caves."
- print(message)
- return self.p
-
- def __str__(self):
- return f"{len(self.p)} map locations"
-
-class SurvexLeg:
- """No longer a models.Model subclass, so no longer a database table"""
-
- tape = 0.0
- compass = 0.0
- clino = 0.0
def get_offending_filename(path):
"""Used to provide the URL for a line in the DataErrors page
@@ -2172,177 +2128,3 @@ def LoadSurvexBlocks():
# print(f" - TIME: {duration:7.2f} s", file=sys.stderr)
print(" - Loaded All Survex Blocks.")
-
-poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
-
-
-def LoadPositions():
- """First load the survex stations for entrances and fixed points (about 600) into the database.
- Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
- all survey point positions. Then lookup each position by name to see if we have it in the database
- and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
- If we don't have it in the database, print an error message and discard it.
- """
- svx_t = 0
- d3d_t = 0
-
- def runcavern3d():
- outputdir = Path(str(f"{topdata}.svx")).parent
-
- # print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
- # format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))
-
- file3d = Path(f"{topdata}.3d")
- try:
- sp = subprocess.run(
- [settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
- capture_output=True,
- check=False,
- text=True,
- ) # check=False means exception not raised
- if sp.returncode != 0:
- message = f" ! Error: cavern: creating {file3d} in runcavern3()"
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- # find the errors in the 1623.log file
- sp = subprocess.run(
- ["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
- ) # check=False means exception not raised
- message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- except:
- message = f" ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- if file3d.is_file():
- message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- if file3d.is_file(): # might be an old one though
- try:
- # print(" - Regenerating {} {}.3d in {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
- sp = subprocess.run(
- [settings.SURVEXPORT, "--pos", f"{file3d}"],
- cwd=settings.SURVEX_DATA,
- capture_output=True,
- check=False,
- text=True,
- )
- if sp.returncode != 0:
- print(
- f" ! Error: survexport creating {topdata}.pos in runcavern3().\n\n"
- + str(sp.stdout)
- + "\n\nreturn code: "
- + str(sp.returncode)
- )
- except:
- message = f" ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
- stash_data_issue(parser="entrances", message=message)
- print(message)
- else:
- message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
- print(f" - Generating a list of Pos from {topdata}.svx and then loading...")
-
- found = 0
- print("\n") # extra line because cavern overwrites the text buffer somehow
- # cavern defaults to using same cwd as supplied input file
-
- completed_process = subprocess.run(["which", f"{settings.CAVERN}"], capture_output=True, check=True, text=True)
- cav_t = os.path.getmtime(completed_process.stdout.strip())
-
- svxpath = topdata + ".svx"
- d3dpath = topdata + ".3d"
- pospath = topdata + ".pos"
-
- svx_t = os.path.getmtime(svxpath)
-
- if os.path.isfile(d3dpath):
- # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
- d3d_t = os.path.getmtime(d3dpath)
-
- now = time.time()
- if not os.path.isfile(pospath):
- runcavern3d()
- if not os.path.isfile(d3dpath):
- runcavern3d()
- elif d3d_t - svx_t > 0: # stale, 3d older than svx file
- runcavern3d()
- elif now - d3d_t > 60 * 24 * 60 * 60: # >60 days old, re-run anyway
- runcavern3d()
- elif cav_t - d3d_t > 0: # new version of cavern
- runcavern3d()
-
- mappoints = {}
- for pt in MapLocations().points():
- svxid, number, point_type, label = pt
- mappoints[svxid] = True
-
- if not Path(pospath).is_file():
- message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
- stash_data_issue(parser="entrances", message=message)
- print(message)
- return
-
- posfile = open(pospath)
- posfile.readline() # Drop header
- try:
- survexblockroot = SurvexBlock.objects.get(name=ROOTBLOCK)
- except:
- try:
- survexblockroot = SurvexBlock.objects.get(id=1)
- except:
- message = " ! FAILED to find root SurvexBlock"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- raise
- for line in posfile.readlines():
- r = poslineregex.match(line)
- if r:
- x, y, z, id = r.groups()
- for sid in mappoints:
- if id.endswith(sid):
- blockpath = "." + id[: -len(sid)].strip(".")
- # But why are we doing this? Why do we need the survexblock id for each of these ?
- # ..because mostly they don't actually appear in any SVX file. We should match them up
- # via the cave data, not by this half-arsed syntactic match which almost never works. PMS.
- if False:
- try:
- sbqs = SurvexBlock.objects.filter(survexpath=blockpath)
- if len(sbqs) == 1:
- sbqs[0]
- if len(sbqs) > 1:
- message = f" ! MULTIPLE SurvexBlocks {len(sbqs):3} matching Entrance point {blockpath} {sid} '{id}'"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- sbqs[0]
- elif len(sbqs) <= 0:
- message = f" ! ZERO SurvexBlocks matching Entrance point {blockpath} {sid} '{id}'"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- except:
- message = f" ! FAIL in getting SurvexBlock matching Entrance point {blockpath} {sid}"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- try:
- ss = SurvexStation(name=id, block=survexblockroot)
- ss.x = float(x)
- ss.y = float(y)
- ss.z = float(z)
- ss.save()
- found += 1
- except:
- message = f" ! FAIL to create SurvexStation Entrance point {blockpath} {sid}"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- raise
- print(f" - {found} SurvexStation entrances found.")
- store_data_issues()