Diffstat (limited to 'parsers/survex.py')
-rw-r--r--  parsers/survex.py  232
1 file changed, 7 insertions, 225 deletions
diff --git a/parsers/survex.py b/parsers/survex.py
index 43a2639..cce3905 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -47,6 +47,13 @@ debugprinttrigger = "!"
dataissues = []
+class SurvexLeg:
+ """No longer a models.Model subclass, so no longer a database table"""
+
+ tape = 0.0
+ compass = 0.0
+ clino = 0.0
+
def stash_data_issue(parser=None, message=None, url=None, sb=None):
"""Avoid hitting the database for error messages until the end of the import"""
global dataissues
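
The hunk above keeps the deferred error-reporting pattern: stash_data_issue() only appends to the module-level dataissues cache while parsing, and store_data_issues() writes the whole cache to the DataIssue table in one pass at the end of the import. A minimal self-contained sketch of that pattern (the writer callback stands in for DataIssue.objects.create and is illustrative only, not part of this commit):

    _issue_cache = []

    def stash_issue(parser, message, url=None):
        # Cheap in-memory append; no database round-trip while the parser is running.
        _issue_cache.append((parser, message, url))

    def store_issues(writer):
        # Flush the cache through 'writer' (standing in for DataIssue.objects.create), then empty it.
        global _issue_cache
        for parser, message, url in _issue_cache:
            writer(parser=parser, message=message, url=url)
        _issue_cache = []  # written out now, so empty the cache

    # Illustrative usage:
    stash_issue("survex", " ! example problem", "/survexfile/example.svx")
    store_issues(lambda **kw: print("DataIssue.objects.create(", kw, ")"))
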
@@ -64,57 +71,6 @@ def store_data_issues():
url = get_offending_filename(sb)
DataIssue.objects.create(parser=parser, message=message, url=url)
dataissues = [] # in database now, so empty cache
-
-class MapLocations(object):
- """Class used only for identifying the entrance locations"""
-
- p = [
- ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),
- ("226-96", "BZkn", "Reference", "Bräuning Zinken trig point"),
- ("vd1", "VD1", "Reference", "VD1 survey point"),
- ("laser.kt114_96", "HSK", "Reference", "Hinterer Schwarzmooskogel trig point"),
- ("2000", "Nipple", "Reference", "Nipple (Weiße Warze)"),
- ("3000", "VSK", "Reference", "Vorderer Schwarzmooskogel summit"),
- ("topcamp", "OTC", "Reference", "Old Top Camp"),
- ("laser.0", "LSR0", "Reference", "Laser Point 0"),
- ("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"),
- ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
- ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
- ("225-96", "BAlm", "Reference", "Bräuning Alm trig point"),
- ]
-
- def points(self):
- for ent in Entrance.objects.all():
- if ent.best_station():
- # print(f"{ent.filename}", end=", ")
- try:
- k = ent.caveandentrance_set.all()[0].cave
- except:
- message = f" ! Failed to get Cave linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()} {ent.caveandentrance_set.all()}"
- stash_data_issue(parser="entrances", message=message)
- print(message)
- continue # skip this entrance
- try:
- areaName = k.getArea().short_name
- except:
- message = f" ! Failed to get Area on cave '{k}' linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()}"
- stash_data_issue(parser="entrances", message=message)
- print(message)
- raise
- self.p.append((ent.best_station(), f"{areaName}-{str(ent)[5:]}", ent.needs_surface_work(), str(ent)))
- message = f" - {len(self.p)} entrances linked to caves."
- print(message)
- return self.p
-
- def __str__(self):
- return f"{len(self.p)} map locations"
-
-class SurvexLeg:
- """No longer a models.Model subclass, so no longer a database table"""
-
- tape = 0.0
- compass = 0.0
- clino = 0.0
def get_offending_filename(path):
"""Used to provide the URL for a line in the DataErrors page
@@ -2172,177 +2128,3 @@ def LoadSurvexBlocks():
# print(f" - TIME: {duration:7.2f} s", file=sys.stderr)
print(" - Loaded All Survex Blocks.")
-
-poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
-
-
-def LoadPositions():
- """First load the survex stations for entrances and fixed points (about 600) into the database.
- Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
- all survey point positions. Then lookup each position by name to see if we have it in the database
- and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
- If we don't have it in the database, print an error message and discard it.
- """
- svx_t = 0
- d3d_t = 0
-
- def runcavern3d():
- outputdir = Path(str(f"{topdata}.svx")).parent
-
- # print(" - Regenerating stale cavern .log and .3d for '{}'\n days old: {:.1f} {:.1f} {:.1f}".
- # format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))
-
- file3d = Path(f"{topdata}.3d")
- try:
- sp = subprocess.run(
- [settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
- capture_output=True,
- check=False,
- text=True,
- ) # check=False means exception not raised
- if sp.returncode != 0:
- message = f" ! Error: cavern: creating {file3d} in runcavern3()"
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- # find the errors in the 1623.log file
- sp = subprocess.run(
- ["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
- ) # check=False means exception not raised
- message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- except:
- message = f" ! CalledProcessError 'cavern' in runcavern3() at {topdata}."
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- if file3d.is_file():
- message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- if file3d.is_file(): # might be an old one though
- try:
- # print(" - Regenerating {} {}.3d in {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
- sp = subprocess.run(
- [settings.SURVEXPORT, "--pos", f"{file3d}"],
- cwd=settings.SURVEX_DATA,
- capture_output=True,
- check=False,
- text=True,
- )
- if sp.returncode != 0:
- print(
- f" ! Error: survexport creating {topdata}.pos in runcavern3().\n\n"
- + str(sp.stdout)
- + "\n\nreturn code: "
- + str(sp.returncode)
- )
- except:
- message = f" ! CalledProcessError 'survexport' in runcavern3() at {file3d}."
- stash_data_issue(parser="entrances", message=message)
- print(message)
- else:
- message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
- stash_data_issue(parser="entrances", message=message)
- print(message)
-
- topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
- print(f" - Generating a list of Pos from {topdata}.svx and then loading...")
-
- found = 0
- print("\n") # extra line because cavern overwrites the text buffer somehow
- # cavern defaults to using same cwd as supplied input file
-
- completed_process = subprocess.run(["which", f"{settings.CAVERN}"], capture_output=True, check=True, text=True)
- cav_t = os.path.getmtime(completed_process.stdout.strip())
-
- svxpath = topdata + ".svx"
- d3dpath = topdata + ".3d"
- pospath = topdata + ".pos"
-
- svx_t = os.path.getmtime(svxpath)
-
- if os.path.isfile(d3dpath):
- # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
- d3d_t = os.path.getmtime(d3dpath)
-
- now = time.time()
- if not os.path.isfile(pospath):
- runcavern3d()
- if not os.path.isfile(d3dpath):
- runcavern3d()
- elif d3d_t - svx_t > 0: # stale, 3d older than svx file
- runcavern3d()
- elif now - d3d_t > 60 * 24 * 60 * 60: # >60 days old, re-run anyway
- runcavern3d()
- elif cav_t - d3d_t > 0: # new version of cavern
- runcavern3d()
-
- mappoints = {}
- for pt in MapLocations().points():
- svxid, number, point_type, label = pt
- mappoints[svxid] = True
-
- if not Path(pospath).is_file():
- message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
- stash_data_issue(parser="entrances", message=message)
- print(message)
- return
-
- posfile = open(pospath)
- posfile.readline() # Drop header
- try:
- survexblockroot = SurvexBlock.objects.get(name=ROOTBLOCK)
- except:
- try:
- survexblockroot = SurvexBlock.objects.get(id=1)
- except:
- message = " ! FAILED to find root SurvexBlock"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- raise
- for line in posfile.readlines():
- r = poslineregex.match(line)
- if r:
- x, y, z, id = r.groups()
- for sid in mappoints:
- if id.endswith(sid):
- blockpath = "." + id[: -len(sid)].strip(".")
- # But why are we doing this? Why do we need the survexblock id for each of these ?
- # ..because mostly they don't actually appear in any SVX file. We should match them up
- # via the cave data, not by this half-arsed syntactic match which almost never works. PMS.
- if False:
- try:
- sbqs = SurvexBlock.objects.filter(survexpath=blockpath)
- if len(sbqs) == 1:
- sbqs[0]
- if len(sbqs) > 1:
- message = f" ! MULTIPLE SurvexBlocks {len(sbqs):3} matching Entrance point {blockpath} {sid} '{id}'"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- sbqs[0]
- elif len(sbqs) <= 0:
- message = f" ! ZERO SurvexBlocks matching Entrance point {blockpath} {sid} '{id}'"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- except:
- message = f" ! FAIL in getting SurvexBlock matching Entrance point {blockpath} {sid}"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- try:
- ss = SurvexStation(name=id, block=survexblockroot)
- ss.x = float(x)
- ss.y = float(y)
- ss.z = float(z)
- ss.save()
- found += 1
- except:
- message = f" ! FAIL to create SurvexStation Entrance point {blockpath} {sid}"
- print(message)
- stash_data_issue(parser="entrances", message=message)
- raise
- print(f" - {found} SurvexStation entrances found.")
- store_data_issues()
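
For reference, the removed LoadPositions() depends on the .pos table that survexport --pos writes from the cavern .3d output: one header line followed by rows of the form "( x, y, z ) station.path", which poslineregex picks apart before the coordinates are saved onto a SurvexStation. A self-contained sketch of that parsing step, assuming the same row format (the sample row is illustrative, not taken from a real .pos file):

    import re

    # Same pattern as the removed poslineregex above.
    poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

    def parse_pos_line(line):
        # Return (x, y, z, station_id) for one .pos row, or None if the row does not match.
        m = poslineregex.match(line.strip())
        if not m:
            return None
        x, y, z, station = m.groups()
        return float(x), float(y), float(z), station

    # Illustrative row only; real rows come from running survexport --pos on the .3d file.
    print(parse_pos_line("(36670.37, 83317.43, 1903.97 ) 1623.p240"))
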