Diffstat (limited to 'parsers/survex.py')
-rw-r--r--   parsers/survex.py   63
1 file changed, 35 insertions(+), 28 deletions(-)
diff --git a/parsers/survex.py b/parsers/survex.py
index 3dc9d16..35e09b8 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -1,18 +1,18 @@
-import troggle.settings as settings
-import troggle.core.models as models
-import troggle.settings as settings
-
+import sys
+import os
+import re
+import time
+from datetime import datetime, timedelta
from subprocess import call, Popen, PIPE
-from troggle.parsers.people import GetPersonExpeditionNameLookup
from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware
-import re
-import os
-import time
-from datetime import datetime, timedelta
-import sys
+import troggle.settings as settings
+import troggle.core.models as models
+from troggle.parsers.people import GetPersonExpeditionNameLookup
+from troggle.core.views_caves import MapLocations
+
"""A 'survex block' is a *begin...*end set of cave data.
A 'survexscansfolder' is what we today call a "survey scans folder" or a "wallet".
@@ -227,7 +227,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
# print(insp+'QM notes %s' % qm_notes)
# If the QM isn't resolved (has a resolving station) then load it
- if not qm_resolve_section or qm_resolve_section != '-' or qm_resolve_section is not 'None':
+ if not qm_resolve_section or qm_resolve_section != '-' or qm_resolve_section != 'None':
from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
# If we can find a section (survex note chunk, named)
if len(from_section) > 0:
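
Note that the new condition still chains its three tests with `or`, so it evaluates True for every value of qm_resolve_section and the QM branch is always taken; only the literal-comparison warning from `is not 'None'` goes away. A minimal sketch of a test that would actually distinguish the unresolved markers, assuming '-' and 'None' are the only placeholder values in use (an assumption, not taken from this commit):

    # Hypothetical rewrite for illustration only.
    if not qm_resolve_section or qm_resolve_section in ('-', 'None'):
        # QM has no resolving section yet, so load it
        ...
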
@@ -474,7 +474,6 @@ def LoadAllSurvexBlocks():
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
-
def LoadPos():
"""Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
all survey point positions. Then lookup each position by name to see if we have it in the database
@@ -485,6 +484,8 @@ def LoadPos():
"""
topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))
+
+ # TO DO - remove the cache file apparatus. Not needed. Only laser points and entrances loaded now.
# Be careful with the cache file.
# If LoadPos has been run before,
@@ -532,27 +533,34 @@ def LoadPos():
# cavern defaults to using same cwd as supplied input file
call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)
- print(" - This next bit takes a while. Matching ~32,000 survey positions. Be patient...")
+ #print(" - This next bit takes a while. Matching ~32,000 survey positions. Be patient...")
+
+ mappoints = {}
+ for pt in MapLocations().points():
+ svxid, number, point_type, label = pt
+ mappoints[svxid]=True
posfile = open("%s.pos" % (topdata))
posfile.readline() #Drop header
for line in posfile.readlines():
r = poslineregex.match(line)
if r:
- x, y, z, name = r.groups() # easting, northing, altitude
- if name in notfoundbefore:
- skip[name] = 1
+ x, y, z, id = r.groups() # easting, northing, altitude, survexstation
+ if id in notfoundbefore:
+ skip[id] = 1
else:
- try:
- ss = models.SurvexStation.objects.lookup(name)
- ss.x = float(x)
- ss.y = float(y)
- ss.z = float(z)
- ss.save()
- found += 1
- except:
- notfoundnow.append(name)
- print(" - %s stations not found in lookup of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))
+ for sid in mappoints:
+ if id.endswith(sid):
+ try:
+ ss = models.SurvexStation.objects.lookup(id)
+ ss.x = float(x)
+ ss.y = float(y)
+ ss.z = float(z)
+ ss.save()
+ found += 1
+ except:
+ notfoundnow.append(id)
+ print(" - %s failed lookups of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))
if found > 10: # i.e. a previous cave import has been done
try:
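
The rewritten loop no longer tries to match every one of the ~32,000 positions in the .pos file; it only looks up stations whose survex id ends with one of the MapLocations survex paths (the entrances and laser points mentioned in the TO DO comment above). A rough sketch of the same filter, reusing the tuple layout unpacked from MapLocations().points() in this hunk; the example station id is made up:

    # Illustration only: (svxid, number, point_type, label) per MapLocations point.
    mappoints = {}
    for svxid, number, point_type, label in MapLocations().points():
        mappoints[svxid] = True

    def is_wanted(station_id):
        # e.g. a .pos id such as "caves-1623.290.entrance" (hypothetical)
        # matches if it ends with one of the MapLocations survex paths
        return any(station_id.endswith(svxid) for svxid in mappoints)
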
@@ -565,5 +573,4 @@ def LoadPos():
print((' Not-found cache file written: %s entries' % c))
except:
print(" FAILURE WRITE opening cache file %s" % (cachefile))
- raise
-
+ raise
\ No newline at end of file