path: root/parsers/survex.py
author      Philip Sargent <philip.sargent@klebos.com>    2020-06-16 19:27:32 +0100
committer   Philip Sargent <philip.sargent@klebos.com>    2020-06-16 19:27:32 +0100
commit      94e5a06a15207c3b0b3ca7af569a72e03c7cea51 (patch)
tree        7725319deac5128fd781df16e6aa8f19361abd4e /parsers/survex.py
parent      8fc0ba136fe2aba853474b0ea09a6736455c28fd (diff)
clean up survexlegs
Diffstat (limited to 'parsers/survex.py')
-rw-r--r--    parsers/survex.py    187
1 file changed, 96 insertions(+), 91 deletions(-)
diff --git a/parsers/survex.py b/parsers/survex.py
index 8908f67..9664618 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -31,10 +31,11 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
global survexlegsnumber
# The try catches here need replacing as they are relatively expensive
ls = sline.lower().split()
- ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
- ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])
+ #ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
+ #ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])
- survexleg = models_survex.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
+# survexleg = models_survex.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
+ survexleg = models_survex.SurvexLeg()
# this next fails for two surface survey svx files which use / for decimal point
# e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
if stardata["type"] == "normal":
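The hunk above stops creating database-backed SurvexStation and SurvexLeg rows per leg and builds a throwaway in-memory SurvexLeg() instead. A minimal sketch of that pattern follows; SurvexLeg here is pared down to a plain holder and parse_tape() is a hypothetical helper for the '29/09' and '(06.05)' tape formats the comment mentions, not the parser's actual code.

    class SurvexLeg():
        """Plain in-memory holder; no longer saved to the database per leg."""
        tape = 0.0
        compass = 0.0
        clino = 0.0

    def parse_tape(text):
        # hypothetical helper: strip brackets and treat '/' as a decimal point
        cleaned = text.strip("()").replace("/", ".")
        try:
            return float(cleaned)
        except ValueError:
            return 0.0    # the real parser would record a DataIssue instead

    survexleg = SurvexLeg()
    survexleg.tape = parse_tape("29/09")      # -> 29.09
    survexleg.tape = parse_tape("(06.05)")    # -> 6.05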
@@ -326,7 +327,9 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
previousnlegs = survexlegsnumber
name = line.lower()
print((insp+' - Begin found for: ' + name))
- survexblockdown = models_survex.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
+# survexblockdown = models_survex.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
+ survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock, survexpath=survexblock.survexpath+"."+name,
+ cave=survexfile.cave, survexfile=survexfile, legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
survexblockdown.save()
survexblock.save()
survexblock = survexblockdown
@@ -344,7 +347,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
# .text not used, using it for number of legs per block
legsinblock = survexlegsnumber - previousnlegs
print(insp+"LEGS: {} (previous: {}, now:{})".format(legsinblock,previousnlegs,survexlegsnumber))
- survexblock.text = str(legsinblock)
+ survexblock.legsall = legsinblock
survexblock.save()
endstamp = datetime.now()
timetaken = endstamp - stamp
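The per-block leg count now lands in the new integer field legsall instead of being smuggled through the otherwise-unused text field. The counting itself is a before/after snapshot of the running global total; a small self-contained sketch of that pattern, with illustrative names rather than the real parser's:

    survexlegsnumber = 0    # global running total, incremented once per leg parsed

    def load_block(block, legs):
        global survexlegsnumber
        previousnlegs = survexlegsnumber    # snapshot taken when *begin is seen
        for _ in legs:                      # stand-in for the real leg parsing
            survexlegsnumber += 1
        block["legsall"] = survexlegsnumber - previousnlegs   # was: block.text = str(...)

    block = {}
    load_block(block, range(5))
    print(block["legsall"])    # -> 5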
@@ -436,7 +439,7 @@ def LoadAllSurvexBlocks():
models_survex.SurvexFile.objects.all().delete()
models_survex.SurvexDirectory.objects.all().delete()
models_survex.SurvexEquate.objects.all().delete()
- models_survex.SurvexLeg.objects.all().delete()
+ #models_survex.SurvexLeg.objects.all().delete()
models_survex.SurvexTitle.objects.all().delete()
models_survex.SurvexPersonRole.objects.all().delete()
models_survex.SurvexStation.objects.all().delete()
@@ -457,7 +460,8 @@ def LoadAllSurvexBlocks():
#Load all
# this is the first so id=1
- survexblockroot = models_survex.SurvexBlock(name="rootblock", survexpath="", begin_char=0, cave=None, survexfile=survexfile, totalleglength=0.0)
+ survexblockroot = models_survex.SurvexBlock(name="rootblock", survexpath="", cave=None, survexfile=survexfile,
+ legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
survexblockroot.save()
fin = survexfile.OpenFile()
textlines = [ ]
@@ -465,7 +469,7 @@ def LoadAllSurvexBlocks():
RecursiveLoad(survexblockroot, survexfile, fin, textlines)
fin.close()
survexblockroot.totalleglength = survexlegsalllength
- survexblockroot.text = str(survexlegsnumber)
+ survexblockroot.legsall = survexlegsnumber
#survexblockroot.text = "".join(textlines) these are all blank
survexblockroot.save()
@@ -501,41 +505,41 @@ def LoadPos():
# but without cave import being run before,
# then *everything* may be in the fresh 'not found' cache file.
- cachefile = settings.SURVEX_DATA + "posnotfound.cache"
- notfoundbefore = {}
- if os.path.isfile(cachefile):
- # this is not a good test. 1623.svx may never change but *included files may have done.
- # When the *include is unrolled, we will be able to get a proper timestamp to use
- # and can increase the timeout from 3 days to 30 days.
- updtsvx = os.path.getmtime(topdata + ".svx")
- updtcache = os.path.getmtime(cachefile)
- age = updtcache - updtsvx
- print((' svx: %s cache: %s not-found cache is fresher by: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) ))))
+ # cachefile = settings.SURVEX_DATA + "posnotfound.cache"
+ # notfoundbefore = {}
+ # if os.path.isfile(cachefile):
+ # # this is not a good test. 1623.svx may never change but *included files may have done.
+ # # When the *include is unrolled, we will be able to get a proper timestamp to use
+ # # and can increase the timeout from 3 days to 30 days.
+ # updtsvx = os.path.getmtime(topdata + ".svx")
+ # updtcache = os.path.getmtime(cachefile)
+ # age = updtcache - updtsvx
+ # print((' svx: %s cache: %s not-found cache is fresher by: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) ))))
- now = time.time()
- if now - updtcache > 3*24*60*60:
- print(" cache is more than 3 days old. Deleting.")
- os.remove(cachefile)
- elif age < 0 :
- print(" cache is stale. Deleting.")
- os.remove(cachefile)
- else:
- print(" cache is fresh. Reading...")
- try:
- with open(cachefile, "r") as f:
- for line in f:
- l = line.rstrip()
- if l in notfoundbefore:
- notfoundbefore[l] +=1 # should not be duplicates
- print(" DUPLICATE ", line, notfoundbefore[l])
- else:
- notfoundbefore[l] =1
- except:
- print(" FAILURE READ opening cache file %s" % (cachefile))
- raise
+ # now = time.time()
+ # if now - updtcache > 3*24*60*60:
+ # print(" cache is more than 3 days old. Deleting.")
+ # os.remove(cachefile)
+ # elif age < 0 :
+ # print(" cache is stale. Deleting.")
+ # os.remove(cachefile)
+ # else:
+ # print(" cache is fresh. Reading...")
+ # try:
+ # with open(cachefile, "r") as f:
+ # for line in f:
+ # l = line.rstrip()
+ # if l in notfoundbefore:
+ # notfoundbefore[l] +=1 # should not be duplicates
+ # print(" DUPLICATE ", line, notfoundbefore[l])
+ # else:
+ # notfoundbefore[l] =1
+ # except:
+ # print(" FAILURE READ opening cache file %s" % (cachefile))
+ # raise
- notfoundnow =[]
+# notfoundnow =[]
found = 0
skip = {}
print("\n") # extra line because cavern overwrites the text buffer somehow
@@ -557,60 +561,61 @@ def LoadPos():
r = poslineregex.match(line)
if r:
x, y, z, id = r.groups()
- if id in notfoundbefore:
- skip[id] = 1
- else:
- for sid in mappoints:
- if id.endswith(sid):
- notfoundnow.append(id)
- # Now that we don't import any stations, we create it rather than look it up
- # ss = models_survex.SurvexStation.objects.lookup(id)
-
- # need to set block_id which means doing a search on all the survex blocks..
- # remove dot at end and add one at beginning
- blockpath = "." + id[:-len(sid)].strip(".")
- try:
- sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
- if len(sbqs)==1:
- sb = sbqs[0]
- if len(sbqs)>1:
- message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
- print(message)
- models.DataIssue.objects.create(parser='survex', message=message)
- sb = sbqs[0]
- elif len(sbqs)<=0:
- message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
- print(message)
- models.DataIssue.objects.create(parser='survex', message=message)
- sb = survexblockroot
- except:
- message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
+ # if id in notfoundbefore:
+ # skip[id] = 1
+ # else:
+ for sid in mappoints:
+ if id.endswith(sid):
+# notfoundnow.append(id)
+ # Now that we don't import any stations, we create it rather than look it up
+ # ss = models_survex.SurvexStation.objects.lookup(id)
+
+ # need to set block_id which means doing a search on all the survex blocks..
+ # remove dot at end and add one at beginning
+ blockpath = "." + id[:-len(sid)].strip(".")
+ try:
+ sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
+ if len(sbqs)==1:
+ sb = sbqs[0]
+ if len(sbqs)>1:
+ message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
print(message)
models.DataIssue.objects.create(parser='survex', message=message)
- try:
- ss = models_survex.SurvexStation(name=id, block=sb)
- ss.x = float(x)
- ss.y = float(y)
- ss.z = float(z)
- ss.save()
- found += 1
- except:
- message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
+ sb = sbqs[0]
+ elif len(sbqs)<=0:
+ message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
print(message)
models.DataIssue.objects.create(parser='survex', message=message)
- raise
+ sb = survexblockroot
+ except:
+ message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
+ print(message)
+ models.DataIssue.objects.create(parser='survex', message=message)
+ try:
+ ss = models_survex.SurvexStation(name=id, block=sb)
+ ss.x = float(x)
+ ss.y = float(y)
+ ss.z = float(z)
+ ss.save()
+ found += 1
+ except:
+ message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
+ print(message)
+ models.DataIssue.objects.create(parser='survex', message=message)
+ raise
#print(" - %s failed lookups of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))
-
- if found > 10: # i.e. a previous cave import has been done
- try:
- with open(cachefile, "w") as f:
- c = len(notfoundnow)+len(skip)
- for i in notfoundnow:
- pass #f.write("%s\n" % i)
- for j in skip:
- pass #f.write("%s\n" % j) # NB skip not notfoundbefore
- print((' Not-found cache file written: %s entries' % c))
- except:
- print(" FAILURE WRITE opening cache file %s" % (cachefile))
- raise
\ No newline at end of file
+ print(" - {} SurvexStation entrances found.".format(found))
+
+ # if found > 10: # i.e. a previous cave import has been done
+ # try:
+ # with open(cachefile, "w") as f:
+ # c = len(notfoundnow)+len(skip)
+ # for i in notfoundnow:
+ # pass #f.write("%s\n" % i)
+ # for j in skip:
+ # pass #f.write("%s\n" % j) # NB skip not notfoundbefore
+ # print((' Not-found cache file written: %s entries' % c))
+ # except:
+ # print(" FAILURE WRITE opening cache file %s" % (cachefile))
+ # raise
\ No newline at end of file
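The blockpath derivation kept in the new code ("remove dot at end and add one at beginning") is easiest to follow with a concrete value; the station id below is invented purely for illustration:

    # sid is the map-point suffix matched by id.endswith(sid); the id value is made up here
    id = "1623.204.gesamt.p204a"
    sid = "p204a"
    blockpath = "." + id[:-len(sid)].strip(".")
    print(blockpath)    # -> ".1623.204.gesamt"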