path: root/parsers/survex.py
import sys
import os
import re
import time
from datetime import datetime, timedelta
from subprocess import call, Popen, PIPE

from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import troggle.settings as settings
import troggle.core.models as models
import troggle.core.models_caves as models_caves
import troggle.core.models_survex as models_survex
from troggle.parsers.people import GetPersonExpeditionNameLookup
from troggle.core.views_caves import MapLocations


"""A 'survex block' is a *begin...*end set of cave data.
A 'survexscansfolder' is what we today call a "survey scans folder" or a "wallet".
"""

rx_braskets = re.compile(r"[()]")
rx_line_length = re.compile(r"[\d\-+.]+$")
survexlegsalllength = 0.0
survexlegsnumber = 0
survexblockroot = None
ROOTBLOCK = "rootblock"


def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
    global survexlegsalllength
    global survexlegsnumber
    # The try catches here need replacing as they are relatively expensive
    ls = sline.lower().split()
    #ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]]) 
    #ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])

#    survexleg = models_survex.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
    survexleg = models_survex.SurvexLeg()
    # this next fails for two surface survey svx files which use / for decimal point 
    # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
    if stardata["type"] == "normal":
        tape = rx_braskets.sub("",ls[stardata["tape"]])
        tape = tape.replace("/",".")
        try:
            survexleg.tape = float(tape)
            survexlegsnumber += 1
        except ValueError:
            print("! Tape misread in", survexblock.survexfile.path)
            print("  Stardata:", stardata)
            print("  Line:", ls)
            message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            survexleg.tape = 0
        try:
            lclino = ls[stardata["clino"]]
        except (IndexError, KeyError):
            print("! Clino misread in", survexblock.survexfile.path)
            print("  Stardata:", stardata)
            print("  Line:", ls)
            message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            lclino = "error"  # sentinel string; the sanity asserts below will then fail on this leg
        try:
            lcompass = ls[stardata["compass"]]
        except (IndexError, KeyError):
            print("! Compass misread in", survexblock.survexfile.path)
            print("  Stardata:", stardata)
            print("  Line:", ls)
            message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survex', message=message)
            lcompass = "error"  # sentinel string; the sanity asserts below will then fail on this leg
        if lclino == "up":
            survexleg.compass = 0.0
            survexleg.clino = 90.0
        elif lclino == "down":
            survexleg.compass = 0.0
            survexleg.clino = -90.0
        elif lclino == "-" or lclino == "level":
            try:
                survexleg.compass = float(lcompass)
            except ValueError:
                print(("! Compass misread in", survexblock.survexfile.path))
                print(("  Stardata:", stardata))
                print(("  Line:", ls))
                message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
                models.DataIssue.objects.create(parser='survex', message=message)
                survexleg.compass = 1000
            survexleg.clino = -90.0
        else:
            assert rx_line_length.match(lcompass), ls
            assert rx_line_length.match(lclino) and lclino != "-", ls
            survexleg.compass = float(lcompass)
            survexleg.clino = float(lclino)

        if cave:
            survexleg.cave = cave

        # only save proper legs
        # No need to save as we are measuring lengths only on parsing now.
        # delete the object so that django autosaving doesn't save it.
        survexleg = None
        #survexleg.save()

    itape = stardata.get("tape")
    if itape:
        try:
            survexblock.totalleglength += float(ls[itape])
            survexlegsalllength += float(ls[itape])
        except ValueError:
            print("! Length not added")
        # No need to save as we are measuring lengths only on parsing now.
        #survexblock.save()


def LoadSurvexEquate(survexblock, sline):
    #print sline #
    stations = sline.split()
    assert len(stations) > 1
    for station in stations:
        survexblock.MakeSurvexStation(station)


def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
    # do not import *data passage.. data which is LRUD not tape/compass/clino
    pass

# This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
# *DATA normal from to length gradient bearing ignore ignore ignore ignore
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
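
# As a worked example of the mapping built under cmd == "data" in RecursiveLoad below, the header
#   *data normal from to length gradient bearing ignore ignore ignore ignore
# produces (after stardataparamconvert renaming, with the repeated "ignore" key overwritten):
#   {"type": "normal", "normal": -1, "from": 0, "to": 1, "tape": 2, "clino": 3, "compass": 4, "ignore": 8}
# i.e. each value is the zero-based position of that reading on a subsequent data line.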

rx_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
rx_ref     = re.compile(r'.*?ref.*?(\d+)\s*#\s*(X)?\s*(\d+)')
rx_star    = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
# years from 1960 to 2039
rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
# rx_starref = re.compile("""?x   # VERBOSE mode - can't get this to work
# ^\s*\*ref       # look for *ref at start of line
# [\s.:]*         # some spaces, stops or colons
# ((?:19[6789]\d)|(?:20[0123]\d)) # a date from 1960 to 2039 - captured as one field
# \s*#            # spaces then hash separator 
# ?\s*(X)         # optional X - captured
# ?\s*(.*?\d+.*?) # maybe a space, then at least one digit in the string - captured
# $(?i)""", re.X) # the end  (do the whole thing case insensitively)
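
# A sketch of an equivalent re.VERBOSE pattern (a hypothetical alternative, not used by the parser):
# the attempt above fails because "?x" / a trailing "(?i)" is not valid inline-flag placement, and in
# VERBOSE mode the literal "#" separator must be escaped so it is not taken as a comment.
rx_starref_verbose = re.compile(r"""
    ^\s*\*ref                           # look for *ref at start of line
    [\s.:]*                             # some spaces, stops or colons
    ((?:19[6789]\d)|(?:20[0123]\d))     # a year from 1960 to 2039 - captured
    \s*\#?\s*                           # optional hash separator
    (X)?                                # optional X - captured
    \s*(.*?\d+.*?)$                     # at least one digit in the string - captured
    """, re.IGNORECASE | re.VERBOSE)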

rx_team    = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
rx_team_member        = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")
rx_qm      = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
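
# For example (hypothetical names): a "*team Insts Fred Bloggs and Joe Soap" line reaches the
# team handler with line == "Insts Fred Bloggs and Joe Soap"; rx_team captures
# ("Insts", "Fred Bloggs and Joe Soap") and rx_team_member then splits the second group into
# ["Fred Bloggs", "Joe Soap"].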

insp = ""
callcount = 0
def RecursiveLoad(survexblock, survexfile, fin, textlines):
    """Follows the *include links in all the survex files from the root file 1623.svx
    and reads in the survex blocks, other data and the wallet references (survexscansfolder) as it
    goes. This part of the data import process is where the maximum memory is used and where it
    crashes on memory-constrained machines.
    """
    iblankbegins = 0
    text = [ ]
    stardata = stardatadefault
    teammembers = [ ]
    global insp
    global callcount
    global survexlegsnumber
    # Number of legs counted when this invocation started: lets the *end handler compute a
    # legs-per-block figure even if no nested, named *begin occurs within this block.
    previousnlegs = survexlegsnumber

    print(insp+"  - Reading file: " + survexblock.survexfile.path + " <> " + survexfile.path)
    stamp = datetime.now()
    lineno = 0
    
    sys.stderr.flush()
    callcount +=1
    if callcount >=10:
        callcount=0
        print(".", file=sys.stderr,end='')

    # Try to find the cave in the DB if not use the string as before
    path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
    if path_match:
        pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
        cave = models_caves.getCaveByReference(pos_cave)
        if cave:
            survexfile.cave = cave
    svxlines = fin.read().splitlines()
    for svxline in svxlines:
        lineno += 1
        # break the line at the comment
        sline, comment = rx_comment.match(svxline.strip()).groups()
        # detect ref line pointing to the scans directory
        mref = comment and rx_ref.match(comment)
        if mref:
            yr, letterx, wallet = mref.groups()
            if not letterx:
                letterx = ""
            else:
                letterx = "X"
            if len(wallet)<2:
                wallet = "0" + wallet
            refscan = "%s#%s%s" % (yr, letterx, wallet )
            survexscansfolders = models_survex.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                survexblock.save()
            else:
                message = ' ! Wallet ; ref {} - NOT found in survexscansfolders {}'.format(refscan, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)

        # This whole section should be moved if we can have *QM become a proper survex command
        # Spec of QM in SVX files, currently commented out need to add to survex
        # needs to match rx_qm
        # ;Serial number   grade(A/B/C/D/X)  nearest-station  resolution-station description
        # ;QM1	a	hobnob_hallway_2.42	hobnob-hallway_3.42	junction of keyhole passage
        # ;QM1	a	hobnob_hallway_2.42	-	junction of keyhole passage
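        # For the second example line above, rx_qm captures:
        #   group(1)='1' (number), group(2)='a' (grade),
        #   group(3)='hobnob_hallway_2' and group(4)='42' (nearest station),
        #   group(5)='-' with group(6)/group(7) None (unresolved),
        #   group(8)='junction of keyhole passage' (description).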
        qmline = comment and rx_qm.match(comment)
        if qmline:
            qm_no = qmline.group(1)
            qm_grade = qmline.group(2)
            qm_from_section = qmline.group(3)
            qm_from_station = qmline.group(4)
            qm_resolve_section = qmline.group(6)
            qm_resolve_station = qmline.group(7)
            qm_notes = qmline.group(8)

            # print(insp+'Cave - %s' % survexfile.cave)
            # print(insp+'QM no %d' % int(qm_no))
            # print(insp+'QM grade %s' % qm_grade)
            # print(insp+'QM section %s' % qm_from_section)
            # print(insp+'QM station %s' % qm_from_station)
            # print(insp+'QM res section %s' % qm_resolve_section)
            # print(insp+'QM res station %s' % qm_resolve_station)
            # print(insp+'QM notes %s' % qm_notes)

            # If the QM is unresolved (i.e. it has no resolving station) then load it
            if not qm_resolve_section or qm_resolve_section in ('-', 'None'):
                from_section = models_survex.SurvexBlock.objects.filter(name=qm_from_section)
                # If we can find a section (survex note chunk, named)
                if len(from_section) > 0:
                    from_station = models_survex.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                    # If we can find a from station then we have the nearest station and can import it
                    if len(from_station) > 0:
                        qm = models_caves.QM.objects.create(number=qm_no,
                                                      nearest_station=from_station[0],
                                                      grade=qm_grade.upper(),
                                                      location_description=qm_notes)
            else:
                # print(insp+' - QM found but resolved')
                pass

        if not sline:
            continue

        # detect the star ref command 
        mstar = rx_starref.match(sline)
        if mstar:
            yr,letterx,wallet = mstar.groups()
            if not letterx:
                letterx = ""
            else:
                letterx = "X"
            if len(wallet)<2:
                wallet = "0" + wallet
            assert (int(yr)>1960 and int(yr)<2039), "Wallet year out of bounds: %s" % yr
            assert (int(wallet)<100), "Wallet number out of range (>=100): %s" % wallet
            refscan = "%s#%s%s" % (yr, letterx, wallet)
            survexscansfolders = models_survex.SurvexScansFolder.objects.filter(walletname=refscan)
            if survexscansfolders:
                survexblock.survexscansfolder = survexscansfolders[0]
                survexblock.save()
            else:
                message = ' ! Wallet *REF {} - NOT found in survexscansfolders {}'.format(refscan, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
            continue

        # detect a star command; anything else is a data line for the current *data format
        mstar = rx_star.match(sline)
        if not mstar:
            if "from" in stardata:
                LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
            elif stardata["type"] == "passage":
                LoadSurvexLinePassage(survexblock, stardata, sline, comment)
            # (*data formats keyed on "station" rather than "from" are not handled here.)
            continue

        # process the star command
        cmd, line = mstar.groups()
        cmd = cmd.lower()
        if re.match("include$(?i)", cmd):
            includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line)))
            print((insp+'   - Include path found including - ' + includepath))
            # Try to find the cave in the DB if not use the string as before
            path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
            if path_match:
                pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                # print(insp+pos_cave)
                cave = models_caves.getCaveByReference(pos_cave)
                if cave:
                    survexfile.cave = cave
            else:
                print((insp+'    - No match in DB (i) for %s, so loading..' % includepath))
            includesurvexfile = models_survex.SurvexFile(path=includepath)
            includesurvexfile.save()
            includesurvexfile.SetDirectory()
            if includesurvexfile.exists():
                survexblock.save()
                fininclude = includesurvexfile.OpenFile()
                insp += "> "
                RecursiveLoad(survexblock, includesurvexfile, fininclude, textlines)
                insp = insp[2:]

        elif re.match("begin$(?i)", cmd):
            if line:
                newsvxpath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
                # Try to find the cave in the DB if not use the string as before
                path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
                if path_match:
                    pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
                    # print(insp+pos_cave)
                    cave = models_caves.getCaveByReference(pos_cave)
                    if cave:
                        survexfile.cave = cave
                else:
                    print((insp+'    - No match (b) for %s' % newsvxpath))

                previousnlegs = survexlegsnumber
                name = line.lower()
                print((insp+'   - Begin found for: ' + name))
#                survexblockdown = models_survex.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexfile.cave, survexfile=survexfile, totalleglength=0.0)
                survexblockdown = models_survex.SurvexBlock(name=name, parent=survexblock, survexpath=survexblock.survexpath+"."+name, 
                        cave=survexfile.cave, survexfile=survexfile, legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
                survexblockdown.save()
                survexblock.save()
                survexblock = survexblockdown
                textlinesdown = [ ]
                insp += "> "
                RecursiveLoad(survexblockdown, survexfile, fin, textlinesdown)
                insp = insp[2:]
            else:
                iblankbegins += 1

        elif re.match("end$(?i)", cmd):
            if iblankbegins:
                iblankbegins -= 1
            else:
                # .text not used, using it for number of legs per block
                legsinblock = survexlegsnumber - previousnlegs
                print(insp+"LEGS: {} (previous: {}, now:{})".format(legsinblock,previousnlegs,survexlegsnumber))
                survexblock.legsall = legsinblock
                survexblock.save()
                endstamp = datetime.now()
                timetaken = endstamp - stamp
                return

        elif re.match("date$(?i)", cmd):
            if len(line) == 10:
                survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
                expeditions = models.Expedition.objects.filter(year=line[:4])
                if expeditions:
                    assert len(expeditions) == 1
                    survexblock.expedition = expeditions[0]
                    survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                    survexblock.save()

        elif re.match("team$(?i)", cmd):
            pass
            # print(insp+'   - Team found: ')
            mteammember = rx_team.match(line)
            if mteammember:
                for tm in rx_team_member.split(mteammember.group(2)):
                    if tm:
                        personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                        if (personexpedition, tm) not in teammembers:
                            teammembers.append((personexpedition, tm))
                            personrole = models_survex.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
                            personrole.expeditionday = survexblock.expeditionday
                            if personexpedition:
                                personrole.person=personexpedition.person
                            personrole.save()

        elif cmd == "title":
            survextitle = models_survex.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexfile.cave)
            survextitle.save()

        elif cmd == "require":
            # should we check survex version available for processing?
            pass

        elif cmd == "data":
            ls = line.lower().split()
            stardata = { "type":ls[0] }
            for i in range(0, len(ls)):
                stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
            if ls[0] in ["normal", "cartesian", "nosurvey"]:
                assert (("from" in stardata and "to" in stardata) or "station" in stardata), line
            elif ls[0] == "default":
                stardata = stardatadefault
            else:
                assert ls[0] == "passage", line

        elif cmd == "equate":
            LoadSurvexEquate(survexblock, line)

        elif cmd == "set" and re.match("names(?i)", line):
            pass
        elif cmd == "flags":
            # Here we could set on/off 'splay', 'not splay', 'surface', 'not surface', or 'duplicate'
            # but this data is only used for sense-checking not to actually calculate anything important
            pass
        elif cmd == "fix":
            survexblock.MakeSurvexStation(line.split()[0])
        elif cmd in ["alias", "calibrate", "cs","entrance", "export", "case", 
                "declination", "infer","instrument", "sd", "units"]:
            # we ignore all these, which is fine.
            pass
        else:
            if cmd not in ["include", "data", "flags", "title", "set", "ref"]:
                message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
            else:
                message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path)
                print((insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)

        endstamp = datetime.now()
        timetaken = endstamp - stamp
        # print(insp+'   - Time to process: ' + str(timetaken))

def LoadAllSurvexBlocks():
    global survexlegsalllength
    global survexlegsnumber

    print(' - Flushing All Survex Blocks...')

    models_survex.SurvexBlock.objects.all().delete()
    models_survex.SurvexFile.objects.all().delete()
    models_survex.SurvexDirectory.objects.all().delete()
    models_survex.SurvexEquate.objects.all().delete()
    #models_survex.SurvexLeg.objects.all().delete()
    models_survex.SurvexTitle.objects.all().delete()
    models_survex.SurvexPersonRole.objects.all().delete()
    models_survex.SurvexStation.objects.all().delete()

    print(" - Data flushed")
    # Clear the data issues as we are reloading
    models.DataIssue.objects.filter(parser='survex').delete()
    print(' - Loading All Survex Blocks...')
    
    print('  - redirecting stdout to loadsurvexblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('loadsurvexblks.log', 'w')

    survexfile = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    survexfile.save()
    survexfile.SetDirectory()

    #Load all
    # this is the first so id=1
    survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None, survexfile=survexfile, 
            legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
    survexblockroot.save()
    fin = survexfile.OpenFile()
    textlines = [ ]
    # The real work starts here
    RecursiveLoad(survexblockroot, survexfile, fin, textlines)
    fin.close()
    survexblockroot.totalleglength = survexlegsalllength
    survexblockroot.legsall = survexlegsnumber
    #survexblockroot.text = "".join(textlines) these are all blank
    survexblockroot.save()
    
    # Close the file
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush()
    
    # Restore sys.stdout to our old saved file handler
    sys.stdout = stdout_orig
    print(" - total number of survex legs: {}m".format(survexlegsnumber))
    print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
    print(' - Loaded All Survex Blocks.')


poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
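# A line of the .pos file produced by 3dtopos looks like this (values hypothetical):
#   (  1234.56,  7890.12,  1546.91 ) 1623.264.passage.5
# poslineregex captures the x, y, z coordinates and the full dotted station name.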

def LoadPos():
    """Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of 
    all survey point positions. Then lookup each position by name to see if we have it in the database 
    and if we do, then save the x/y/z coordinates.
    If we don't have it in the database, print an error message and discard it.
    This is ONLY ever used for entrance and fixedpts locations for the prospecting map:
    about 600 points out of 32,000.
    """
    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    # TO DO - remove the cache file apparatus. Not needed. Only laser points and entrances loaded now.
    
    # Be careful with the cache file. 
    # If LoadPos has been run before, 
    # but without cave import being run before,
    # then *everything* may be in the fresh  'not found' cache file. 
    
    # cachefile = settings.SURVEX_DATA + "posnotfound.cache"
    # notfoundbefore = {}
    # if os.path.isfile(cachefile):
        # # this is not a good test. 1623.svx may never change but *included files may have done.
        # # When the *include is unrolled, we will be able to get a proper timestamp to use
        # # and can increase the timeout from 3 days to 30 days.
        # updtsvx = os.path.getmtime(topdata + ".svx")
        # updtcache = os.path.getmtime(cachefile)
        # age = updtcache - updtsvx
        # print(('   svx: %s    cache: %s    not-found cache is fresher by: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) ))))
        
        # now = time.time()
        # if now - updtcache > 3*24*60*60:
            # print("   cache is more than 3 days old. Deleting.")
            # os.remove(cachefile)
        # elif age < 0 :
            # print("   cache is stale. Deleting.")
            # os.remove(cachefile)
        # else:
            # print("   cache is fresh. Reading...")
            # try:
                # with open(cachefile, "r") as f:
                    # for line in f:
                        # l = line.rstrip()
                        # if l in notfoundbefore:
                            # notfoundbefore[l] +=1 # should not be duplicates
                            # print(" DUPLICATE ", line, notfoundbefore[l])
                        # else:
                            # notfoundbefore[l] =1
            # except:
                # print("   FAILURE READ opening cache file %s" % (cachefile))
                # raise
            
    
#    notfoundnow =[]
    found = 0
    skip = {}
    print("\n") # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file
    call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
    call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)
    #print("  - This next bit takes a while. Matching ~32,000 survey positions. Be patient...")

    mappoints = {}
    for pt in MapLocations().points():
        svxid, number,  point_type, label = pt
        mappoints[svxid]=True

    posfile = open("%s.pos" % (topdata))
    posfile.readline() #Drop header

    try:
        survexblockroot = models_survex.SurvexBlock.objects.get(name=ROOTBLOCK)
    except:
        try:
            survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
        except:
            message = ' ! FAILED to find root SurvexBlock'
            print(message)
            models.DataIssue.objects.create(parser='survex', message=message)
            raise
    for line in posfile.readlines():
        r = poslineregex.match(line)
        if r:
            x, y, z, id = r.groups() 
            # if id in notfoundbefore:
                # skip[id] = 1
            # else:
            for sid in mappoints:
                if id.endswith(sid):
#                    notfoundnow.append(id)
                    # Now that we don't import any stations, we create it rather than look it up
                    # ss = models_survex.SurvexStation.objects.lookup(id)
                    
                    # need to set block_id which means doing a search on all the survex blocks..
                    # remove dot at end and add one at beginning
                    blockpath = "." + id[:-len(sid)].strip(".")
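                    # e.g. (hypothetical) id "1623.204.gw.7" with matching sid "gw.7"
                    # gives blockpath ".1623.204"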
                    try:
                        sbqs = models_survex.SurvexBlock.objects.filter(survexpath=blockpath)
                        if len(sbqs)==1:
                            sb = sbqs[0]
                        elif len(sbqs)>1:
                            message = ' ! MULTIPLE SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            sb = sbqs[0]
                        else:
                            message = ' ! ZERO SurvexBlocks matching Entrance point {} {}'.format(blockpath, sid)
                            print(message)
                            models.DataIssue.objects.create(parser='survex', message=message)
                            sb = survexblockroot
                    except:
                        message = ' ! FAIL in getting SurvexBlock matching Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        sb = survexblockroot  # fall back so the station can still be created below
                    try:
                        ss = models_survex.SurvexStation(name=id, block=sb)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z) 
                        ss.save()
                        found += 1
                    except:
                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        raise

    #print(" - %s failed lookups of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip)))
    print(" - {} SurvexStation entrances found.".format(found))

    # if found > 10: # i.e. a previous cave import has been done
        # try:
            # with open(cachefile, "w") as f:
                # c = len(notfoundnow)+len(skip)
                # for i in notfoundnow:
                    # pass #f.write("%s\n" % i)
                # for j in skip:
                    # pass #f.write("%s\n" % j) # NB skip not notfoundbefore
                # print(('   Not-found cache file written: %s entries' % c))
        # except:
            # print("   FAILURE WRITE opening cache file %s" % (cachefile))
            # raise