import copy
import os
import re
import subprocess
import sys
import time
from datetime import datetime, timezone
from pathlib import Path


import troggle.settings as settings
from troggle.core.models.caves import Cave, Entrance
from troggle.core.models.logbooks import QM
from troggle.core.models.survex import SurvexBlock, SurvexFile, SurvexPersonRole, SurvexStation
from troggle.core.models.wallets import Wallet
from troggle.core.models.troggle import DataIssue, Expedition
from troggle.core.utils import chaosmonkey, get_process_memory
from troggle.parsers.logbooks import GetCaveLookup
from troggle.parsers.people import GetPersonExpeditionNameLookup, known_foreigner
from troggle.parsers.survex import stash_data_issue, store_data_issues, ROOTBLOCK

"""Uses the imported data to find the locations of the survey stations labelled as 
entrances
"""

todo = """
-    
"""

class MapLocations(object):
    """Class used only for identifying the entrance locations.
    Formerly used to put locations on a prospecting map.

    We don't need these map locations any more?!
    They would only be used in addition to entrances going onto a map display."""

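    # Each fp tuple is (survex station id, short label, point type, description).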
    fp = [
        ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),
        ("226-96", "BZkn", "Reference", "Bräuning Zinken trig point"),
        ("vd1", "VD1", "Reference", "VD1 survey point"),
        ("laser.kt114_96", "HSK", "Reference", "Hinterer Schwarzmooskogel trig point"),
        ("2000", "Nipple", "Reference", "Nipple (Weiße Warze)"),
        ("3000", "VSK", "Reference", "Vorderer Schwarzmooskogel summit"),
        ("topcamp", "OTC", "Reference", "Old Top Camp"),
        ("laser.0", "LSR0", "Reference", "Laser Point 0"),
        ("laser.0_1", "LSR1", "Reference", "Laser Point 0/1"),
        ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
        ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
        ("225-96", "BAlm", "Reference", "Bräuning Alm trig point"),
    ] # 12 fixed points
    
    # Where we have a lot of survey stations which have not been assigned to proper Entrances
    # and Caves yet, but we want to see them in the /stations report page:

    p = []

    def points(self):
        nullent = Entrance.objects.first()  # arbitrary placeholder Entrance; None if no Entrances are loaded yet
        pending = [
            ("1623.p2013-cucc-pit", "no ent", False, nullent),
            
            ("1626.p2013-cucc-draftyholes", "no ent", False, nullent),
            ("1626.p2013-cucc-pitarea", "no ent", False, nullent),
            ("1626.p2013-cucc-goodpit", "no ent", False, nullent),
            ("1626.p2013-cucc-goodpit-DUP", "no ent", False, nullent),
            ("1626.p2023-cucc-22mpit", "no ent", False, nullent),
            ("1626.p2013-cucc-lineofpits", "no ent", False, nullent),
            ("1626.p2013-cucc-12mpit", "no ent", False, nullent),
            ("1626.p2013-cucc-20mpit", "no ent", False, nullent),
            ("1626.p2013-cucc-2s-drop", "no ent", False, nullent),
            # to add in the rest of the entrances from the 2013 prospecting trip

            # fix p2013-cucc-01 reference 486512 284436 1825 ; 11:02
            # *fix p2013-cucc-01-DUP  reference 486519 284448 1823

            # *fix p2013-cucc-01B reference 486492 284508 1828 ; 11:37

            # *fix p2013-cucc-01cp reference 486507 284580 1843 ; 11:44

            # *fix p2013-cucc-rift reference 486410 284622 1835 ; 12:00

            # *fix p2013-cucc-slope reference 486485 284591 1844 ; 11:51

            # *fix p2013-cucc-snowplug2 reference 486042 284531 1813 ; 13:03

            # *fix p2013-cucc-draft4pit  reference 486374 284438 1786 ; 13:58 ; * GOOD
            # *fix p2013-cucc-draft4pit-DUP  reference 486375 284447 1774

            # *fix p2013-cucc-DraftHole reference 486277 284716 1856 ; 12:28

            # *fix p2013-cucc-setofsnowholes reference 486060 284584 1848 ; 12:51

            # *fix p2013-cucc-stotp20 reference 486176 284738 1864 ; 12:37

            # *fix p2013-cucc-snowplug reference 486349 284666 1833 ; 12:13

            # *fix p2013-cucc-draft3pit reference 486332 284428 1769

            # *fix p2013-cucc-shelter1 reference 486533 284546 1831

            # *fix p2013-cucc-draft2pit reference 486333 284485 1779

            # ;*fix p2013-cucc-01-DUP ; marked as  duplicate
            # ;*fix p2013-cucc-draft4pit-DUP ; marked as duplicate

            ("1626.p2014-ms-01", "no ent", False, nullent),
            ("1626.p2014-ms-02", "no ent", False, nullent),
            ("1626.p2014-ms-03", "no ent", False, nullent),
            ("1626.p2014-ms-04", "no ent", False, nullent),
            ("1626.p2014-ms-05", "no ent", False, nullent),
            ("1626.p2014-ms-06", "no ent", False, nullent),
            ("1626.p2014-ms-07", "no ent", False, nullent),
        ]
        self.p = pending
        for ent in Entrance.objects.all():
            # A list of pairs, not a dict: a dict would silently drop one entry
            # if other_station and tag_station were identical strings.
            for st, ent_type in [(ent.other_station, "other"), (ent.tag_station, "tag")]:
                if st:  # skip both "" and None
                    self.p.append((st, str(ent), ent.needs_surface_work(), ent))
        store_data_issues()
        message = f" -  {len(self.p)} Survey stations found on Entrance objects - not yet validated against survex .pos file."
        print(message)
        return self.p

    def __str__(self):
        return f"{len(self.p)} ent locations"


def validate_entrance_stations(ent=None):
    """Now that we have the located positions, we can check whether the Entrances had correct stations."""
    bads = 0
    good = 0
    url = "/caves"  # fallback

    def station_lower_case(station):
        """Report a station name that would match a SurvexStation if it were all lower case."""
        if not station:
            return
        so = SurvexStation.objects.filter(name=station.lower())
        if so.count() == 1:
            message = f"X - Entrance {ent} station '{station}' should be '{station.lower()}'"
            stash_data_issue(parser="positions", message=message, url=url)
            print(message)

    def validate_ent(ent):
        """For each of the two station strings in an Entrance object,
        validate each string as referring to a valid SurvexStation object.
        But our list of created SurvexStation objects is itself created by taking a list of strings
        and using them to select lines from a .pos file - so this is unnecessarily indirect.
        """
        nonlocal bads
        nonlocal good
        nonlocal url  # so that station_lower_case() sees the per-entrance URL, not the fallback
        # {% url "editentrance" ent.entrance.url_parent cave.slug ent.entrance.slug %}
        # e.g. url = f"/1623/101/1623-101:{ent}_entrance_edit"
        cavelist = ent.cavelist()
        if len(cavelist) == 1:
            cave = cavelist[0]
            url = f"/{cave.url}"
        elif len(cavelist) > 1:
            cave = cavelist[-1]  # set to the last in the list
            url = f"/{cave.url}"
        else:
            print(f"BUGGER bad cave '{cavelist}' on Entrance object {ent}")
            url = "/caves"
        # Pairs, not a dict, so that identical station strings are both checked.
        for st, ent_type in [(ent.other_station, "other"), (ent.tag_station, "tag")]:
            if not st:
                continue
            try:
                so = SurvexStation.objects.filter(name=st)
            except Exception:
                message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'. EXCEPTION."
                stash_data_issue(parser="positions", message=message, url=url)
                print(message)
                bads += 1
                continue

            if so.count() == 1:
                good += 1
                # print(f"OK - Entrance {ent}  '{ent_type}' station '{st}'")
                continue
            if so.count() != 0:
                message = f"{so.count()} found for Entrance {ent} '{ent_type}' station '{st}' {so}"
            else:
                # not found
                message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'"
            if st == ent.best_station():
                message = message + " - AND THIS IS THE 'BEST' ONE"
            else:
                message = message + " - not the 'best'"
            stash_data_issue(parser="positions", message=message, url=url)
            print(message)
            bads += 1
            station_lower_case(st)

    if ent:
        return validate_ent(ent)

    for ent in Entrance.objects.all():
        validate_ent(ent)

    print(f" -  {good} valid SurvexStations of all types found on Entrances.")
    print(f" -  {bads} bad   SurvexStations of all types found on Entrances.")
    return True  # not necessarily.. but unused return value
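
# Usage sketch: validate_entrance_stations() checks every Entrance against the loaded
# SurvexStation objects; validate_entrance_stations(ent=some_entrance) checks just one.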


poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
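# Example survexport .pos line that this regex matches (values here are illustrative):
#   (   410.75,  81220.93,  1623.00 ) 1623.290.entrance
# The four captured groups are x, y, z and the dotted survex station identifier.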

def LoadPositions():
    """First load the survex stations for entrances and fixed points (about 600) into the database.
    Run 'cavern' to produce a complete .3d file, then run 'survexport --pos' to produce a table of
    all survey point positions in UTM coordinates. Then look up each of the 600 positions by name
    to see if we have it in the database and, if we do, save the UTM x/y/z coordinates.
    This gives us the coordinates of the entrances.
    If we don't have it in the database, print an error message and discard it.
    """
    svx_t = 0
    d3d_t = 0
    
    DataIssue.objects.filter(parser="positions").delete()
    SurvexStation.objects.all().delete()


    def runcavern3d():
        outputdir = Path(f"{topdata}.svx").parent

        # print(" -  Regenerating stale cavern .log and .3d for '{}'\n    days old: {:.1f}    {:.1f}    {:.1f}".
        #    format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))

        file3d = Path(f"{topdata}.3d")
        try:
            sp = subprocess.run(
                [settings.CAVERN, "--log", f"--output={outputdir}", f"{topdata}.svx"],
                capture_output=True,
                check=False,
                text=True,
            )  # check=False means exception not raised
            if sp.returncode != 0:
                message = f" ! Error: cavern: creating {file3d} in runcavern3()"
                stash_data_issue(parser="positions", message=message)
                print(message)

                # find the errors in the 1623.log file
                sp = subprocess.run(
                    ["grep", "error:", f"{topdata}.log"], capture_output=True, check=False, text=True
                )  # check=False means exception not raised
                message = f" ! Error: cavern: {sp.stdout} creating {file3d} "
                stash_data_issue(parser="positions", message=message)
                print(message)

        except Exception:
            message = f" ! CalledProcessError 'cavern' in runcavern3d() at {topdata}."
            stash_data_issue(parser="positions", message=message)
            print(message)

            if file3d.is_file():
                message = f" ! CalledProcessError. File permissions {file3d.stat().st_mode} on {str(file3d)}"
                stash_data_issue(parser="positions", message=message)
                print(message)

        if file3d.is_file():  # might be an old one though
            try:
                # print(" -  Regenerating {} {}.3d  in  {}".format(settings.SURVEXPORT, topdata, settings.SURVEX_DATA))
                sp = subprocess.run(
                    [settings.SURVEXPORT, "--pos", f"{file3d}"],
                    cwd=settings.SURVEX_DATA,
                    capture_output=True,
                    check=False,
                    text=True,
                )
                if sp.returncode != 0:
                    print(
                        f" ! Error: survexport creating {topdata}.pos in runcavern3().\n\n"
                        + str(sp.stdout)
                        + "\n\nreturn code: "
                        + str(sp.returncode)
                    )
            except Exception:
                message = f" ! CalledProcessError 'survexport' in runcavern3d() at {file3d}."
                stash_data_issue(parser="positions", message=message)
                print(message)
        else:
            message = f" ! Failed to find {file3d} so aborting generation of new .pos, using old one if present"
            stash_data_issue(parser="positions", message=message)
            print(message)

    topdata = os.fspath(Path(settings.SURVEX_DATA) / settings.SURVEX_TOPNAME)
    print(f" - Generating a list of Pos from {topdata}.svx and then loading...")

    found = 0
    print("\n")  # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file

    completed_process = subprocess.run(["which", f"{settings.CAVERN}"], capture_output=True, check=True, text=True)
    cav_t = os.path.getmtime(completed_process.stdout.strip())

    svxpath = topdata + ".svx"
    d3dpath = topdata + ".3d"
    pospath = topdata + ".pos"

    svx_t = os.path.getmtime(svxpath)

    if os.path.isfile(d3dpath):
        # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
        d3d_t = os.path.getmtime(d3dpath)

    now = time.time()
    if not os.path.isfile(pospath):
        runcavern3d()
    if not os.path.isfile(d3dpath):
        runcavern3d()
    elif svx_t - d3d_t > 0:  # stale: the .3d file is older than the .svx file
        runcavern3d()
    elif now - d3d_t > 24 * 60 * 60:  # >1 days old, re-run anyway
        runcavern3d()
    elif cav_t - d3d_t > 0:  # new version of cavern
        runcavern3d()

    mappoints = {}
    found_points = {}
    pts = MapLocations().points()
    for pt in pts:
        svxid, label, needs_work, ent = pt
        # tuple layout from MapLocations.points(): (station, str(ent), ent.needs_surface_work(), ent)
        if svxid in mappoints:
            print(f" =  seen this svxid {svxid} for {ent} already on entrance {mappoints[svxid]}")
        else:
            mappoints[svxid] = ent
        if svxid == "1":
            print(f"BOGUS {pt}")  # this is now checked for when importing the entrance stations in parsers/caves.py

    if not Path(pospath).is_file():
        message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
        stash_data_issue(parser="positions", message=message, url=f"/entrance_data/{pospath}_edit")
        print(message)
        return

    posfile = open(pospath)
    posfile.readline()  # Drop header
    
    sbdict = {}
    dups = 0
    lineno = 1  # we dropped the header
    for line in posfile:
        lineno += 1
        r = poslineregex.match(line)
        if r:
            x, y, z, sbid = r.groups()  # renamed id to sbid so as not to confuse it with the Django internal .id
            if sbid in sbdict:
                dups += 1
                message = f" ! DUPLICATE SurvexBlock identifier in .pos file '{sbid}'\nfirst seen at line {sbdict[sbid]}\n{lineno} / {line}"
                print(message)
                stash_data_issue(parser="positions", message=message)
            else:
                sbdict[sbid] = lineno

            for sid in mappoints:
                if not sid:  # catch a None entry
                    continue
                if sbid.endswith(sid) or sbid.endswith(sid.lower()):
                    # e.g. (hypothetical) sbid "1623.290.entr" with sid "290.entr" gives blockpath ".1623"
                    blockpath = "." + sbid[: -len(sid)].strip(".")  # only the most recent one that is in mappoints
                    if sid in found_points:
                        found_points[sid] += 1
                    else:
                        found_points[sid] = 1

                    try:
                        ss = SurvexStation(name=sbid)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.entrance = mappoints[sid]
                        ss.save()
                        found += 1
                    except Exception:
                        message = f" ! {lineno} FAIL to create SurvexStation Entrance point {blockpath} {sid}"
                        print(message)
                        stash_data_issue(parser="positions", message=message)
                        store_data_issues()
                        raise
    posfile.close()
    validate_entrance_stations()  # do not need to use the db here really
    positions_filename = Path(pospath).name
    print(f" -  {found} distinct SurvexStation entrance stations identified in {lineno:,} lines in {positions_filename}.")
    if dups > 0:
        print(f" -  {dups} Duplicated SurvexStation entrances found")
        
    # for p in mappoints:
        # if p not in found_points:
            # print(f"Valid point {p} NOT found in {positions_filename}")
    # print(f" -  {len(mappoints)} mappoints,   {len(found_points)} found_points")
    # for sid in found_points:
        # if found_points[sid] > 1:
            # print(f" -  {sid} - {found_points[sid]}")
    store_data_issues()