-rw-r--r-- | core/models/caves.py     |  31
-rw-r--r-- | core/models/survex.py    |   2
-rw-r--r-- | core/views/caves.py      |   4
-rw-r--r-- | core/views/prospect.py   |   2
-rw-r--r-- | core/views/statistics.py |   6
-rw-r--r-- | parsers/caves.py         |  16
-rw-r--r-- | parsers/locations.py     | 156
-rw-r--r-- | parsers/logbooks.py      |   2
-rw-r--r-- | parsers/survex.py        |  15
9 files changed, 144 insertions, 90 deletions
diff --git a/core/models/caves.py b/core/models/caves.py
index 624239d..cac62a8 100644
--- a/core/models/caves.py
+++ b/core/models/caves.py
@@ -14,7 +14,7 @@ from troggle.core.models.survex import SurvexStation, utmToLatLng
 from troggle.core.models.troggle import DataIssue, TroggleModel
 from troggle.core.utils import TROG, writetrogglefile
 
-# Use the TROG global object to cache the cave lookup list. No good for multi-user..
+# Use the TROG global object to cache the cave lookup list. No good for multi-user.., or even multi-page. Pointless in fact.
 Gcavelookup = TROG["caves"]["gcavelookup"]
 Gcave_count = TROG["caves"]["gcavecount"]
 
@@ -63,7 +63,7 @@ class CaveAndEntrance(models.Model):
 
 # moved to models/logbooks.py to avoid cyclic import problem. No I don't know why either.
 class Cave(TroggleModel):
-    # too much here perhaps,
+    # (far) too much here perhaps,
     areacode = models.CharField(max_length=4, blank=True, null=True) # could use models.IntegerChoices
     subarea = models.CharField(max_length=25, blank=True, null=True) # 9, 8c etc.
     depth = models.CharField(max_length=100, blank=True, null=True)
@@ -72,7 +72,7 @@ class Cave(TroggleModel):
     equipment = models.TextField(blank=True, null=True)
     explorers = models.TextField(blank=True, null=True)
     extent = models.CharField(max_length=100, blank=True, null=True)
-    filename = models.CharField(max_length=200)
+    filename = models.CharField(max_length=200) # if a cave is 'pending' this is not set. Otherwise it is.
     kataster_code = models.CharField(max_length=20, blank=True, null=True)
     kataster_number = models.CharField(max_length=10, blank=True, null=True)
     kataster_status = models.TextField(blank=True, null=True)
@@ -87,12 +87,9 @@ class Cave(TroggleModel):
     unofficial_number = models.CharField(max_length=60, blank=True, null=True)
     url = models.CharField(max_length=300, blank=True, null=True, unique = True)
 
-    # class Meta:
-    #     unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
-
-    # href = models.CharField(max_length=100)
-
     class Meta:
+        # we do not enforce uniqueness at the db level as that causes confusing errors for users.
+        # unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
         ordering = ("kataster_code", "unofficial_number")
 
     def slug(self):
@@ -113,10 +110,6 @@ class Cave(TroggleModel):
         else:
             return self.unofficial_number
 
-    # def reference(self): # tidy this up, no longer used?
-    #     REMOVE because of confusion with cave.references which is different
-    #     return f"{self.areacode}-{self.number()}"
-
     def get_absolute_url(self):
         # we do not use URL_ROOT any more.
         if self.kataster_number:
@@ -159,6 +152,13 @@ class Cave(TroggleModel):
     def entrances(self):
         return CaveAndEntrance.objects.filter(cave=self)
+
+    def no_location(self):
+        no_data = True
+        for e in CaveAndEntrance.objects.filter(cave=self):
+            if e.entrance.best_station:
+                no_data = False
+        return no_data
 
     def singleentrance(self):
         return len(CaveAndEntrance.objects.filter(cave=self)) == 1
 
@@ -215,9 +215,6 @@ class Cave(TroggleModel):
         content = t.render(c)
         return (filepath, content, "utf8")
 
-    def getArea(self):
-        return self.areacode
-
 class Entrance(TroggleModel):
     MARKING_CHOICES = (
         ("P", "Paint"),
@@ -338,6 +335,10 @@ class Entrance(TroggleModel):
             return self.exact_station
         if self.other_station:
             return self.other_station
+
+    def best_station_object(self):
+        bs = self.best_station()
+        return SurvexStation.objects.get(name=bs)
 
     def has_photo(self):
         if self.photo:
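Both helpers added above are small conveniences for other code and templates. A minimal sketch of how they might be used from a Django shell, assuming a configured troggle environment with the cave and survex parsers already run; the loop and variable names are illustrative only and are not part of this commit:

from troggle.core.models.caves import Cave, Entrance

# no_location() is intended to flag caves with no located entrance station at all
unlocated = [cave for cave in Cave.objects.all() if cave.no_location()]
print(f"{len(unlocated)} caves have no entrance with a located station")

# best_station_object() turns an entrance's best tag string into the SurvexStation
# object that carries the coordinates; it will raise DoesNotExist if the tag string
# has no matching station in the database.
ent = Entrance.objects.exclude(tag_station="").first()
if ent:
    st = ent.best_station_object()
    print(ent, st.name, st.x, st.y)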
diff --git a/core/models/survex.py b/core/models/survex.py
index 542cfb8..68bae05 100644
--- a/core/models/survex.py
+++ b/core/models/survex.py
@@ -53,7 +53,7 @@ class SurvexStationLookUpManager(models.Manager):
 class SurvexStation(models.Model):
     name = models.CharField(max_length=100)
     # block = models.ForeignKey("SurvexBlock", null=True, on_delete=models.SET_NULL)
-    # block not used since 2020. survex stations objects are only used for entrnce locations and all taken from the .3d file
+    # block not used since 2020. survex stations objects are only used for entrance locations and all taken from the .3d file
     objects = SurvexStationLookUpManager() # overwrites SurvexStation.objects and enables lookup()
     x = models.FloatField(blank=True, null=True)
     y = models.FloatField(blank=True, null=True)
diff --git a/core/views/caves.py b/core/views/caves.py
index cdac66d..5f939dc 100644
--- a/core/views/caves.py
+++ b/core/views/caves.py
@@ -432,7 +432,7 @@ def edit_cave(request, path="", slug=None):
             "cave": cave,
             "message": message,
             #"caveAndEntranceFormSet": ceFormSet,
-            "path": path + "/",
+            "path": path + "/", # used for saving images if attached
         },
     )
 
@@ -552,7 +552,7 @@ def edit_entrance(request, path="", caveslug=None, entslug=None):
             "entletter": entletter,
             "entletterform": entletterform, # is unset if not being used
             "entlettereditable": entlettereditable,
-            "path": path + "/",
+            "path": path + "/", # used for saving images if attached
         },
     )
 
diff --git a/core/views/prospect.py b/core/views/prospect.py
index 1eb82cd..e9d3fde 100644
--- a/core/views/prospect.py
+++ b/core/views/prospect.py
@@ -237,7 +237,7 @@ def prospecting_image(request, name):
         (35323.60, 81357.83, 50, "74"), # From Auer map
     ]:
         (N, E, D) = list(map(float, (N, E, D)))
-        maparea = Cave.objects.get(kataster_number=num).getArea().short_name
+        maparea = Cave.objects.get(kataster_number=num).areacode
         lo = mungecoord(N - D, E + D, name, img)
         hi = mungecoord(N + D, E - D, name, img)
         lpos = mungecoord(N - D, E, name, img)
diff --git a/core/views/statistics.py b/core/views/statistics.py
index d2544e0..2927bd3 100644
--- a/core/views/statistics.py
+++ b/core/views/statistics.py
@@ -289,17 +289,17 @@ def eastings(request):
            ts = e.tag_station
            if ts:
                e.tag_ts = SurvexStation.objects.get(name=ts)
-                print(f"{e} {e.tag_ts} {e.tag_ts.lat()} {e.tag_ts.long()}")
+                #print(f"{e} {e.tag_ts} {e.tag_ts.lat()} {e.tag_ts.long()}")
 
            es = e.exact_station
            if es:
                e.tag_es = SurvexStation.objects.get(name=es)
-                print(f"{e} {e.tag_es} {e.tag_es.lat()} {e.tag_es.long()}")
+                #print(f"{e} {e.tag_es} {e.tag_es.lat()} {e.tag_es.long()}")
 
            os = e.other_station
            if os:
                e.tag_os = SurvexStation.objects.get(name=os)
-                print(f"{e} {e.tag_os} {e.tag_os.lat()} {e.tag_os.long()}")
+                #print(f"{e} {e.tag_os} {e.tag_os.lat()} {e.tag_os.long()}")
        except:
            e.tag_ss = None
 
print(f"{e} {e.tag_os} {e.tag_os.lat()} {e.tag_os.long()}") + #print(f"{e} {e.tag_os} {e.tag_os.lat()} {e.tag_os.long()}") except: e.tag_ss = None diff --git a/parsers/caves.py b/parsers/caves.py index cc9d22d..91985d1 100644 --- a/parsers/caves.py +++ b/parsers/caves.py @@ -10,6 +10,7 @@ from django.db import transaction from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance, GetCaveLookup from troggle.core.models.logbooks import CaveSlug +from troggle.core.models.survex import SurvexStation from troggle.core.models.troggle import DataIssue from troggle.settings import CAVEDESCRIPTIONS, ENTRANCEDESCRIPTIONS, EXPOWEB, SURVEX_DATA @@ -424,15 +425,22 @@ def boolify(boolstrs): def validate_station(station): """It is possible to break troggle entirely by getting this wrong. - These station identifiers are matched against other statsions using .endswith() + These station identifiers are matched against other stations using .endswith() in parsers/locations.py so a simple number here will match hundreds of SUrvexStation objects It should be, e.g. "1623.p240" + + We will test them against survex stations after we have loaded them. """ if station == "": return True + + # CANNOT test against locations as we have not read the survex files yet. Hmph. + + # Must have the right format in its name dot = station.find(".") if dot == -1: + print(dot) # no full stop found. Bad station identifier. raise else: @@ -540,6 +548,7 @@ def read_entrance(filename, ent=None): ent.url=url[0] for st in [ent.exact_station, ent.other_station, ent.tag_station]: + #validate_station(st) try: validate_station(st) except: @@ -547,9 +556,6 @@ def read_entrance(filename, ent=None): #http://localhost:8000/1623/2023-EBH-01/1623-2023-EBH-01:1623-2023-EBH-01_entrance_edit DataIssue.objects.create(parser="entrances", message=message, url=f"/1623/{slug}/{slug}:{slug}_entrance_edit") print(message) - # ent_issues = DataIssue.objects.filter(parser="entrances") - # print(f".. We now have {len(ent_issues)} entrance DataIssues") - return None ent.save() return ent @@ -811,6 +817,8 @@ def read_cave(filename, cave=None): message = f' ! {slug:12} survex filename does not exist :LOSER:"{survex_file[0]}" in "{filename}"' DataIssue.objects.create(parser="caves", message=message, url=f"/{slug[0:4]}/{slug}_cave_edit/") print(message) + # else: + # print(f"{slug:12} survex filename UNSET") if description_file[0]: # if not an empty string diff --git a/parsers/locations.py b/parsers/locations.py index a8f94fb..bde4671 100644 --- a/parsers/locations.py +++ b/parsers/locations.py @@ -28,7 +28,8 @@ todo = """ """ class MapLocations(object): - """Class used only for identifying the entrance locations""" + """Class used only for identifying the entrance locations + Formerly used to put locations on a prospecting map""" p = [ ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"), @@ -43,37 +44,97 @@ class MapLocations(object): ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"), ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"), ("225-96", "BAlm", "Reference", "Bräuning Alm trig point"), - ] + ] # 12 fixed points def points(self): + prior = len(self.p) for ent in Entrance.objects.all(): - if ent.best_station(): - # print(f"{ent.filename}", end=", ") - try: - k = ent.caveandentrance_set.all()[0].cave - except: - message = f" ! 
diff --git a/parsers/locations.py b/parsers/locations.py
index a8f94fb..bde4671 100644
--- a/parsers/locations.py
+++ b/parsers/locations.py
@@ -28,7 +28,8 @@ todo = """
 """
 
 class MapLocations(object):
-    """Class used only for identifying the entrance locations"""
+    """Class used only for identifying the entrance locations
+    Formerly used to put locations on a prospecting map"""
 
     p = [
         ("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point"),
@@ -43,37 +44,97 @@ class MapLocations(object):
         ("laser.0_3", "LSR3", "Reference", "Laser Point 0/3"),
         ("laser.0_5", "LSR5", "Reference", "Laser Point 0/5"),
         ("225-96", "BAlm", "Reference", "Bräuning Alm trig point"),
-    ]
+    ] # 12 fixed points
 
     def points(self):
+        prior = len(self.p)
         for ent in Entrance.objects.all():
-            if ent.best_station():
-                # print(f"{ent.filename}", end=", ")
-                try:
-                    k = ent.caveandentrance_set.all()[0].cave
-                except:
-                    message = f" ! Failed to get Cave linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()} {ent.caveandentrance_set.all()}"
-                    stash_data_issue(parser="positions", message=message)
-                    print(message)
-                    continue # skip this entrance
-                try:
-                    areaName = k.areacode
-                except:
-                    message = f" ! Failed to get areacode on cave '{k}' linked to Entrance:{ent.name} from:{ent.filename} best:{ent.best_station()}"
-                    stash_data_issue(parser="positions", message=message)
-                    print(message)
-                    store_data_issues()
-                    raise
-                self.p.append((ent.best_station(), f"{areaName}-{str(ent)[5:]}", ent.needs_surface_work(), str(ent)))
-        message = f" - {len(self.p)} entrances linked to caves."
+            for st, ent_type in {ent.exact_station: "exact", ent.other_station: "other", ent.tag_station: "tag"}.items():
+                if st != "":
+                    self.p.append((st, str(ent), ent.needs_surface_work(), str(ent)))
+        store_data_issues()
+        found = len(self.p) - prior
+        message = f" - {found} Entrance tags found - not yet validated against survex .pos file."
         print(message)
         return self.p
 
     def __str__(self):
         return f"{len(self.p)} map locations"
 
 
-poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
+def validate_entrance_stations(ent=None):
+    """Now that we have the located positions, we can check if the Entrances had correct tags
+    """
+    bads = 0
+    good = 0
+
+    def tag_lower_case(station):
+
+        so = SurvexStation.objects.filter(name=station.lower())
+        if so.count() == 1:
+            message = f"X - Entrance {ent} station '{station}' should be '{station.lower()}'"
+            stash_data_issue(parser="positions", message=message, url=url)
+            print(message)
+
+    def validate_ent(ent):
+        """For each of the three tag strings in an Entrance object,
+        validate each string as referring to a valid SurvexStation object.
+        But our list of created SurvexStation objects is created by taking a list of strings and using them
+        to select from lines in a .pos file - so this is unnecessarily indirect.
+        """
+        nonlocal bads
+        nonlocal good
+        # {% url "editentrance" ent.entrance.url_parent cave.slug ent.entrance.slug %}
+        # e.g. url = f"/1623/101/1623-101:{ent}_entrance_edit"
+        cavelist = ent.cavelist()
+        if len(cavelist) == 1:
+            cave = cavelist[0]
+            url = f"/{cave.url}"
+        elif len(cavelist) > 1:
+            cave = cavelist[-1]
+            url = f"/{cave.url}"
+        else:
+            print(f"BUGGER {ent} {ent.cavelist()}")
+            url="/caves"
+        for st, ent_type in {ent.exact_station: "exact", ent.other_station: "other", ent.tag_station: "tag"}.items():
+            if st == "":
+                continue
+            try:
+                so = SurvexStation.objects.filter(name=st)
+                if so.count() == 1:
+                    good +=1
+                    # print(f"OK - Entrance {ent} '{ent_type}' station '{st}'")
+                    continue
+                if so.count() != 0:
+                    message =f"{so.count()} found for Entrance {ent} '{ent_type}' station '{st}' {so}"
+                else:
+                    message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'."
+                stash_data_issue(parser="positions", message=message, url=url)
+                print(message)
+                bads +=1
+                tag_lower_case(st)
+                continue
+            except:
+                message = f" ! - Entrance {ent} has invalid '{ent_type}' station '{st}'. EXCEPTION."
+                stash_data_issue(parser="positions", message=message, url=url)
+                print(message)
+                bads +=1
+                continue
+
+    if ent:
+        return validate_ent(ent)
+
+
+    for ent in Entrance.objects.all():
+        validate_ent(ent)
+
+    print(f" - {good} valid SurvexStation tags of all types found on Entrances.")
+    print(f" - {bads} bad SurvexStation tags of all types found on Entrances.")
+    return True # not necessarily.. but unused return value
+
+
+poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
 
 def LoadPositions():
     """First load the survex stations for entrances and fixed points (about 600) into the database.
@@ -87,6 +148,7 @@ def LoadPositions():
     d3d_t = 0
 
     DataIssue.objects.filter(parser="positions").delete()
+    SurvexStation.objects.all().delete()
 
     def runcavern3d():
 
@@ -179,7 +241,7 @@ def LoadPositions():
         runcavern3d()
     elif d3d_t - svx_t > 0: # stale, 3d older than svx file
         runcavern3d()
-    elif now - d3d_t > 60 * 24 * 60 * 60: # >60 days old, re-run anyway
+    elif now - d3d_t > 24 * 60 * 60: # >1 days old, re-run anyway
         runcavern3d()
     elif cav_t - d3d_t > 0: # new version of cavern
         runcavern3d()
@@ -193,25 +255,13 @@ def LoadPositions():
 
     if not Path(pospath).is_file():
         message = f" ! Failed to find {pospath} so aborting generation of entrance locations. "
-        # DataIssue.objects.create(parser="positions", message=message, url=f"/entrance_data/{pospath}_edit")
-        stash_data_issue(parser="positions", message=message)
+        stash_data_issue(parser="positions", message=message, url=f"/entrance_data/{pospath}_edit")
         print(message)
         return
 
     posfile = open(pospath)
     posfile.readline() # Drop header
 
-    # not used survexblock on a SurvexStation since we stopped storing all of them in 2020:
-    # try:
-    #     survexblockroot = SurvexBlock.objects.get(name=ROOTBLOCK)
-    # except:
-    #     try:
-    #         survexblockroot = SurvexBlock.objects.get(id=1)
-    #     except:
-    #         message = " ! FAILED to find root SurvexBlock"
-    #         print(message)
-    #         stash_data_issue(parser="positions", message=message)
-    #         raise
     sbdict = {}
     dups = 0
     lineno = 1 # we dropped the header
@@ -228,33 +278,10 @@ def LoadPositions():
             else:
                 sbdict[sbid] = lineno
+
                 for sid in mappoints:
-                    if sbid.endswith(sid):
+                    if sbid.endswith(sid) or sbid.endswith(sid.lower()):
                         blockpath = "." + sbid[: -len(sid)].strip(".") # only the most recent one that is mappoints
-                        # print(f"# match {sid} {sbid} {blockpath}")
-
-                        # But why are we doing this? Why do we want the survexblock id for each of these ?
-                        # ..because mostly they don't actually appear in any SVX file. We should match them up
-                        # via the cave data, not by this half-arsed syntactic match which almost never works. PMS.
-
-                        # We are reading the .pos file so we only know the SurvexFile not the SurvexBlock.
-
-                        # if False:
-                        #     try:
-                        #         sbqs = SurvexBlock.objects.filter(survexpath=blockpath)
-                        #         if len(sbqs) == 1:
-                        #             sbqs[0]
-                        #         if len(sbqs) > 1:
-                        #             message = f" ! MULTIPLE {len(sbqs):3} SurvexBlocks '{blockpath}' from survex files mention Entrance point '{sbid}' (line {lineno})"
-                        #             print(message)
-                        #             stash_data_issue(parser="positions", message=message)
-                        #             for b in sbqs:
-                        #                 print(f" - {b}")
-                        #             sbqs[0]
-                        #     except:
-                        #         message = f" ! {lineno} FAIL in getting SurvexBlock matching Entrance point {blockpath} {sid}"
-                        #         print(message)
-                        #         stash_data_issue(parser="positions", message=message)
 
                         try:
                             ss = SurvexStation(name=sbid)
                             ss.x = float(x)
@@ -268,6 +295,9 @@ def LoadPositions():
             stash_data_issue(parser="positions", message=message)
             store_data_issues()
             raise
-    print(f" - {found} SurvexStation entrances found.")
-    print(f" - {dups} Duplicated SurvexStation entrances found")
+    validate_entrance_stations() # do not need to use db here really
+    positions_filename = Path(pospath).name
+    print(f" - {found-12} SurvexStation entrance tags indentified in {lineno:,} lines in {positions_filename}.")
+    if dups > 0:
+        print(f" - {dups} Duplicated SurvexStation entrances found")
     store_data_issues()
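The poslineregex above is unchanged by this commit, only moved below the new function; it is what LoadPositions() uses to split each line of the cavern-generated .pos file into three coordinates and a station name, which is then matched against the entrance tags with .endswith(). A standalone sketch, with an invented line in the .pos "( x, y, z ) station.name" style:

import re

poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

# One line in the style of a survex .pos file - the coordinates are made up
line = "(  36670.70,  83317.24, 1903.97 ) 1623.p240"
r = poslineregex.match(line)
if r:
    x, y, z, sbid = r.groups()
    print(sbid, float(x), float(y), float(z))  # 1623.p240 36670.7 83317.24 1903.97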
diff --git a/parsers/logbooks.py b/parsers/logbooks.py
index 90c13aa..33b8838 100644
--- a/parsers/logbooks.py
+++ b/parsers/logbooks.py
@@ -64,7 +64,7 @@ ENTRIES = {
     "2019": 55,
     "2018": 95,
     "2017": 74,
-    "2016": 86,
+    "2016": 87,
     "2015": 80,
     "2014": 67,
     "2013": 52,
diff --git a/parsers/survex.py b/parsers/survex.py
index 70a0962..78cfaa4 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -14,6 +14,8 @@ from troggle.core.models.survex import SurvexBlock, SurvexFile, SurvexPersonRole
 from troggle.core.models.wallets import Wallet
 from troggle.core.models.troggle import DataIssue, Expedition
 from troggle.core.utils import chaosmonkey, get_process_memory
+from troggle.core.utils import write_and_commit
+
 from troggle.parsers.caves import create_new_cave, do_ARGE_cave, AREACODES, ARGEAREAS
 from troggle.parsers.people import GetPersonExpeditionNameLookup, known_foreigner
 
@@ -1278,6 +1280,19 @@ class LoadingSurvex:
             if cave:
                 newfile.cave = cave
                 # print(f"\n - New directory '{newdirectory}' for cave '{cave}'",file=sys.stderr)
+                if not cave.survex_file:
+                    cave.survex_file = svxid + ".svx"
+                    cave.save()
+                    # message = f" - '{cave}' had no survex_file set - setting '{svxid}.svx' writing to {cave.filename})"
+                    message = f" - '{cave}' has no survex_file set - need to set to '{svxid}.svx' in {cave.filename})"
+                    print("\n",message,file=sys.stderr)
+                    stash_data_issue(parser="survex", message=message)
+
+                    # try:
+                    #     cave_file = cave.file_output()
+                    #     write_and_commit([cave_file], f"{cave} Update of cave.survex_file when parsing {svxid}.svx")
+                    # except
+                    #     raise
 
         if not newfile.primary:
             message = f" ! .primary NOT SET in new SurvexFile {svxid} "