summaryrefslogtreecommitdiffstats
path: root/parsers
diff options
context:
space:
mode:
Diffstat (limited to 'parsers')
-rw-r--r--parsers/caves.py163
-rw-r--r--parsers/logbooks.py2
-rw-r--r--parsers/survex.py174
3 files changed, 188 insertions, 151 deletions
diff --git a/parsers/caves.py b/parsers/caves.py
index e9be47b..4b8178a 100644
--- a/parsers/caves.py
+++ b/parsers/caves.py
@@ -87,8 +87,10 @@ def do_pending_cave(k, url, area_1623):
'''
default for a PENDING cave, should be overwritten in the db later if a real cave of the same name exists
in expoweb/cave_data/1623-"k".html
+
+ oops. Now need to do for 1626 area too
'''
- slug = "1623-" + k
+ slug = k
default_note = f"_Survex file found in loser repo but no description in expoweb <br><br><br>\n"
default_note += f"INSTRUCTIONS: First open 'This survex file' (link above the CaveView panel) to find the date and info. Then "
@@ -151,78 +153,6 @@ def do_pending_cave(k, url, area_1623):
print(message)
-def readcaves():
- '''Reads the xml-format HTML files in the EXPOWEB repo, not from the loser repo.
- '''
- # For those caves which do not have cave_data/1623-xxx.html XML files even though they exist and have surveys
- # should put this in a simple list which can be edited using 'Edit this file'
- pending = set()
- fpending = Path(CAVEDESCRIPTIONS, "pendingcaves.txt")
- if fpending.is_file():
- with open(fpending, "r") as fo:
- cids = fo.readlines()
- for cid in cids:
- pending.add(cid.rstrip('\n'))
-
- with transaction.atomic():
- print(" - Deleting Caves and Entrances")
- # attempting to avoid MariaDB crash when doing this
- try:
- Area.objects.all().delete()
- except:
- pass
- try:
- Cave.objects.all().delete()
- except:
- pass
- try:
- Entrance.objects.all().delete()
- except:
- pass
- # Clear the cave data issues and the caves as we are reloading
- DataIssue.objects.filter(parser='areas').delete()
- DataIssue.objects.filter(parser='caves').delete()
- DataIssue.objects.filter(parser='caves ok').delete()
- DataIssue.objects.filter(parser='entrances').delete()
-
- print(" - Creating Areas 1623 and 1626")
- # This crashes on the server with MariaDB even though a null parent is explicitly allowed.
- area_1623= Area.objects.create(short_name = "1623", super=None)
- print(" - Saving Area 1623")
- area_1623.save()
- area_1626= Area.objects.create(short_name = "1626", super=None)
- print(" - Saving Area 1626")
- area_1626.save()
-
- print (" - Setting pending caves")
- # Do this first, so that these empty entries are overwritten as they get properly created.
-
-
- for k in pending:
- url = "1623/" + k # Note we are not appending the .htm as we are modern folks now.
- try:
- do_pending_cave(k, url, area_1623)
- except:
- message = " ! Error. Cannot create pending cave and entrance, pending-id:{}".format(k)
- DataIssue.objects.create(parser='caves', message=message)
- print(message)
- raise
-
- with transaction.atomic():
- print(" - settings.CAVEDESCRIPTIONS: ", CAVEDESCRIPTIONS)
- print(" - Reading Entrances from entrance descriptions xml files")
- for filename in next(os.walk(ENTRANCEDESCRIPTIONS))[2]: #Should be a better way of getting a list of files
- # if filename.endswith('.html'):
- # if Path(filename).stem[5:] in pending:
- #            print(f'Skipping pending entrance dummy file <{filename}>')
- # else:
- # readentrance(filename)
- readentrance(filename)
-
- print(" - Reading Caves from cave descriptions xml files")
- for filename in next(os.walk(CAVEDESCRIPTIONS))[2]: #Should be a better way of getting a list of files
- if filename.endswith('.html'):
- readcave(filename)
def readentrance(filename):
global entrances_xslug
@@ -484,4 +414,89 @@ def getXML(text, itemname, minItems = 1, maxItems = None, printwarnings = True,
"max": maxItems} + " in file " + context
DataIssue.objects.create(parser='caves', message=message)
print(message)
- return items \ No newline at end of file
+ return items
+
+def readcaves():
+ '''Reads the xml-format HTML files in the EXPOWEB repo, not from the loser repo.
+ '''
+ # For those caves which do not have cave_data/1623-xxx.html XML files even though they exist and have surveys
+ # should put this in a simple list which can be edited using 'Edit this file'
+ pending = set()
+ fpending = Path(CAVEDESCRIPTIONS, "pendingcaves.txt")
+ if fpending.is_file():
+ with open(fpending, "r") as fo:
+ cids = fo.readlines()
+ for cid in cids:
+ pending.add(cid.rstrip('\n'))
+
+ with transaction.atomic():
+ print(" - Deleting Caves and Entrances")
+ # attempting to avoid MariaDB crash when doing this
+ try:
+ Area.objects.all().delete()
+ except:
+ pass
+ try:
+ Cave.objects.all().delete()
+ except:
+ pass
+ try:
+ Entrance.objects.all().delete()
+ except:
+ pass
+ # Clear the cave data issues and the caves as we are reloading
+ DataIssue.objects.filter(parser='areas').delete()
+ DataIssue.objects.filter(parser='caves').delete()
+ DataIssue.objects.filter(parser='caves ok').delete()
+ DataIssue.objects.filter(parser='entrances').delete()
+
+    print(" - Creating Areas 1623, 1624 and 1626")
+ # This crashes on the server with MariaDB even though a null parent is explicitly allowed.
+ area_1623= Area.objects.create(short_name = "1623", super=None)
+ print(" - Saving Area 1623")
+ area_1623.save()
+ area_1624= Area.objects.create(short_name = "1624", super=None)
+ print(" - Saving Area 1624")
+ area_1624.save()
+
+ area_1626= Area.objects.create(short_name = "1626", super=None)
+ print(" - Saving Area 1626")
+ area_1626.save()
+
+ print (" - Setting pending caves")
+ # Do this first, so that these empty entries are overwritten as they get properly created.
+
+ for k in pending:
+
+ url = k.replace("-","/") # Note we are not appending the .htm as we are modern folks now.
+ area = area_1623
+ areanum = k[0:3]
+ if areanum == "1623":
+ area = area_1623
+ if areanum == "1624":
+ area = area_1624
+ if areanum == "1626":
+ area = area_1626
+ try:
+ do_pending_cave(k, url, area)
+ except:
+ message = f" ! Error. Cannot create pending cave and entrance, pending-id:{k} in area {areanum}"
+ DataIssue.objects.create(parser='caves', message=message)
+ print(message)
+ raise
+
+ with transaction.atomic():
+ print(" - settings.CAVEDESCRIPTIONS: ", CAVEDESCRIPTIONS)
+ print(" - Reading Entrances from entrance descriptions xml files")
+ for filename in next(os.walk(ENTRANCEDESCRIPTIONS))[2]: #Should be a better way of getting a list of files
+ # if filename.endswith('.html'):
+ # if Path(filename).stem[5:] in pending:
+ #            print(f'Skipping pending entrance dummy file <{filename}>')
+ # else:
+ # readentrance(filename)
+ readentrance(filename)
+
+ print(" - Reading Caves from cave descriptions xml files")
+ for filename in next(os.walk(CAVEDESCRIPTIONS))[2]: #Should be a better way of getting a list of files
+ if filename.endswith('.html'):
+ readcave(filename)
diff --git a/parsers/logbooks.py b/parsers/logbooks.py
index 09da662..cc589f0 100644
--- a/parsers/logbooks.py
+++ b/parsers/logbooks.py
@@ -59,7 +59,7 @@ noncaveplaces = [ "QMplaceholder", "Journey", "Loser Plateau", "UNKNOWN", 'plate
logdataissues = TROG['issues']['logdataissues']
trips ={}
-entries = { "2019": 36, "2018": 74, "2017": 60, "2016": 81, "2015": 79,
+entries = { "2019": 44, "2018": 74, "2017": 60, "2016": 81, "2015": 79,
"2014": 65, "2013": 51, "2012": 75, "2011": 68, "2010": 22, "2009": 52,
"2008": 49, "2007": 111, "2006": 60, "2005": 55, "2004": 76, "2003": 40, "2002": 31,
"2001": 48, "2000": 54, "1999": 79, "1998": 43, "1997": 53, "1996": 94, "1995": 41,
diff --git a/parsers/survex.py b/parsers/survex.py
index 9443603..4a5bba4 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -86,6 +86,9 @@ class MapLocations(object):
def __str__(self):
return "{} map locations".format(len(self.p))
+
+def get_offending_filename(path):
+ return "/survexfile/" + path + ".svx"
class SurvexLeg():
"""No longer a models.Model subclass, so no longer a database table
@@ -186,7 +189,7 @@ class LoadingSurvex():
def __init__(self):
self.caveslist = GetCaveLookup()
pass
-
+
def LoadSurvexFallThrough(self, survexblock, line, cmd):
if cmd == "require":
pass # should we check survex version available for processing?
@@ -195,13 +198,13 @@ class LoadingSurvex():
pass # we ignore all these, which is fine.
else:
if cmd in ["include", "data", "flags", "title", "entrance","set", "units", "alias", "ref"]:
- message = "! Unparsed [*{}]: '{}' {} - not an error (probably)".format(cmd, line, survexblock.survexfile.path)
- print((self.insp+message))
- DataIssue.objects.create(parser='survex', message=message)
+ message = "! Warning. Unparsed [*{}]: '{}' {} - not an error (probably)".format(cmd, line, survexblock.survexfile.path)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
else:
message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
- print((self.insp+message))
- DataIssue.objects.create(parser='survex', message=message)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
def LoadSurvexTeam(self, survexblock, line):
"""Interpreting the *team fields means interpreting older style survex as well as current survex standard,
@@ -241,7 +244,7 @@ class LoadingSurvex():
self.flagsstar["splayalias"] = True
else:
message = "! Bad *ALIAS: '{}' ({}) {}".format(line, survexblock, survexblock.survexfile.path)
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survex', message=message)
def LoadSurvexUnits(self, survexblock, line):
@@ -255,7 +258,7 @@ class LoadingSurvex():
self.unitsfactor = float(factor)
if debugprint:
message = "! *UNITS NUMERICAL conversion [{}x] '{}' ({}) {}".format(factor, line, survexblock, survexblock.survexfile.path)
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survexunits', message=message)
feet = re.match("(?i).*feet$",line)
@@ -266,7 +269,7 @@ class LoadingSurvex():
self.units = "metres"
else:
message = "! *UNITS in YARDS!? - not converted '{}' ({}) {}".format(line, survexblock, survexblock.survexfile.path)
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survexunits', message=message)
def LoadSurvexDate(self, survexblock, line):
@@ -282,7 +285,7 @@ class LoadingSurvex():
expeditions = findexpedition(year)
if len(expeditions) != 1 :
message = f"! More than one expedition in year {year} '{line}' ({survexblock}) {survexblock.survexfile.path}"
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survexunits', message=message)
expo= expeditions[0]
@@ -311,8 +314,8 @@ class LoadingSurvex():
setdate(year)
else:
message = "! DATE unrecognised '{}' ({}) {}".format(line, survexblock, survexblock.survexfile.path)
- print((self.insp+message))
- DataIssue.objects.create(parser='survex', message=message)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
def LoadSurvexLeg(self, survexblock, sline, comment):
"""This reads compass, clino and tape data but only keeps the tape lengths,
@@ -352,30 +355,36 @@ class LoadingSurvex():
ls = sline.lower().split()
# skip all splay legs
- if ls[datastar["from"]] == ".." or ls[datastar["from"]] == ".":
- if debugprint:
- print("Splay in ", survexblock.survexfile.path)
- return
- if ls[datastar["to"]] == ".." or ls[datastar["to"]] == ".":
- if debugprint:
- print("Splay in ", survexblock.survexfile.path)
- return
- if self.flagsstar["splayalias"]:
- if ls[datastar["from"]] == "-":
+ try:
+ if ls[datastar["from"]] == ".." or ls[datastar["from"]] == ".":
if debugprint:
- print("Aliased splay in ", survexblock.survexfile.path)
+ print("Splay in ", survexblock.survexfile.path)
return
- if ls[datastar["to"]] == "-":
+ if ls[datastar["to"]] == ".." or ls[datastar["to"]] == ".":
if debugprint:
- print("Aliased splay in ", survexblock.survexfile.path)
+ print("Splay in ", survexblock.survexfile.path)
return
+ if self.flagsstar["splayalias"]:
+ if ls[datastar["from"]] == "-":
+ if debugprint:
+ print("Aliased splay in ", survexblock.survexfile.path)
+ return
+ if ls[datastar["to"]] == "-":
+ if debugprint:
+ print("Aliased splay in ", survexblock.survexfile.path)
+ return
+ except:
+ message = ' ! datastar parsing from/to incorrect in line %s in %s' % (ls, survexblock.survexfile.path)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))
+ return
try:
tape = ls[datastar["tape"]]
except:
message = ' ! datastar parsing incorrect in line %s in %s' % (ls, survexblock.survexfile.path)
- print((self.insp+message))
- DataIssue.objects.create(parser='survexleg', message=message)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))
survexleg.tape = invalid_tape
return
# e.g. '29/09' or '(06.05)' in the tape measurement
@@ -387,34 +396,34 @@ class LoadingSurvex():
tape = float(tape) * self.unitsfactor
if debugprint:
message = " ! Units: Length scaled {}m '{}' in ({}) units:{} factor:{}x".format(tape, ls, survexblock.survexfile.path, self.units, self.unitsfactor)
- print((self.insp+message))
- DataIssue.objects.create(parser='survexleg', message=message)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))
if self.units =="feet":
tape = float(tape) / METRESINFEET
if debugprint:
message = " ! Units: converted to {:.3f}m from {} '{}' in ({})".format(tape, self.units, ls, survexblock.survexfile.path)
- print((self.insp+message))
- DataIssue.objects.create(parser='survexleg', message=message)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))
survexleg.tape = float(tape)
self.legsnumber += 1
except ValueError:
message = " ! Value Error: Tape misread in line'{}' in {} units:{}".format(ls, survexblock.survexfile.path, self.units)
- print((self.insp+message))
- DataIssue.objects.create(parser='survexleg', message=message)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))
survexleg.tape = invalid_tape
try:
survexblock.legslength += survexleg.tape
self.slength += survexleg.tape
except ValueError:
message = " ! Value Error: Tape length not added '{}' in {} units:{}".format(ls, survexblock.survexfile.path, self.units)
- print((self.insp+message))
- DataIssue.objects.create(parser='survexleg', message=message)
+ print(self.insp+message)
+ DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))
try:
lcompass = ls[datastar["compass"]]
except:
message = ' ! Value Error: Compass not found in line %s in %s' % (ls, survexblock.survexfile.path)
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survexleg', message=message)
lcompass = invalid_compass
@@ -425,7 +434,7 @@ class LoadingSurvex():
print((" datastar:", datastar))
print((" Line:", ls))
message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
- DataIssue.objects.create(parser='survexleg', message=message)
+ DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))
lclino = invalid_clino
if lclino == "up":
@@ -445,7 +454,7 @@ class LoadingSurvex():
print((" Line:", ls))
message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass,
ls, survexblock.survexfile.path)
- DataIssue.objects.create(parser='survexleg', message=message)
+ DataIssue.objects.create(parser='survexleg', message=message, url=get_offending_filename(survexblock.survexfile.path))
survexleg.compass = invalid_compass
# delete the object to save memory
@@ -453,17 +462,17 @@ class LoadingSurvex():
def LoadSurvexRef(self, survexblock, args):
#print(self.insp+ "*REF ---- '"+ args +"'")
- url=f'/survexfile/{survexblock.survexfile.path}'
+ url= get_offending_filename(survexblock.survexfile.path)
# *REF but also ; Ref years from 1960 to 2039
refline = self.rx_ref_text.match(args)
if refline:
# a textual reference such as "1996-1999 Not-KH survey book pp 92-95"
- print(self.insp+ "*REF quoted text so ignored:"+ args)
+ print(f'{self.insp} *REF quoted text so ignored:{args} in {survexblock.survexfile.path}')
return
if len(args)< 4:
message = " ! Empty or BAD *REF statement '{}' in '{}'".format(args, survexblock.survexfile.path)
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survex', message=message, url=url)
return
@@ -484,18 +493,18 @@ class LoadingSurvex():
wallet = "0" + wallet
if not (int(yr)>1960 and int(yr)<2039):
message = " ! Wallet year out of bounds {yr} '{refscan}' {survexblock.survexfile.path}"
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survex', message=message, url=url)
refscan = "%s#%s%s" % (yr, letterx, wallet)
try:
if int(wallet)>99:
- message = " ! Wallet *REF {} - too big in '{}'".format(refscan, survexblock.survexfile.path)
- print((self.insp+message))
+ message = " ! Wallet *REF {} - very big (more than 99) so probably wrong in '{}'".format(refscan, survexblock.survexfile.path)
+ print(self.insp+message)
DataIssue.objects.create(parser='survex', message=message, url=url)
except:
message = " ! Wallet *REF {} - not numeric in '{}' -- parsing continues".format(refscan, survexblock.survexfile.path)
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survex', message=message, url=url)
manywallets = Wallet.objects.filter(walletname=refscan)
if manywallets:
@@ -504,11 +513,11 @@ class LoadingSurvex():
survexblock.save()
if len(manywallets) > 1:
message = " ! Wallet *REF {} - more than one found {} scan folders in {}".format(refscan, len(manywallets), survexblock.survexfile.path)
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survex', message=message, url=url)
else:
message = " ! Wallet *REF '{}' - NOT found '{}'".format(refscan, survexblock.survexfile.path)
- print((self.insp+message))
+ print(self.insp+message)
DataIssue.objects.create(parser='survex', message=message, url=url)
def TickSurvexQM(self, survexblock, qmtick):
@@ -520,11 +529,11 @@ class LoadingSurvex():
#raise
message = f' ! QM TICK find FAIL QM{qmtick.group(1)} date:"{qmtick.group(2)}" qmlist:"{qm}" in "{survexblock.survexfile.path}" + comment:"{qmtick.group(3)}" '
print(message)
- DataIssue.objects.create(parser='survex', message=message, url=f'/survexfile/{survexblock.survexfile.path}.svx')
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
if len(qm)>1:
message = f' ! QM TICK MULTIPLE found FAIL QM{qmtick.group(1)} date:"{qmtick.group(2)}" in "{survexblock.survexfile.path}" + comment:"{qmtick.group(3)}" '
print(message)
- DataIssue.objects.create(parser='survex', message=message, url=f'/survexfile/{survexblock.survexfile.path}.svx')
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
qm[0].ticked = True
qm[0].save()
@@ -578,14 +587,10 @@ class LoadingSurvex():
expoyear = str(survexblock.date.year),
cave = survexblock.survexfile.cave)
qm.save
- if survexblock.survexfile.cave.kataster_number == "359":
- message = " ! QM{} '{}' CREATED in DB in '{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
- print(insp+message)
- DataIssue.objects.create(parser='survex', message=message)
except:
message = " ! QM{} FAIL to create {} in'{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
print(insp+message)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
def LoadSurvexDataNormal(self,survexblock,args):
"""Sets the order for data elements in this and following blocks, e.g.
@@ -614,12 +619,19 @@ class LoadingSurvex():
message = " ! - Unrecognised *data normal statement '{}' {}|{}".format(args, survexblock.name, survexblock.survexpath)
print(message)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
return
else:
datastar = self.datastardefault
# ls = ["normal", "from", "to", "tape", "compass", "clino" ]
for i in range(1, len(ls)): # len[0] is "normal"
+ if ls[i].lower() =="newline":
+                message  = f" ! - ABORT *data statement has NEWLINE in it in {survexblock.survexfile.path}. Not parsed by troggle. '{args}'"
+ print(message)
+ print(message,file=sys.stderr)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
+ return False
+
if ls[i] in ["bearing","compass"]:
datastar["compass"] = i-1
if ls[i] in ["clino","gradient"]:
@@ -638,7 +650,7 @@ class LoadingSurvex():
message = " ! - Unrecognised *data statement '{}' {}|{}".format(args, survexblock.name, survexblock.survexpath)
print(message)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
def LoadSurvexFlags(self, args):
# Valid flags are DUPLICATE, SPLAY, and SURFACE, and a flag may be preceded with NOT to turn it off.
@@ -709,7 +721,7 @@ class LoadingSurvex():
self.survexdict[self.svxdirs[headpath.lower()]] = [] # list of the files in the directory
return self.svxdirs[headpath.lower()]
- def ReportNonCaveIncludes(self, headpath, includelabel):
+ def ReportNonCaveIncludes(self, headpath, includelabel, depth):
        """Ignore surface, kataster and gpx *include survex files
"""
if headpath in self.ignorenoncave:
@@ -723,11 +735,11 @@ class LoadingSurvex():
#print("\n"+message)
#print("\n"+message,file=sys.stderr)
return
- message = f" ! Error: '{headpath}' FAILURE (while creating '{includelabel}' in db - not a cave or in the ignore list of surface surveys. )"
+ message = f" ! Error: FAILURE '{headpath}' while creating '{includelabel}' at depth:'{depth}'. Not a cave or in the ignore list:'{self.ignoreprefix}'"
# getting this triggered for gpx/2018 (cavern error) but not for gpx/2017 (no content).
print("\n"+message)
print("\n"+message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(headpath))
print(f' # datastack in LoadSurvexFile:{includelabel} type:', end="",file=sys.stderr)
for dict in self.datastack:
print(f'{dict["type"].upper()} ', end="",file=sys.stderr)
@@ -763,14 +775,14 @@ class LoadingSurvex():
message = " ! 'None' SurvexDirectory returned from GetSurvexDirectory({})".format(headpath)
print(message)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url = f'/survexfile/{svxid}')
if cave:
newdirectory.cave = cave
newfile.cave = cave
# print(f"\n - New directory {newdirectory} for cave {newdirectory.cave}",file=sys.stderr)
- else: # probably a surface survey
- self.ReportNonCaveIncludes(headpath, svxid)
+ else: # probably a surface survey, or a cave in a new area e.g. 1624 not previously managed, and not in the pending list
+ self.ReportNonCaveIncludes(headpath, svxid, depth)
if not newfile.survexdirectory:
message = " ! SurvexDirectory NOT SET in new SurvexFile {} ".format(svxid)
@@ -845,7 +857,7 @@ class LoadingSurvex():
else:
message = f' ! QM Unrecognised as valid in "{survexblock.survexfile.path}" QM{qml.group(1)} "{qml.group(2)}" : regex failure, typo?'
print(message)
- DataIssue.objects.create(parser='survex', message=message, url=f'/survexfile/{survexblock.survexfile.path}.svx')
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(survexblock.survexfile.path))
included = self.rx_comminc.match(comment)
@@ -1068,7 +1080,11 @@ class LoadingSurvex():
print(" # CHANGE 'any' flag now:'{}' was:{} ".format(self.flagsstar["skiplegs"], oldflags["skiplegs"]))
elif self.rx_data.match(cmd):
- self.LoadSurvexDataNormal(survexblock, args)
+ if self.LoadSurvexDataNormal(survexblock, args):
+ pass
+ else:
+ # Abort, we do not cope with this *data format
+ return
elif self.rx_alias.match(cmd):
self.LoadSurvexAlias(survexblock, args)
elif self.rx_entrance.match(cmd):
@@ -1085,7 +1101,7 @@ class LoadingSurvex():
message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
print(message)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, )
else:
self.LoadSurvexFallThrough(survexblock, args, cmd)
@@ -1120,7 +1136,7 @@ class LoadingSurvex():
self.legsnumber = nlegstotal
self.slength = slengthtotal
-
+
def PushdownStackScan(self, survexblock, path, fin, flinear, fcollate):
"""Follows the *include links in all the survex files from the root file 1623.svx
and reads only the *include and *begin and *end statements. It produces a linearised
@@ -1130,23 +1146,24 @@ class LoadingSurvex():
indent = " " * self.depthinclude
sys.stderr.flush();
self.callcount +=1
+
if self.callcount % 10 ==0 :
print(".", file=sys.stderr,end='')
if self.callcount % 500 ==0 :
print("\n ", file=sys.stderr,end='')
if path in self.svxfileslist:
- message = " * Warning. Duplicate detected in *include list at callcount:{} depth:{} file:{}".format(self.callcount, self.depthinclude, path)
+ message = f" * Warning. Duplicate detected. We have already seen this *include '{path}' from another survex file. Detected at callcount:{self.callcount} depth:{self.depthinclude}"
print(message)
print(message,file=flinear)
print("\n"+message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(path))
if self.svxfileslist.count(path) > 20:
- message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(path)
+ message = " ! ERROR. Survex file already *included 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(path)
print(message)
print(message,file=flinear)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(path))
return
self.svxfileslist.append(path)
@@ -1157,8 +1174,7 @@ class LoadingSurvex():
message = f" ! ERROR *include file '{path}' in '{survexblock}' has UnicodeDecodeError"
print(message)
print(message,file=sys.stderr)
- offendingfile = "/survexfile/" + path + ".svx"
- DataIssue.objects.create(parser='survex', message=message, url=offendingfile)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(path))
return # skip this survex file and all things *included in it
@@ -1173,7 +1189,7 @@ class LoadingSurvex():
message = message + f" - line {self.lineno} {survexblock}\n - Parsing aborted. NERD++ needed to fix it"
print(message)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(path))
return # skip this survex file and all things *included in it
includestmt =self.rx_include.match(svxline)
@@ -1208,7 +1224,7 @@ class LoadingSurvex():
print(message)
print(message,file=flinear)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(path))
flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, pop))
fcollate.write(";*edulcni {}\n".format(pop))
fininclude.close()
@@ -1218,7 +1234,7 @@ class LoadingSurvex():
message = " ! ERROR *include file not found for:'{}'".format(includepath)
print(message)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(path))
elif re.match("(?i)begin$", cmd):
self.depthbegin += 1
depth = " " * self.depthbegin
@@ -1240,7 +1256,7 @@ class LoadingSurvex():
print(message)
print(message,file=flinear)
print(message,file=sys.stderr)
- DataIssue.objects.create(parser='survex', message=message)
+ DataIssue.objects.create(parser='survex', message=message, url=get_offending_filename(path))
self.depthbegin -= 1
pass
@@ -1302,6 +1318,12 @@ class LoadingSurvex():
logpath = Path(fullpath + ".log")
outputdir = Path(svxpath).parent
+ if not svxpath.is_file():
+ message = f' ! BAD survex file "{fullpath}" specified in *include (somewhere).. '
+ DataIssue.objects.create(parser='entrances', message=message)
+ print(message)
+ return
+
if not logpath.is_file(): # always run if logfile not there
runcavern()
return