diff options
-rw-r--r-- | parsers/locations.py | 23 |
-rw-r--r-- | parsers/survex.py | 43 |
2 files changed, 43 insertions, 23 deletions
diff --git a/parsers/locations.py b/parsers/locations.py index 2984af2..9a6a65e 100644 --- a/parsers/locations.py +++ b/parsers/locations.py @@ -290,27 +290,8 @@ def LoadPositions(): d3dpath = topdata.with_suffix(".3d") pospath = topdata.with_suffix(".pos") - runcavern3d(f"Regen {settings.DEVSERVER=}") # always regenerate .3d and .pos as the *includes may have changed - # if not settings.DEVSERVER: - # runcavern3d(f"Regen - on server {settings.DEVSERVER=}") # always regenerate .3d and .pos on the server - # else: - # # These basic tests fail to capture the case where a *included svx file has changed, - # # typically this is one of the fixedpts *fix files. - # for p in [pospath, d3dpath]: - # if not p.is_file(): - # runcavern3d(f"Creating {p}.3d, .pos") - # svx_t = svxpath.stat().st_mtime - # d3d_t = d3dpath.stat().st_mtime # os.path.getmtime(d3dpath) - # svx_d = datetime.fromtimestamp(svx_t).strftime('%d %b %Y %H:%M:%S') - # d3d_d = datetime.fromtimestamp(d3d_t).strftime('%d %b %Y %H:%M:%S') - - # now = time.time() - # if d3d_t - svx_t < 0: # stale, 3d older than svx file . But .svx timestamp does not reflect *include timestamps - # runcavern3d(f"Regen - stale {d3d_d} earlier than {svx_d}") - # elif now - d3d_t > 24 * 60 * 60: # >1 days old, re-run anyway - # runcavern3d(f"Regen - old") - # elif d3d_t - cav_t < 0: # new version of cavern - # runcavern3d(f"Regen - new survex version {d3d_d} earlier than {cav_d} ") + # we do not need to do this as the previous 'survex' step in databaseReset generated the .3d and .pos file + # runcavern3d(f"Regen {settings.DEVSERVER=}") # always regenerate .3d and .pos as the *includes may have changed mappoints = {} found_points = {} diff --git a/parsers/survex.py b/parsers/survex.py index 28c16ab..4efc6f9 100644 --- a/parsers/survex.py +++ b/parsers/survex.py @@ -2270,8 +2270,47 @@ def FindAndLoadSurvex(): svx_scan.depthinclude = 0 fullpathtotop = str(Path(survexfileroot.path).parent / survexfileroot.path) - print(f" - RunSurvexIfNeeded cavern on '{fullpathtotop}'", file=sys.stderr) - svx_scan.RunSurvexIfNeeded(fullpathtotop, fullpathtotop) + # In fact we always want to run this, and the location stuff later needs the .pos file + # so we should not be using the RunSurvexIfNeeded function. + print(f" - Running cavern on '{fullpathtotop}'", file=sys.stderr) + logpath = Path(fullpathtotop + ".log") + + try: + print( + f" - Regenerating cavern .pos .log and .3d for '{fullpathtotop}'\n at '{logpath}'\n" + ) + + outputdir = Path(str(f"{fullpathtotop}.svx")).parent + sp = subprocess.run( + [settings.CAVERN, "--log", "--pos", f"--output={outputdir}", f"{fullpathtotop}.svx"], + capture_output=True, + check=False, + text=True, + ) + + if sp.returncode != 0: + message = f" ! Error when running {settings.CAVERN}: {fullpathtotop}" + url = f"/survexfile{fullpathtotop}.svx".replace(str(settings.SURVEX_DATA), "") + stash_data_issue(parser="survex", message=message, url=url) + print(message) + print( + "stderr:\n\n" + str(sp.stderr) + "\n\n" + str(sp.stdout) + "\n\nreturn code: " + str(sp.returncode) + ) + self.caverncount += 1 + + # should also collect all the .err files too and create a DataIssue for each one which + # - is nonzero in size AND + # - has Error greater than 5% anywhere, or some other more serious error + + errpath = Path(fullpathtotop + ".err") + if errpath.is_file(): + if errpath.stat().st_size == 0: + errpath.unlink() # delete empty closure error file + except: + message = f' ! FAIL running cavern on survex file "{fullpathtotop}"' + stash_data_issue(parser="survex", message=message) + print(message) + svx_scan.uniquefile[str(survexfileroot)] = ["0"] indent = "" |