Diffstat (limited to 'parsers')
 -rw-r--r--  parsers/cavetab.py  | 35
 -rw-r--r--  parsers/logbooks.py |  2
 -rw-r--r--  parsers/survex.py   | 72
 3 files changed, 85 insertions(+), 24 deletions(-)
diff --git a/parsers/cavetab.py b/parsers/cavetab.py
index bf61d7f..68ea4b3 100644
--- a/parsers/cavetab.py
+++ b/parsers/cavetab.py
@@ -3,7 +3,8 @@ import troggle.core.models as models
from django.conf import settings
import csv, time, re, os, logging
from utils import save_carefully
-from utils import html_to_wiki
+from django.core.urlresolvers import reverse
+import flatpages.models
##format of CAVETAB2.CSV is
KatasterNumber = 0
@@ -54,6 +55,7 @@ Findability = 44
FindabilityComment = 45
def LoadCaveTab():
+
cavetab = open(os.path.join(settings.EXPOWEB, "noinfo", "CAVETAB2.CSV"),'rU')
caveReader = csv.reader(cavetab)
caveReader.next() # Strip out column headers
@@ -76,14 +78,14 @@ def LoadCaveTab():
if line[MultipleEntrances] == 'yes' or line[MultipleEntrances]=='': #When true, this line contains an actual cave, otherwise it is an extra entrance.
args = {}
defaultArgs = {}
-
+
def addToArgs(CSVname, modelName):
if line[CSVname]:
- args[modelName] = html_to_wiki(line[CSVname])
+ args[modelName] = line[CSVname]
def addToDefaultArgs(CSVname, modelName): #This has to do with the non-destructive import. These arguments will be passed as the "default" dictionary in a get_or_create
if line[CSVname]:
- defaultArgs[modelName] = html_to_wiki(line[CSVname])
+ defaultArgs[modelName] = line[CSVname]
# The attributes added using "addToArgs" will be used to look up an existing cave. Those added using "addToDefaultArgs" will not.
addToArgs(KatasterNumber, "kataster_number")
@@ -103,6 +105,7 @@ def LoadCaveTab():
addToDefaultArgs(Extent, "extent")
addToDefaultArgs(SurvexFile, "survex_file")
addToDefaultArgs(Notes, "notes")
+ addToDefaultArgs(AutogenFile, "url")
if line[Area] == "1626":
if line[KatasterNumber] != "":
args["slug"] = line[Area] + "-" + line[KatasterNumber]
@@ -140,6 +143,7 @@ def LoadCaveTab():
newArea = models.Area(short_name = line[Area], parent = area1623)
newArea.save()
newCave.area.add(newArea)
+ newCave.area.add(area1623)
elif created:
newCave.area.add(area1623)
@@ -152,6 +156,7 @@ def LoadCaveTab():
newUnofficialName.save()
logging.info("Added unofficial name "+str(newUnofficialName)+" to cave "+str(newCave)+"\n")
+
if created and line[MultipleEntrances] == '' or \
line[MultipleEntrances] == 'entrance' or \
@@ -165,10 +170,10 @@ def LoadCaveTab():
def addToArgs(CSVname, modelName):
if line[CSVname]:
- args[modelName] = html_to_wiki(line[CSVname])
+ args[modelName] = line[CSVname]
def addToArgsViaDict(CSVname, modelName, dictionary):
if line[CSVname]:
- args[modelName] = dictionary[html_to_wiki(line[CSVname])]
+ args[modelName] = dictionary[line[CSVname]]
addToArgs(EntranceName, 'name')
addToArgs(Explorers, 'explorers')
addToArgs(Map, 'map_description')
@@ -188,6 +193,7 @@ def LoadCaveTab():
"Unmarked": "U",
"": "?",
})
+
addToArgs(MarkingComment, 'marking_comment')
addToArgsViaDict(Findability, 'findability', {"Surveyed": "S",
"Lost": "L",
@@ -200,20 +206,15 @@ def LoadCaveTab():
addToArgs(Northing, 'northing')
addToArgs(Altitude, 'alt')
addToArgs(DescriptionOfOtherPoint, 'other_description')
- def addToArgsSurveyStation(CSVname, modelName):
- if line[CSVname]:
- surveyPoint = models.SurveyStation(name = line[CSVname])
- surveyPoint.save()
- args[modelName] = surveyPoint
- addToArgsSurveyStation(TagPoint, 'tag_station')
- addToArgsSurveyStation(ExactEntrance, 'exact_station')
- addToArgsSurveyStation(OtherPoint, 'other_station')
+ addToArgs(TagPoint, 'tag_station')
+ addToArgs(ExactEntrance, 'exact_station')
+ addToArgs(OtherPoint, 'other_station')
addToArgs(OtherPoint, 'other_description')
if line[GPSpreSA]:
- addToArgsSurveyStation(GPSpreSA, 'other_station')
+ addToArgs(GPSpreSA, 'other_station')
args['other_description'] = 'pre selective availability GPS'
if line[GPSpostSA]:
- addToArgsSurveyStation(GPSpostSA, 'other_station')
+ addToArgs(GPSpostSA, 'other_station')
args['other_description'] = 'post selective availability GPS'
addToArgs(Bearings, 'bearings')
args['slug'] = newCave.slug + entrance_letter
@@ -227,6 +228,8 @@ def LoadCaveTab():
newCaveAndEntrance.save()
logging.info("Added CaveAndEntrance "+str(newCaveAndEntrance)+"\n")
+ f = flatpages.models.EntranceRedirect(originalURL = line[AutogenFile], entrance = newEntrance)
+ f.save()
# lookup function modelled on GetPersonExpeditionNameLookup
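The EntranceRedirect objects created at the end of LoadCaveTab above come from a flatpages model that is not part of this diff. As a rough sketch only, the model would need at least the two fields named in that call for the code to run; the field types and everything else here are assumptions, not taken from the repository:

    # flatpages/models.py -- hypothetical minimal version; only the field names
    # (originalURL, entrance) come from the EntranceRedirect(...) call in cavetab.py.
    from django.db import models
    import troggle.core.models as core_models

    class EntranceRedirect(models.Model):
        # URL of the old autogenerated entrance page (line[AutogenFile])
        originalURL = models.CharField(max_length=200)
        # the entrance that the old URL should now lead to
        entrance = models.ForeignKey(core_models.Entrance)

        def __unicode__(self):
            return self.originalURL
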
diff --git a/parsers/logbooks.py b/parsers/logbooks.py
index e6b553b..af01f46 100644
--- a/parsers/logbooks.py
+++ b/parsers/logbooks.py
@@ -146,7 +146,7 @@ def Parseloghtmltxt(year, expedition, txt):
for trippara in tripparas:
s = re.match('''(?x)(?:\s*<div\sclass="tripdate"\sid=".*?">.*?</div>\s*<p>)? # second date
- \s*(?:<a\s+id="(.*?)"\s*/>)?
+ \s*(?:<a\s+id="(.*?)"\s*/>\s*</a>)?
\s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>(?:<p>)?
\s*<div\s+class="trippeople">\s*(.*?)</div>
\s*<div\s+class="triptitle">\s*(.*?)</div>
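The logbooks change tightens the trip-entry pattern so the optional anchor has to be written as an explicit <a id="..."/></a> pair rather than a bare self-closing tag. A small illustration of what the amended fragment matches, shortened to the anchor and first tripdate parts of the full pattern; the sample HTML and id value are invented for illustration:

    import re

    # shortened copy of the anchor + tripdate portion of the Parseloghtmltxt pattern
    pattern = re.compile(r'''(?x)
        \s*(?:<a\s+id="(.*?)"\s*/>\s*</a>)?
        \s*<div\s+class="tripdate"\s*(?:id="(.*?)")?>(.*?)</div>
    ''')

    sample = '<a id="t2008-07-15a" /></a><div class="tripdate" id="t2008-07-15a">2008-07-15</div>'
    m = pattern.match(sample)
    assert m and m.groups() == ('t2008-07-15a', 't2008-07-15a', '2008-07-15')
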
diff --git a/parsers/survex.py b/parsers/survex.py
index f8ec6d4..2e0b8d3 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -1,5 +1,8 @@
import troggle.settings as settings
import troggle.core.models as models
+import troggle.settings as settings
+
+from subprocess import call, Popen, PIPE
from troggle.parsers.people import GetPersonExpeditionNameLookup
import re
@@ -14,7 +17,13 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
if stardata["type"] == "normal":
- survexleg.tape = float(ls[stardata["tape"]])
+ try:
+ survexleg.tape = float(ls[stardata["tape"]])
+ except ValueError:
+ print "Tape misread in", survexblock.survexfile.path
+ print "Stardata:", stardata
+ print "Line:", ls
+ survexleg.tape = 1000
lclino = ls[stardata["clino"]]
lcompass = ls[stardata["compass"]]
if lclino == "up":
@@ -24,7 +33,13 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
survexleg.compass = 0.0
survexleg.clino = -90.0
elif lclino == "-" or lclino == "level":
- survexleg.compass = float(lcompass)
+ try:
+ survexleg.compass = float(lcompass)
+ except ValueError:
+ print "Compass misread in", survexblock.survexfile.path
+ print "Stardata:", stardata
+ print "Line:", ls
+ survexleg.compass = 1000
survexleg.clino = -90.0
else:
assert re.match("[\d\-+.]+$", lcompass), ls
@@ -37,7 +52,10 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
itape = stardata.get("tape")
if itape:
- survexblock.totalleglength += float(ls[itape])
+ try:
+ survexblock.totalleglength += float(ls[itape])
+ except ValueError:
+ print "Length not added"
survexblock.save()
def LoadSurvexEquate(survexblock, sline):
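The three ValueError guards added above all follow the same pattern: report the unreadable field, substitute an obviously wrong sentinel (1000), and keep parsing rather than abort the whole import. One possible way to factor that out; the helper name and wording are illustrative and not part of the diff:

    # Hypothetical helper capturing the tape/compass/totalleglength pattern above.
    def read_float(ls, index, survexblock, what, sentinel=1000.0):
        """Return ls[index] as a float, or a sentinel if the field is unreadable."""
        try:
            return float(ls[index])
        except (ValueError, IndexError):
            print what, "misread in", survexblock.survexfile.path
            print "Line:", ls
            return sentinel

    # e.g. survexleg.tape = read_float(ls, stardata["tape"], survexblock, "Tape")
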
@@ -86,6 +104,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
LoadSurvexLineLeg(survexblock, stardata, sline, comment)
elif stardata["type"] == "passage":
LoadSurvexLinePassage(survexblock, stardata, sline, comment)
+ #Missing "station" in stardata.
continue
# detect the star command
@@ -151,8 +170,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
for i in range(0, len(ls)):
stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
if ls[0] in ["normal", "cartesian", "nosurvey"]:
- assert "from" in stardata, line
- assert "to" in stardata, line
+ assert (("from" in stardata and "to" in stardata) or "station" in stardata), line
elif ls[0] == "default":
stardata = stardatadefault
else:
@@ -162,7 +180,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
LoadSurvexEquate(survexblock, sline)
else:
- assert cmd.lower() in [ "sd", "equate", "include", "units", "entrance", "fix", "data", "flags", "title", "export", "instrument", "calibrate", ], (cmd, line, survexblock)
+ assert cmd.lower() in [ "sd", "equate", "include", "units", "entrance", "fix", "data", "flags", "title", "export", "instrument", "calibrate", "set", "infer"], (cmd, line, survexblock)
@@ -186,6 +204,30 @@ def ReloadSurvexCave(survex_cave):
def LoadAllSurvexBlocks():
+ models.SurvexBlock.objects.all().delete()
+ models.SurvexFile.objects.all().delete()
+ models.SurvexDirectory.objects.all().delete()
+ models.SurvexEquate.objects.all().delete()
+ models.SurvexLeg.objects.all().delete()
+ models.SurvexTitle.objects.all().delete()
+ models.SurvexPersonRole.objects.all().delete()
+
+ survexfile = models.SurvexFile(path="all", cave=None)
+ survexfile.save()
+ survexfile.SetDirectory()
+
+ #Load all
+ survexblockroot = models.SurvexBlock(name="root", survexpath="", begin_char=0, cave=None, survexfile=survexfile, totalleglength=0.0)
+ survexblockroot.save()
+ fin = survexfile.OpenFile()
+ textlines = [ ]
+ RecursiveLoad(survexblockroot, survexfile, fin, textlines)
+ survexblockroot.text = "".join(textlines)
+ survexblockroot.save()
+
+
+ # Load each cave individually
+ # FIXME: this should be handled by the "load all" pass above
caves = models.Cave.objects.all()
for cave in caves:
if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves", cave.kataster_number)):
@@ -193,4 +235,20 @@ def LoadAllSurvexBlocks():
print "loading", cave
ReloadSurvexCave(cave.kataster_number)
-
+poslineregex = re.compile("^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
+def LoadPos():
+ call([settings.CAVERN, "--output=%s/all.3d" % settings.SURVEX_DATA, "%s/all.svx" % settings.SURVEX_DATA])
+ call([settings.THREEDTOPOS, '%sall.3d' % settings.SURVEX_DATA], cwd = settings.SURVEX_DATA)
+ posfile = open("%sall.pos" % settings.SURVEX_DATA)
+ posfile.readline()#Drop header
+ for line in posfile.readlines():
+ r = poslineregex.match(line)
+ if r:
+ x, y, z, name = r.groups()
+ try:
+ ss = models.SurvexStation.objects.lookup(name)
+ except:
+ continue  # no matching SurvexStation; skip the line rather than use an unbound ss
+ ss.x = float(x)
+ ss.y = float(y)
+ ss.z = float(z)
+ ss.save()  # write the coordinates back to the database
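Taken together, the survex.py changes make a full reimport self-contained: LoadAllSurvexBlocks() wipes and rebuilds the survex tables from all.svx, and LoadPos() shells out to cavern and 3dtopos and reads the resulting all.pos file back into SurvexStation coordinates. A minimal sketch of driving the two loaders, for example from a Django shell in a configured troggle checkout; the function names come from this diff, everything about the environment (settings.CAVERN, settings.THREEDTOPOS, settings.SURVEX_DATA pointing at a survex dataset) is assumed:

    # e.g. inside "python manage.py shell"
    from troggle.parsers import survex

    survex.LoadAllSurvexBlocks()   # delete and rebuild SurvexBlock/SurvexLeg/... from all.svx
    survex.LoadPos()               # run cavern + 3dtopos, then read all.pos into station x/y/z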