author     expo <expo@seagrass.goatchurch.org.uk>  2009-09-14 23:09:50 +0100
committer  expo <expo@seagrass.goatchurch.org.uk>  2009-09-14 23:09:50 +0100
commit     2be3e4ce9d8943cfacc405127f9e08f9872b4ebf (patch)
tree       02065bfa8fef9462665eeee4a0414e767a1bc1eb /parsers/surveys.py
parent     1294444026718d2c3f46db2febafbe2b685b7a7d (diff)
download   troggle-2be3e4ce9d8943cfacc405127f9e08f9872b4ebf.tar.gz
           troggle-2be3e4ce9d8943cfacc405127f9e08f9872b4ebf.tar.bz2
           troggle-2be3e4ce9d8943cfacc405127f9e08f9872b4ebf.zip
get survey scans into database
Diffstat (limited to 'parsers/surveys.py')
-rw-r--r--  parsers/surveys.py  147
1 file changed, 25 insertions(+), 122 deletions(-)
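The rewritten LoadListScans() in the diff below takes the scans directory as an argument instead of hard-coding os.path.join(settings.SURVEY_SCANS, "surveyscans"), and folds the per-wallet file loading from the deleted LoadListScansFile() back inline. The call site is not part of this diff; the following is only a minimal sketch of how a caller might now drive it, where the import path and the driver function name are assumptions rather than code from this commit:

    # hypothetical driver -- not part of this commit
    import os
    from django.conf import settings
    from troggle.parsers.surveys import LoadListScans

    def load_scans():
        # build SurvexScansFolder/SurvexScanSingle models for every wallet
        # under the surveyscans tree (path layout assumed from the old code)
        LoadListScans(os.path.join(settings.SURVEY_SCANS, "surveyscans"))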
diff --git a/parsers/surveys.py b/parsers/surveys.py
index 8c06c9a..a67f8fe 100644
--- a/parsers/surveys.py
+++ b/parsers/surveys.py
@@ -1,4 +1,4 @@
-import sys, os, types, logging, stat
+import sys, os, types, logging
#sys.path.append('C:\\Expo\\expoweb')
#from troggle import *
#os.environ['DJANGO_SETTINGS_MODULE']='troggle.settings'
@@ -24,13 +24,12 @@ def get_or_create_placeholder(year):
placeholder_logbook_entry, newly_created = save_carefully(LogbookEntry, lookupAttribs, nonLookupAttribs)
return placeholder_logbook_entry
-# dead
def readSurveysFromCSV():
try: # could probably combine these two
surveytab = open(os.path.join(settings.SURVEY_SCANS, "Surveys.csv"))
except IOError:
import cStringIO, urllib
- surveytab = cStringIO.StringIO(urllib.urlopen(settings.SURVEY_SCANS + "/Surveys.csv").read())
+ surveytab = cStringIO.StringIO(urllib.urlopen(settings.SURVEY_SCANS + "Surveys.csv").read())
dialect=csv.Sniffer().sniff(surveytab.read())
surveytab.seek(0,0)
surveyreader = csv.reader(surveytab,dialect=dialect)
@@ -76,7 +75,6 @@ def readSurveysFromCSV():
logging.info("added survey " + survey[header['Year']] + "#" + surveyobj.wallet_number + "\r")
-# dead
def listdir(*directories):
try:
return os.listdir(os.path.join(settings.SURVEYS, *directories))
@@ -136,13 +134,11 @@ def parseSurveyScans(year, logfile=None):
continue
scanObj.save()
-# dead
def parseSurveys(logfile=None):
readSurveysFromCSV()
for year in Expedition.objects.filter(year__gte=2000): #expos since 2000, because paths and filenames were nonstandard before then
parseSurveyScans(year)
-# dead
def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
file=Image.open(filePath)
print filePath
@@ -152,11 +148,10 @@ def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the
return False
-# handles url or file, so we can refer to a set of scans on another server
+# handles url or file
def GetListDir(sdir):
res = [ ]
if sdir[:7] == "http://":
- assert False, "Not written"
s = urllib.urlopen(sdir)
else:
for f in os.listdir(sdir):
@@ -165,136 +160,44 @@ def GetListDir(sdir):
res.append((f, ff, os.path.isdir(ff)))
return res
-
-
-
-
-def LoadListScansFile(survexscansfolder):
- gld = [ ]
-
- # flatten out any directories in these book files
- for (fyf, ffyf, fisdiryf) in GetListDir(survexscansfolder.fpath):
- if fisdiryf:
- gld.extend(GetListDir(ffyf))
- else:
- gld.append((fyf, ffyf, fisdiryf))
-
- for (fyf, ffyf, fisdiryf) in gld:
- assert not fisdiryf, ffyf
- if re.search("\.(?:png|jpg|jpeg)(?i)$", fyf):
- survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
- survexscansingle.save()
-
-
# this iterates through the scans directories (either here or on the remote server)
# and builds up the models we can access later
-def LoadListScans():
+def LoadListScans(surveyscansdir):
SurvexScanSingle.objects.all().delete()
SurvexScansFolder.objects.all().delete()
- # first do the smkhs (large kh survey scans) directory
- survexscansfoldersmkhs = SurvexScansFolder(fpath=os.path.join(settings.SURVEY_SCANS, "smkhs"), walletname="smkhs")
- if os.path.isdir(survexscansfoldersmkhs.fpath):
- survexscansfoldersmkhs.save()
- LoadListScansFile(survexscansfoldersmkhs)
-
-
- # iterate into the surveyscans directory
- for f, ff, fisdir in GetListDir(os.path.join(settings.SURVEY_SCANS, "surveyscans")):
+ for f, ff, fisdir in GetListDir(surveyscansdir):
if not fisdir:
continue
# do the year folders
if re.match("\d\d\d\d$", f):
for fy, ffy, fisdiry in GetListDir(ff):
- assert fisdiry, ffy
+ if not fisdiry:
+ assert fy == "index", ffy
+ continue
survexscansfolder = SurvexScansFolder(fpath=ffy, walletname=fy)
survexscansfolder.save()
- LoadListScansFile(survexscansfolder)
-
- # do the
+ for fyf, ffyf, fisdiryf in GetListDir(ffy):
+ assert not fisdiryf, ffyf
+ survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
+ survexscansingle.save()
elif f != "thumbs":
survexscansfolder = SurvexScansFolder(fpath=ff, walletname=f)
survexscansfolder.save()
- LoadListScansFile(survexscansfolder)
+ gld = [ ]
+
+ # flatten out any directories in these book files
+ for (fyf, ffyf, fisdiryf) in GetListDir(ff):
+ if fisdiryf:
+ gld.extend(GetListDir(ffyf))
+ else:
+ gld.append((fyf, ffyf, fisdiryf))
+
+ for (fyf, ffyf, fisdiryf) in gld:
+ assert not fisdiryf, ffyf
+ survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
+ survexscansingle.save()
-
-def FindTunnelScan(tunnelfile, path):
- scansfolder, scansfile = None, None
- mscansdir = re.search("(\d\d\d\d#\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg))$", path)
- if mscansdir:
- scansfolderl = SurvexScansFolder.objects.filter(walletname=mscansdir.group(1))
- if len(scansfolderl):
- assert len(scansfolderl) == 1
- scansfolder = scansfolderl[0]
- if scansfolder:
- scansfilel = scansfolder.survexscansingle_set.filter(name=mscansdir.group(2))
- if len(scansfilel):
- assert len(scansfilel) == 1
- scansfile = scansfilel[0]
- if scansfolder:
- tunnelfile.survexscansfolders.add(scansfolder)
- if scansfile:
- tunnelfile.survexscans.add(scansfile)
-
- elif path and not re.search("\.(?:png|jpg)$(?i)", path):
- name = os.path.split(path)[1]
- print "ttt", tunnelfile.tunnelpath, path, name
- rtunnelfilel = TunnelFile.objects.filter(tunnelname=name)
- if len(rtunnelfilel):
- assert len(rtunnelfilel) == 1, ("two paths with name of", path, "need more discrimination coded")
- rtunnelfile = rtunnelfilel[0]
- #print "ttt", tunnelfile.tunnelpath, path, name, rtunnelfile.tunnelpath
- tunnelfile.tunnelcontains.add(rtunnelfile)
-
- tunnelfile.save()
-
-
-def SetTunnelfileInfo(tunnelfile):
- ff = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
- tunnelfile.filesize = os.stat(ff)[stat.ST_SIZE]
- fin = open(ff)
- ttext = fin.read()
- fin.close()
-
- mtype = re.search("<(fontcolours|sketch)", ttext)
- assert mtype, ff
- tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
- tunnelfile.npaths = len(re.findall("<skpath", ttext))
- tunnelfile.save()
-
- # <tunnelxml tunnelversion="version2009-06-21 Matienzo" tunnelproject="ireby" tunneluser="goatchurch" tunneldate="2009-06-29 23:22:17">
- # <pcarea area_signal="frame" sfscaledown="12.282584" sfrotatedeg="-90.76982" sfxtrans="11.676667377221136" sfytrans="-15.677173422877454" sfsketch="204description/scans/plan(38).png" sfstyle="" nodeconnzsetrelative="0.0">
- for path, style in re.findall('<pcarea area_signal="frame".*?sfsketch="([^"]*)" sfstyle="([^"]*)"', ttext):
- FindTunnelScan(tunnelfile, path)
-
- # should also scan and look for survex blocks that might have been included
- # and also survex titles as well.
-
- tunnelfile.save()
-
-
-def LoadTunnelFiles():
- tunneldatadir = settings.TUNNEL_DATA
- TunnelFile.objects.all().delete()
- tunneldirs = [ "" ]
- while tunneldirs:
- tunneldir = tunneldirs.pop()
- for f in os.listdir(os.path.join(tunneldatadir, tunneldir)):
- if f[0] == "." or f[-1] == "~":
- continue
- lf = os.path.join(tunneldir, f)
- ff = os.path.join(tunneldatadir, lf)
- if os.path.isdir(ff):
- tunneldirs.append(lf)
- elif f[-4:] == ".xml":
- tunnelfile = TunnelFile(tunnelpath=lf, tunnelname=os.path.split(f[:-4])[1])
- tunnelfile.save()
-
- for tunnelfile in TunnelFile.objects.all():
- SetTunnelfileInfo(tunnelfile)
-
-
-
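Note that the http:// branch of GetListDir() above no longer hits the "Not written" assert, but within this diff it only opens the URL and never appends anything to res, so a remote scans directory would still come back as an empty listing. Purely as an illustration of one way that branch could be filled in (assuming the remote server returns an Apache-style directory index; this is not code from the troggle repository):

    # illustrative sketch only -- assumes an Apache-style index page
    import re, urllib

    def GetListDirHttp(sdir):
        res = [ ]
        page = urllib.urlopen(sdir).read()
        # pick up relative hrefs; skips query (sort) links and absolute paths
        for href in re.findall(r'href="([^"?/][^"]*)"', page):
            isdir = href.endswith("/")
            name = href.rstrip("/")
            res.append((name, sdir.rstrip("/") + "/" + href, isdir))
        return res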