Diffstat
-rw-r--r--  core/admin.py  9
-rw-r--r--  core/models.py  31
-rw-r--r--  core/models_survex.py  25
-rw-r--r--  core/view_surveys.py  14
-rw-r--r--  core/views_caves.py  225
-rw-r--r--  core/views_logbooks.py  8
-rw-r--r--  core/views_survex.py  81
-rw-r--r--  databaseReset.py  19
-rw-r--r--  docker/Dockerfile  4
l---------  docker/requirements.txt  2
-rw-r--r--  docker/requirements.txt.dj-1.10  11
-rw-r--r--  docker/requirements.txt.dj-1.7.11  4
-rw-r--r--  flatpages/migrations/0001_initial.py  34
-rw-r--r--  flatpages/migrations/__init__.py  0
-rw-r--r--  imagekit/__init__.py  13
-rw-r--r--  imagekit/defaults.py  21
-rw-r--r--  imagekit/lib.py  17
-rw-r--r--  imagekit/management/__init__.py  1
-rw-r--r--  imagekit/management/commands/__init__.py  1
-rw-r--r--  imagekit/management/commands/ikflush.py  38
-rw-r--r--  imagekit/models.py  136
-rw-r--r--  imagekit/options.py  23
-rw-r--r--  imagekit/processors.py  134
-rw-r--r--  imagekit/specs.py  119
-rw-r--r--  imagekit/tests.py  86
-rw-r--r--  imagekit/utils.py  15
-rw-r--r--  media/js/survey.js  4
-rw-r--r--  parsers/logbooks.py  2
-rw-r--r--  parsers/people.py  22
-rw-r--r--  parsers/survex.py  108
-rw-r--r--  parsers/surveys.py  26
-rw-r--r--  profiles/urls.py  4
-rw-r--r--  profiles/utils.py  5
-rw-r--r--  settings.py  38
-rw-r--r--  templates/base.html  4
-rw-r--r--  templates/expedition.html  10
-rw-r--r--  urls.py  72
-rw-r--r--  utils.py  4
38 files changed, 451 insertions(+), 919 deletions(-)
diff --git a/core/admin.py b/core/admin.py
index 71bbd61..ca38cfb 100644
--- a/core/admin.py
+++ b/core/admin.py
@@ -118,24 +118,27 @@ class EntranceAdmin(TroggleModelAdmin):
admin.site.register(DPhoto)
admin.site.register(Cave, CaveAdmin)
+admin.site.register(CaveSlug)
admin.site.register(Area)
#admin.site.register(OtherCaveName)
admin.site.register(CaveAndEntrance)
admin.site.register(NewSubCave)
admin.site.register(CaveDescription)
admin.site.register(Entrance, EntranceAdmin)
-admin.site.register(SurvexBlock, SurvexBlockAdmin)
admin.site.register(Expedition)
admin.site.register(Person,PersonAdmin)
-admin.site.register(SurvexPersonRole)
admin.site.register(PersonExpedition,PersonExpeditionAdmin)
admin.site.register(LogbookEntry, LogbookEntryAdmin)
#admin.site.register(PersonTrip)
admin.site.register(QM, QMAdmin)
admin.site.register(Survey, SurveyAdmin)
admin.site.register(ScannedImage)
-admin.site.register(SurvexStation)
+admin.site.register(SurvexDirectory)
+admin.site.register(SurvexFile)
+admin.site.register(SurvexStation)
+admin.site.register(SurvexBlock)
+admin.site.register(SurvexPersonRole)
admin.site.register(SurvexScansFolder)
admin.site.register(SurvexScanSingle)
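Note: the survex models added to the admin above (SurvexDirectory, SurvexFile, SurvexStation, SurvexBlock, SurvexPersonRole) are all registered with the default ModelAdmin; the previous custom SurvexBlockAdmin registration is dropped. A minimal sketch of an equivalent loop registration, assuming the same imports are available as in core/admin.py:

    # Sketch only: register several models with the stock ModelAdmin in one pass.
    from django.contrib import admin
    from troggle.core.models_survex import (SurvexBlock, SurvexDirectory, SurvexFile,
                                            SurvexPersonRole, SurvexStation)

    for model in (SurvexDirectory, SurvexFile, SurvexStation, SurvexBlock, SurvexPersonRole):
        admin.site.register(model)   # same effect as the one-per-line calls above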
diff --git a/core/models.py b/core/models.py
index f65efed..9529582 100644
--- a/core/models.py
+++ b/core/models.py
@@ -153,7 +153,7 @@ class Person(TroggleModel):
for personexpedition in self.personexpedition_set.all():
if not personexpedition.is_guest:
- print(personexpedition.expedition.year)
+ # print(personexpedition.expedition.year)
notability += Decimal(1) / (max_expo_val - int(personexpedition.expedition.year))
return notability
@@ -248,7 +248,7 @@ class LogbookEntry(TroggleModel):
("html", "Html style logbook")
)
- date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
+ date = models.DateTimeField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
@@ -377,12 +377,14 @@ class CaveSlug(models.Model):
cave = models.ForeignKey('Cave')
slug = models.SlugField(max_length=50, unique = True)
primary = models.BooleanField(default=False)
-
+
+ def __unicode__(self):
+ return self.slug
class Cave(TroggleModel):
# too much here perhaps,
official_name = models.CharField(max_length=160)
- area = models.ManyToManyField(Area, blank=True, null=True)
+ area = models.ManyToManyField(Area, blank=True)
kataster_code = models.CharField(max_length=20,blank=True,null=True)
kataster_number = models.CharField(max_length=10,blank=True, null=True)
unofficial_number = models.CharField(max_length=60,blank=True, null=True)
@@ -535,13 +537,15 @@ class Cave(TroggleModel):
def getCaveByReference(reference):
areaname, code = reference.split("-", 1)
- print(areaname, code)
+ #print(areaname, code)
area = Area.objects.get(short_name = areaname)
- print(area)
+ #print(area)
foundCaves = list(Cave.objects.filter(area = area, kataster_number = code).all()) + list(Cave.objects.filter(area = area, unofficial_number = code).all())
print(list(foundCaves))
- assert len(foundCaves) == 1
- return foundCaves[0]
+ if len(foundCaves) == 1:
+ return foundCaves[0]
+ else:
+ return False
class OtherCaveName(TroggleModel):
name = models.CharField(max_length=160)
@@ -706,9 +710,9 @@ class CaveDescription(TroggleModel):
short_name = models.CharField(max_length=50, unique = True)
long_name = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True,null=True)
- linked_subcaves = models.ManyToManyField("NewSubCave", blank=True,null=True)
- linked_entrances = models.ManyToManyField("Entrance", blank=True,null=True)
- linked_qms = models.ManyToManyField("QM", blank=True,null=True)
+ linked_subcaves = models.ManyToManyField("NewSubCave", blank=True)
+ linked_entrances = models.ManyToManyField("Entrance", blank=True)
+ linked_qms = models.ManyToManyField("QM", blank=True)
def __unicode__(self):
if self.long_name:
@@ -782,7 +786,7 @@ photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=set
class DPhoto(TroggleImageModel):
caption = models.CharField(max_length=1000,blank=True,null=True)
contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
- contains_person = models.ManyToManyField(Person,blank=True,null=True)
+ contains_person = models.ManyToManyField(Person,blank=True)
file = models.ImageField(storage=photoFileStorage, upload_to='.',)
is_mugshot = models.BooleanField(default=False)
contains_cave = models.ForeignKey(Cave,blank=True,null=True)
@@ -856,8 +860,9 @@ class Survey(TroggleModel):
integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)
+
def __unicode__(self):
- return self.expedition.year+"#"+"%02d" % int(self.wallet_number)
+ return self.expedition.year+"#" + "%s%02d" % (self.wallet_letter, int(self.wallet_number))
def notes(self):
return self.scannedimage_set.filter(contents='notes')
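Note on the getCaveByReference() change above: a non-unique or empty lookup now returns False instead of failing an assert, so callers have to check the return value. A small sketch of the calling pattern this implies (the reference string is illustrative; the function lives in core/models.py as shown):

    # Sketch: the caller must now treat a falsy result as "no unique cave found".
    cave = getCaveByReference("1623-204")      # hypothetical area-code reference
    if cave:
        print("matched cave:", cave)
    else:
        print("no unique cave for that reference; falling back")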
diff --git a/core/models_survex.py b/core/models_survex.py
index 3bae04c..e78cd48 100644
--- a/core/models_survex.py
+++ b/core/models_survex.py
@@ -18,10 +18,13 @@ class SurvexDirectory(models.Model):
cave = models.ForeignKey('Cave', blank=True, null=True)
primarysurvexfile = models.ForeignKey('SurvexFile', related_name='primarysurvexfile', blank=True, null=True)
# could also include files in directory but not referenced
-
+
+ def __unicode__(self):
+ return self.path
+
class Meta:
- ordering = ('id',)
-
+ ordering = ('path',)
+
class SurvexFile(models.Model):
path = models.CharField(max_length=200)
survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True)
@@ -29,6 +32,9 @@ class SurvexFile(models.Model):
class Meta:
ordering = ('id',)
+
+ def __unicode__(self):
+ return self.path + '.svx' or 'no file'
def exists(self):
fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
@@ -66,6 +72,9 @@ class SurvexStation(models.Model):
x = models.FloatField(blank=True, null=True)
y = models.FloatField(blank=True, null=True)
z = models.FloatField(blank=True, null=True)
+
+ def __unicode__(self):
+ return self.block.cave.slug() + '/' + self.block.name + '/' + self.name or 'No station name'
def path(self):
r = self.name
@@ -109,7 +118,7 @@ class SurvexBlock(models.Model):
text = models.TextField()
cave = models.ForeignKey('Cave', blank=True, null=True)
- date = models.DateField(blank=True, null=True)
+ date = models.DateTimeField(blank=True, null=True)
expeditionday = models.ForeignKey("ExpeditionDay", null=True)
expedition = models.ForeignKey('Expedition', blank=True, null=True)
@@ -177,7 +186,7 @@ ROLE_CHOICES = (
class SurvexPersonRole(models.Model):
survexblock = models.ForeignKey('SurvexBlock')
nrole = models.CharField(choices=ROLE_CHOICES, max_length=200, blank=True, null=True)
- # increasing levels of precision
+ # increasing levels of precision
personname = models.CharField(max_length=100)
person = models.ForeignKey('Person', blank=True, null=True)
personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True)
@@ -194,6 +203,9 @@ class SurvexScansFolder(models.Model):
class Meta:
ordering = ('walletname',)
+
+ def __unicode__(self):
+ return self.walletname or 'no wallet'
def get_absolute_url(self):
return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansfolder', kwargs={"path":re.sub("#", "%23", self.walletname)}))
@@ -205,6 +217,9 @@ class SurvexScanSingle(models.Model):
class Meta:
ordering = ('name',)
+
+ def __unicode__(self):
+ return self.survexscansfolder.walletname + '/' + self.name
def get_absolute_url(self):
return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansingle', kwargs={"path":re.sub("#", "%23", self.survexscansfolder.walletname), "file":self.name}))
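The __unicode__ fallbacks added above use expressions like self.path + '.svx' or 'no file'. In Python the or applies to the already-concatenated string, which is non-empty even when path is empty, so the fallback branch is effectively unreachable; the same holds for the SurvexStation version. A short sketch illustrating the precedence, with an explicit form that tests the field first (names are illustrative):

    # Sketch: why "x + '.svx' or 'no file'" rarely falls back.
    def label(path):
        # '' + '.svx' == '.svx', which is truthy, so the one-line "or" never
        # yields 'no file'; testing the field before concatenating does.
        return (path + ".svx") if path else "no file"

    assert ("" + ".svx" or "no file") == ".svx"     # the surprising case
    assert label("") == "no file"
    assert label("caves-1623/204/204") == "caves-1623/204/204.svx"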
diff --git a/core/view_surveys.py b/core/view_surveys.py
index aad48c3..04c6ca6 100644
--- a/core/view_surveys.py
+++ b/core/view_surveys.py
@@ -1,6 +1,6 @@
from django.conf import settings
import fileAbstraction
-from django.shortcuts import render_to_response
+from django.shortcuts import render
from django.http import HttpResponse, Http404
import os, stat
import re
@@ -86,7 +86,7 @@ def jgtfile(request, f):
upperdirs.append((href, hpath))
upperdirs.append(("", "/"))
- return render_to_response('listdir.html', {'file':f, 'listdirfiles':listdirfiles, 'listdirdirs':listdirdirs, 'upperdirs':upperdirs, 'settings': settings})
+ return render(request, 'listdir.html', {'file':f, 'listdirfiles':listdirfiles, 'listdirdirs':listdirdirs, 'upperdirs':upperdirs, 'settings': settings})
# flat output of file when loaded
if os.path.isfile(fp):
@@ -163,27 +163,27 @@ def jgtuploadfile(request):
#print ("FFF", request.FILES.values())
message = ""
print "gothere"
- return render_to_response('fileupload.html', {'message':message, 'filesuploaded':filesuploaded, 'settings': settings})
+ return render(request, 'fileupload.html', {'message':message, 'filesuploaded':filesuploaded, 'settings': settings})
def surveyscansfolder(request, path):
#print [ s.walletname for s in SurvexScansFolder.objects.all() ]
survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
- return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
+ return render(request, 'survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
def surveyscansingle(request, path, file):
survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
survexscansingle = SurvexScanSingle.objects.get(survexscansfolder=survexscansfolder, name=file)
return HttpResponse(content=open(survexscansingle.ffile), content_type=getMimeType(path.split(".")[-1]))
- #return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
+ #return render(request, 'survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })
def surveyscansfolders(request):
survexscansfolders = SurvexScansFolder.objects.all()
- return render_to_response('survexscansfolders.html', { 'survexscansfolders':survexscansfolders, 'settings': settings })
+ return render(request, 'survexscansfolders.html', { 'survexscansfolders':survexscansfolders, 'settings': settings })
def tunneldata(request):
tunnelfiles = TunnelFile.objects.all()
- return render_to_response('tunnelfiles.html', { 'tunnelfiles':tunnelfiles, 'settings': settings })
+ return render(request, 'tunnelfiles.html', { 'tunnelfiles':tunnelfiles, 'settings': settings })
def tunnelfile(request, path):
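Most of the edits in this file are the Django 1.10 move from render_to_response() to render(); render() takes the request as its first argument and runs context processors for you. A minimal sketch of the mechanical change (view and template names are illustrative):

    # Sketch of the substitution applied throughout core/view_surveys.py.
    from django.conf import settings
    from django.shortcuts import render

    def example_view(request):
        context = {'settings': settings}    # kept explicit, matching the views above
        # Django <= 1.9 style was: return render_to_response('listdir.html', context)
        return render(request, 'listdir.html', context)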
diff --git a/core/views_caves.py b/core/views_caves.py
index af76b1e..7b0555f 100644
--- a/core/views_caves.py
+++ b/core/views_caves.py
@@ -37,7 +37,7 @@ def numericalcmp(x, y):
-def caveCmp(x, y):
+def caveCmp(x, y):
if x.kataster_number:
if y.kataster_number:
return numericalcmp(x.kataster_number, y.kataster_number) # Note that cave kataster numbers are not generally integers.
@@ -240,7 +240,7 @@ def entranceSlug(request, slug):
def survexblock(request, survexpath):
survexpath = re.sub("/", ".", survexpath)
- print "jjjjjj", survexpath
+ print("jjjjjj", survexpath)
survexblock = models.SurvexBlock.objects.get(survexpath=survexpath)
#ftext = survexblock.filecontents()
ftext = survexblock.text
@@ -277,30 +277,30 @@ def get_qms(request, caveslug):
return render(request,'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})
areanames = [
- #('', 'Location unclear'),
- ('1a', '1a – Plateau: around Top Camp'),
- ('1b', '1b – Western plateau near 182'),
- ('1c', '1c – Eastern plateau near 204 walk-in path'),
- ('1d', '1d – Further plateau around 76'),
- ('2a', '2a – Southern Schwarzmooskogel near 201 path and the Nipple'),
- ('2b', '2b – Eishöhle area'),
- ('2b or 4 (unclear)', '2b or 4 (unclear)'),
- ('2c', '2c – Kaninchenhöhle area'),
- ('2d', '2d – Steinbrückenhöhle area'),
- ('3', '3 – Bräuning Alm'),
- ('4', '4 – Kratzer valley'),
- ('5', '5 – Schwarzmoos-Wildensee'),
- ('6', '6 – Far plateau'),
- ('1626 or 6 (borderline)', '1626 or 6 (borderline)'),
- ('7', '7 – Egglgrube'),
- ('8a', '8a – Loser south face'),
- ('8b', '8b – Loser below Dimmelwand'),
- ('8c', '8c – Augst See'),
- ('8d', '8d – Loser-Hochganger ridge'),
- ('9', '9 – Gschwandt Alm'),
- ('10', '10 – Altaussee'),
- ('11', '11 – Augstbach')
- ]
+ #('', 'Location unclear'),
+ ('1a', '1a – Plateau: around Top Camp'),
+ ('1b', '1b – Western plateau near 182'),
+ ('1c', '1c – Eastern plateau near 204 walk-in path'),
+ ('1d', '1d – Further plateau around 76'),
+ ('2a', '2a – Southern Schwarzmooskogel near 201 path and the Nipple'),
+ ('2b', '2b – Eishöhle area'),
+ ('2b or 4 (unclear)', '2b or 4 (unclear)'),
+ ('2c', '2c – Kaninchenhöhle area'),
+ ('2d', '2d – Steinbrückenhöhle area'),
+ ('3', '3 – Bräuning Alm'),
+ ('4', '4 – Kratzer valley'),
+ ('5', '5 – Schwarzmoos-Wildensee'),
+ ('6', '6 – Far plateau'),
+ ('1626 or 6 (borderline)', '1626 or 6 (borderline)'),
+ ('7', '7 – Egglgrube'),
+ ('8a', '8a – Loser south face'),
+ ('8b', '8b – Loser below Dimmelwand'),
+ ('8c', '8c – Augst See'),
+ ('8d', '8d – Loser-Hochganger ridge'),
+ ('9', '9 – Gschwandt Alm'),
+ ('10', '10 – Altaussee'),
+ ('11', '11 – Augstbach')
+ ]
def prospecting(request):
@@ -318,21 +318,21 @@ def prospecting(request):
# big map first (zoom factor ignored)
maps = {
-# id left top right bottom zoom
-# G&K G&K G&K G&K factor
-"all": [33810.4, 85436.5, 38192.0, 81048.2, 0.35,
- "All"],
-"40": [36275.6, 82392.5, 36780.3, 81800.0, 3.0,
- "Eishöhle"],
-"76": [35440.0, 83220.0, 36090.0, 82670.0, 1.3,
- "Eislufthöhle"],
-"204": [36354.1, 84154.5, 37047.4, 83300, 3.0,
- "Steinbrückenhöhle"],
-"tc": [35230.0, 82690.0, 36110.0, 82100.0, 3.0,
- "Near Top Camp"],
+# id left top right bottom zoom
+# G&K G&K G&K G&K factor
+"all": [33810.4, 85436.5, 38192.0, 81048.2, 0.35,
+ "All"],
+"40": [36275.6, 82392.5, 36780.3, 81800.0, 3.0,
+ "Eishöhle"],
+"76": [35440.0, 83220.0, 36090.0, 82670.0, 1.3,
+ "Eislufthöhle"],
+"204": [36354.1, 84154.5, 37047.4, 83300, 3.0,
+ "Steinbrückenhöhle"],
+"tc": [35230.0, 82690.0, 36110.0, 82100.0, 3.0,
+ "Near Top Camp"],
"grieß":
- [36000.0, 86300.0, 38320.0, 84400.0, 4.0,
- "Grießkogel Area"],
+ [36000.0, 86300.0, 38320.0, 84400.0, 4.0,
+ "Grießkogel Area"],
}
for n in maps.keys():
@@ -353,50 +353,50 @@ ZOOM = 4
DESC = 5
areacolours = {
- '1a' : '#00ffff',
- '1b' : '#ff00ff',
- '1c' : '#ffff00',
- '1d' : '#ffffff',
- '2a' : '#ff0000',
- '2b' : '#00ff00',
- '2c' : '#008800',
- '2d' : '#ff9900',
- '3' : '#880000',
- '4' : '#0000ff',
- '6' : '#000000', # doubles for surface fixed pts, and anything else
- '7' : '#808080'
- }
+ '1a' : '#00ffff',
+ '1b' : '#ff00ff',
+ '1c' : '#ffff00',
+ '1d' : '#ffffff',
+ '2a' : '#ff0000',
+ '2b' : '#00ff00',
+ '2c' : '#008800',
+ '2d' : '#ff9900',
+ '3' : '#880000',
+ '4' : '#0000ff',
+ '6' : '#000000', # doubles for surface fixed pts, and anything else
+ '7' : '#808080'
+ }
for FONT in [
- "/usr/share/fonts/truetype/freefont/FreeSans.ttf",
- "/usr/X11R6/lib/X11/fonts/truetype/arial.ttf",
- "C:\WINNT\Fonts\ARIAL.TTF"
- ]:
- if os.path.isfile(FONT): break
+ "/usr/share/fonts/truetype/freefont/FreeSans.ttf",
+ "/usr/X11R6/lib/X11/fonts/truetype/arial.ttf",
+ "C:\WINNT\Fonts\ARIAL.TTF"
+ ]:
+ if os.path.isfile(FONT): break
TEXTSIZE = 16
CIRCLESIZE =8
LINEWIDTH = 2
myFont = ImageFont.truetype(FONT, TEXTSIZE)
def mungecoord(x, y, mapcode, img):
- # Top of Zinken is 73 1201 = dataset 34542 81967
- # Top of Hinter is 1073 562 = dataset 36670 83317
- # image is 1417 by 2201
- # FACTOR1 = 1000.0 / (36670.0-34542.0)
- # FACTOR2 = (1201.0-562.0) / (83317 - 81967)
- # FACTOR = (FACTOR1 + FACTOR2)/2
- # The factors aren't the same as the scanned map's at a slight angle. I
- # can't be bothered to fix this. Since we zero on the Hinter it makes
- # very little difference for caves in the areas round 76 or 204.
- # xoffset = (x - 36670)*FACTOR
- # yoffset = (y - 83317)*FACTOR
- # return (1073 + xoffset, 562 - yoffset)
-
- m = maps[mapcode]
- factorX, factorY = img.size[0] / (m[R] - m[L]), img.size[1] / (m[T] - m[B])
- return ((x - m[L]) * factorX, (m[T] - y) * factorY)
-
+ # Top of Zinken is 73 1201 = dataset 34542 81967
+ # Top of Hinter is 1073 562 = dataset 36670 83317
+ # image is 1417 by 2201
+ # FACTOR1 = 1000.0 / (36670.0-34542.0)
+ # FACTOR2 = (1201.0-562.0) / (83317 - 81967)
+ # FACTOR = (FACTOR1 + FACTOR2)/2
+ # The factors aren't the same as the scanned map's at a slight angle. I
+ # can't be bothered to fix this. Since we zero on the Hinter it makes
+ # very little difference for caves in the areas round 76 or 204.
+ # xoffset = (x - 36670)*FACTOR
+ # yoffset = (y - 83317)*FACTOR
+ # return (1073 + xoffset, 562 - yoffset)
+
+ m = maps[mapcode]
+ factorX, factorY = img.size[0] / (m[R] - m[L]), img.size[1] / (m[T] - m[B])
+ return ((x - m[L]) * factorX, (m[T] - y) * factorY)
+
COL_TYPES = {True: "red",
False: "#dddddd",
"Reference": "#dddddd"}
@@ -422,40 +422,40 @@ def prospecting_image(request, name):
m = maps[name]
#imgmaps = []
if name == "all":
- img = mainImage
+ img = mainImage
else:
- M = maps['all']
- W, H = mainImage.size
- l = int((m[L] - M[L]) / (M[R] - M[L]) * W)
- t = int((m[T] - M[T]) / (M[B] - M[T]) * H)
- r = int((m[R] - M[L]) / (M[R] - M[L]) * W)
- b = int((m[B] - M[T]) / (M[B] - M[T]) * H)
- img = mainImage.crop((l, t, r, b))
- w = int(round(m[ZOOM] * (m[R] - m[L]) / (M[R] - M[L]) * W))
- h = int(round(m[ZOOM] * (m[B] - m[T]) / (M[B] - M[T]) * H))
- img = img.resize((w, h), Image.BICUBIC)
+ M = maps['all']
+ W, H = mainImage.size
+ l = int((m[L] - M[L]) / (M[R] - M[L]) * W)
+ t = int((m[T] - M[T]) / (M[B] - M[T]) * H)
+ r = int((m[R] - M[L]) / (M[R] - M[L]) * W)
+ b = int((m[B] - M[T]) / (M[B] - M[T]) * H)
+ img = mainImage.crop((l, t, r, b))
+ w = int(round(m[ZOOM] * (m[R] - m[L]) / (M[R] - M[L]) * W))
+ h = int(round(m[ZOOM] * (m[B] - m[T]) / (M[B] - M[T]) * H))
+ img = img.resize((w, h), Image.BICUBIC)
draw = ImageDraw.Draw(img)
draw.setfont(myFont)
if name == "all":
for maparea in maps.keys():
- if maparea == "all":
- continue
- localm = maps[maparea]
- l,t = mungecoord(localm[L], localm[T], "all", img)
- r,b = mungecoord(localm[R], localm[B], "all", img)
- text = maparea + " map"
- textlen = draw.textsize(text)[0] + 3
- draw.rectangle([l, t, l+textlen, t+TEXTSIZE+2], fill='#ffffff')
- draw.text((l+2, t+1), text, fill="#000000")
- #imgmaps.append( [l, t, l+textlen, t+SIZE+2, "submap" + maparea, maparea + " subarea map"] )
- draw.line([l, t, r, t], fill='#777777', width=LINEWIDTH)
- draw.line([l, b, r, b], fill='#777777', width=LINEWIDTH)
- draw.line([l, t, l, b], fill='#777777', width=LINEWIDTH)
- draw.line([r, t, r, b], fill='#777777', width=LINEWIDTH)
- draw.line([l, t, l+textlen, t], fill='#777777', width=LINEWIDTH)
- draw.line([l, t+TEXTSIZE+2, l+textlen, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
- draw.line([l, t, l, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
- draw.line([l+textlen, t, l+textlen, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
+ if maparea == "all":
+ continue
+ localm = maps[maparea]
+ l,t = mungecoord(localm[L], localm[T], "all", img)
+ r,b = mungecoord(localm[R], localm[B], "all", img)
+ text = maparea + " map"
+ textlen = draw.textsize(text)[0] + 3
+ draw.rectangle([l, t, l+textlen, t+TEXTSIZE+2], fill='#ffffff')
+ draw.text((l+2, t+1), text, fill="#000000")
+ #imgmaps.append( [l, t, l+textlen, t+SIZE+2, "submap" + maparea, maparea + " subarea map"] )
+ draw.line([l, t, r, t], fill='#777777', width=LINEWIDTH)
+ draw.line([l, b, r, b], fill='#777777', width=LINEWIDTH)
+ draw.line([l, t, l, b], fill='#777777', width=LINEWIDTH)
+ draw.line([r, t, r, b], fill='#777777', width=LINEWIDTH)
+ draw.line([l, t, l+textlen, t], fill='#777777', width=LINEWIDTH)
+ draw.line([l, t+TEXTSIZE+2, l+textlen, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
+ draw.line([l, t, l, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
+ draw.line([l+textlen, t, l+textlen, t+TEXTSIZE+2], fill='#777777', width=LINEWIDTH)
#imgmaps[maparea] = []
# Draw scale bar
m100 = int(100 / (m[R] - m[L]) * img.size[0])
@@ -477,14 +477,15 @@ def prospecting_image(request, name):
plot("laser.0_5", "LSR5", "Reference", "Laser Point 0/5", name, draw, img)
plot("225-96", "BAlm", "Reference", "Bräuning Alm trig point", name, draw, img)
for entrance in Entrance.objects.all():
- station = entrance.best_station()
- if station:
- #try:
- areaName = entrance.caveandentrance_set.all()[0].cave.getArea().short_name
- plot(station, "%s-%s" % (areaName, str(entrance)[5:]), entrance.needs_surface_work(), str(entrance), name, draw, img)
- #except:
- # pass
-
+ station = entrance.best_station()
+ if station:
+ #try:
+ areaName = entrance.caveandentrance_set.all()[0].cave.getArea().short_name
+ plot(station, "%s-%s" % (areaName, str(entrance)
+ [5:]), entrance.needs_surface_work(), str(entrance), name, draw, img)
+ #except:
+ # pass
+
for (N, E, D, num) in [(35975.37, 83018.21, 100,"177"), # Calculated from bearings
(35350.00, 81630.00, 50, "71"), # From Auer map
(36025.00, 82475.00, 50, "146"), # From mystery map
diff --git a/core/views_logbooks.py b/core/views_logbooks.py
index 1bc709c..ef920d2 100644
--- a/core/views_logbooks.py
+++ b/core/views_logbooks.py
@@ -70,13 +70,17 @@ def expedition(request, expeditionname):
pcell = { "persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
logbook_entry__date=date) }
pcell["survexblocks"] = set(SurvexBlock.objects.filter(survexpersonrole__personexpedition=personexpedition,
- date = date))
+ date=date))
prow.append(pcell)
personexpeditiondays.append({"personexpedition":personexpedition, "personrow":prow})
if "reload" in request.GET:
LoadLogbookForExpedition(this_expedition)
- return render(request,'expedition.html', {'expedition': this_expedition, 'expeditions':expeditions, 'personexpeditiondays':personexpeditiondays, 'settings':settings, 'dateditems': dateditems })
+ return render(request,'expedition.html', {'this_expedition': this_expedition,
+ 'expeditions':expeditions,
+ 'personexpeditiondays':personexpeditiondays,
+ 'settings':settings,
+ 'dateditems': dateditems })
def get_absolute_url(self):
return ('expedition', (expedition.year))
diff --git a/core/views_survex.py b/core/views_survex.py
index 1e6c1bf..2582e40 100644
--- a/core/views_survex.py
+++ b/core/views_survex.py
@@ -1,7 +1,8 @@
from django import forms
from django.http import HttpResponseRedirect, HttpResponse
-from django.shortcuts import render_to_response, render
-from django.core.context_processors import csrf
+from django.shortcuts import render
+from django.views.decorators import csrf
+from django.views.decorators.csrf import csrf_protect
from django.http import HttpResponse, Http404
import re
import os
@@ -39,9 +40,9 @@ survextemplatefile = """; Locn: Totes Gebirge, Austria - Loser/Augst-Eck Plateau
*data passage station left right up down ignoreall
1 [L] [R] [U] [D] comment
-*end [surveyname]"""
-
-
+*end [surveyname]"""
+
+
def ReplaceTabs(stext):
res = [ ]
nsl = 0
@@ -110,7 +111,7 @@ class SvxForm(forms.Form):
log = re.sub("(?s).*?(Survey contains)", "\\1", log)
return log
-
+@csrf_protect
def svx(request, survex_file):
# get the basic data from the file given in the URL
dirname = os.path.split(survex_file)[0]
@@ -173,10 +174,10 @@ def svx(request, survex_file):
'difflist': difflist,
'logmessage':logmessage,
'form':form}
- vmap.update(csrf(request))
+ # vmap.update(csrf(request))
if outputtype == "ajax":
- return render_to_response('svxfiledifflistonly.html', vmap)
- return render_to_response('svxfile.html', vmap)
+ return render(request, 'svxfiledifflistonly.html', vmap)
+ return render(request, 'svxfile.html', vmap)
def svxraw(request, survex_file):
svx = open(os.path.join(settings.SURVEX_DATA, survex_file+".svx"), "rb")
@@ -200,19 +201,19 @@ def threed(request, survex_file):
log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
return HttpResponse(log, content_type="text")
+
def log(request, survex_file):
process(survex_file)
log = open(settings.SURVEX_DATA + survex_file + ".log", "rb")
return HttpResponse(log, content_type="text")
+
def err(request, survex_file):
process(survex_file)
err = open(settings.SURVEX_DATA + survex_file + ".err", "rb")
return HttpResponse(err, content_type="text")
-
-
def identifycavedircontents(gcavedir):
# find the primary survex file in each cave directory
name = os.path.split(gcavedir)[1]
@@ -252,37 +253,49 @@ def identifycavedircontents(gcavedir):
if primesvx:
subsvx.insert(0, primesvx)
return subdirs, subsvx
-
-
+
# direct local non-database browsing through the svx file repositories
# perhaps should use the database and have a reload button for it
def survexcaveslist(request):
- cavesdir = os.path.join(settings.SURVEX_DATA, "caves-1623")
- #cavesdircontents = { }
-
- onefilecaves = [ ]
- multifilecaves = [ ]
- subdircaves = [ ]
+ kat_areas = settings.KAT_AREAS
+
+ fnumlist = []
+
+ kat_areas = ['1623']
+
+ for area in kat_areas:
+ print(area)
+ cavesdir = os.path.join(settings.SURVEX_DATA, "caves-%s" % area)
+ print(cavesdir)
+ #cavesdircontents = { }
+ fnumlist += [ (-int(re.match(r"\d*", f).group(0) or "0"), f, area) for f in os.listdir(cavesdir) ]
+ print(fnumlist)
+ print(len(fnumlist))
# first sort the file list
- fnumlist = [ (-int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir) ]
fnumlist.sort()
+ onefilecaves = [ ]
+ multifilecaves = [ ]
+ subdircaves = [ ]
+
print(fnumlist)
# go through the list and identify the contents of each cave directory
- for num, cavedir in fnumlist:
+ for num, cavedir, area in fnumlist:
if cavedir in ["144", "40"]:
continue
-
+
+ cavesdir = os.path.join(settings.SURVEX_DATA, "caves-%s" % area)
+
gcavedir = os.path.join(cavesdir, cavedir)
if os.path.isdir(gcavedir) and cavedir[0] != ".":
subdirs, subsvx = identifycavedircontents(gcavedir)
survdirobj = [ ]
for lsubsvx in subsvx:
- survdirobj.append(("caves-1623/"+cavedir+"/"+lsubsvx, lsubsvx))
+ survdirobj.append(("caves-" + area + "/"+cavedir+"/"+lsubsvx, lsubsvx))
# caves with subdirectories
if subdirs:
@@ -292,7 +305,7 @@ def survexcaveslist(request):
assert not dsubdirs
lsurvdirobj = [ ]
for lsubsvx in dsubsvx:
- lsurvdirobj.append(("caves-1623/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
+ lsurvdirobj.append(("caves-" + area + "/"+cavedir+"/"+subdir+"/"+lsubsvx, lsubsvx))
subsurvdirs.append((lsurvdirobj[0], lsurvdirobj[1:]))
subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))
@@ -304,24 +317,22 @@ def survexcaveslist(request):
#print("survdirobj = ")
#print(survdirobj)
onefilecaves.append(survdirobj[0])
-
- return render_to_response('svxfilecavelist.html', {'settings': settings, "onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
-
-
-
+ return render(request, 'svxfilecavelist.html', {"onefilecaves":onefilecaves, "multifilecaves":multifilecaves, "subdircaves":subdircaves })
# parsing all the survex files of a single cave and showing that it's consistent and can find all the files and people
# doesn't use recursion. just writes it twice
def survexcavesingle(request, survex_cave):
breload = False
- cave = Cave.objects.get(kataster_number=survex_cave)
+ cave = Cave.objects.filter(kataster_number=survex_cave)
+ if len(cave) < 1:
+ cave = Cave.objects.filter(unofficial_number=survex_cave)
+
if breload:
parsers.survex.ReloadSurvexCave(survex_cave)
- return render_to_response('svxcavesingle.html', {'settings': settings, "cave":cave })
+ if len(cave) > 0:
+ return render(request, 'svxcavesingle.html', {"cave":cave[0] })
+ else:
+ return render(request, 'svxcavesingle.html', {"cave":cave })
-
-
-
-
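survexcavesingle() above now falls back from kataster_number to unofficial_number and tolerates an empty result instead of letting Cave.objects.get() raise. A compact sketch of the same lookup as a single query with a Q object (Cave as imported in core/views_survex.py):

    # Sketch: OR the two number fields in one filter instead of two passes.
    from django.db.models import Q

    def find_cave(survex_cave):
        qs = Cave.objects.filter(Q(kataster_number=survex_cave) |
                                 Q(unofficial_number=survex_cave))
        return qs.first()                  # None when nothing matches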
diff --git a/databaseReset.py b/databaseReset.py
index fd9b83a..49e4e1b 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -3,6 +3,11 @@ import time
import settings
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
+
+if __name__ == '__main__':
+ import django
+ django.setup()
+
from django.core import management
from django.db import connection
from django.contrib.auth.models import User
@@ -28,7 +33,7 @@ def reload_db():
cursor.execute("CREATE DATABASE %s" % databasename)
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
cursor.execute("USE %s" % databasename)
- management.call_command('syncdb', interactive=False)
+ management.call_command('migrate', interactive=False)
user = User.objects.create_user(expouser, expouseremail, expouserpass)
user.is_staff = True
user.is_superuser = True
@@ -90,14 +95,16 @@ def reset():
import_caves()
import_people()
import_surveyscans()
- import_survex()
+
import_logbooks()
import_QMs()
+
+ import_survex()
try:
import_tunnelfiles()
except:
print("Tunnel files parser broken.")
-
+
import_surveys()
@@ -146,7 +153,7 @@ def dumplogbooks():
persons = [{"name": get_name(pt.personexpedition), "TU": pt.time_underground, "author": pt.is_logbook_entry_author} for pt in pts]
f = open(filename, "wb")
template = loader.get_template('dataformat/logbookentry.html')
- context = Context({'trip': trip,
+ context = Context({'trip': trip,
'persons': persons,
'date': dateStr,
'expeditionyear': lbe.expedition.year})
@@ -239,7 +246,3 @@ if __name__ == "__main__":
else:
print("%s not recognised" % sys.argv)
usage()
-
-
-
-
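The block added at the top of databaseReset.py is the usual bootstrap for standalone scripts on Django 1.7+: point DJANGO_SETTINGS_MODULE at the settings, call django.setup() to populate the app registry, and only then import anything that touches the ORM (which is also why syncdb becomes migrate just below). A minimal sketch of that ordering:

    # Sketch: standalone-script bootstrap; the import order is the point.
    import os
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

    import django
    django.setup()                                   # loads apps and models

    from django.contrib.auth.models import User     # ORM imports only after setup()
    print(User.objects.count())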
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 5f45b8e..2b4882a 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -2,7 +2,9 @@ FROM python:2.7-stretch
#COPY backports.list /etc/apt/sources.list.d/
-RUN apt-get -y update && apt-get install -y mercurial fonts-freefont-ttf locales survex
+RUN apt-get -y update && apt-get install -y mercurial \
+ fonts-freefont-ttf locales survex python-levenshtein \
+ python-pygraphviz
#RUN apt-get -y -t -backports install survex
diff --git a/docker/requirements.txt b/docker/requirements.txt
index d561bd8..e5006a4 120000
--- a/docker/requirements.txt
+++ b/docker/requirements.txt
@@ -1 +1 @@
-requirements.txt.dj-1.7.11
\ No newline at end of file
+requirements.txt.dj-1.10
\ No newline at end of file
diff --git a/docker/requirements.txt.dj-1.10 b/docker/requirements.txt.dj-1.10
new file mode 100644
index 0000000..603aa25
--- /dev/null
+++ b/docker/requirements.txt.dj-1.10
@@ -0,0 +1,11 @@
+Django==1.10.8
+django-registration==2.1.2
+mysql
+django-imagekit
+Image
+django-tinymce
+smartencoding
+fuzzywuzzy
+GitPython
+unidecode
+django-extensions
\ No newline at end of file
diff --git a/docker/requirements.txt.dj-1.7.11 b/docker/requirements.txt.dj-1.7.11
index ae0a7ad..90cc380 100644
--- a/docker/requirements.txt.dj-1.7.11
+++ b/docker/requirements.txt.dj-1.7.11
@@ -6,3 +6,7 @@ django-imagekit
Image
django-tinymce==2.7.0
smartencoding
+fuzzywuzzy
+GitPython
+unidecode
+django-extensions
diff --git a/flatpages/migrations/0001_initial.py b/flatpages/migrations/0001_initial.py
new file mode 100644
index 0000000..c292e88
--- /dev/null
+++ b/flatpages/migrations/0001_initial.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.8 on 2019-07-14 19:45
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ initial = True
+
+ dependencies = [
+ ('core', '0003_auto_20190714_2029'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='EntranceRedirect',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('originalURL', models.CharField(max_length=200)),
+ ('entrance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Entrance')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Redirect',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('originalURL', models.CharField(max_length=200, unique=True)),
+ ('newURL', models.CharField(max_length=200)),
+ ],
+ ),
+ ]
diff --git a/flatpages/migrations/__init__.py b/flatpages/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/flatpages/migrations/__init__.py
diff --git a/imagekit/__init__.py b/imagekit/__init__.py
deleted file mode 100644
index 2965bbd..0000000
--- a/imagekit/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-"""
-
-Django ImageKit
-
-Author: Justin Driscoll <justin.driscoll@gmail.com>
-Version: 0.2
-
-"""
-VERSION = "0.2"
-
-
-
-
\ No newline at end of file
diff --git a/imagekit/defaults.py b/imagekit/defaults.py
deleted file mode 100644
index e1a05f6..0000000
--- a/imagekit/defaults.py
+++ /dev/null
@@ -1,21 +0,0 @@
-""" Default ImageKit configuration """
-
-from imagekit.specs import ImageSpec
-from imagekit import processors
-
-class ResizeThumbnail(processors.Resize):
- width = 100
- height = 50
- crop = True
-
-class EnhanceSmall(processors.Adjustment):
- contrast = 1.2
- sharpness = 1.1
-
-class SampleReflection(processors.Reflection):
- size = 0.5
- background_color = "#000000"
-
-class DjangoAdminThumbnail(ImageSpec):
- access_as = 'admin_thumbnail'
- processors = [ResizeThumbnail, EnhanceSmall, SampleReflection]
diff --git a/imagekit/lib.py b/imagekit/lib.py
deleted file mode 100644
index 65646a4..0000000
--- a/imagekit/lib.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Required PIL classes may or may not be available from the root namespace
-# depending on the installation method used.
-try:
- import Image
- import ImageFile
- import ImageFilter
- import ImageEnhance
- import ImageColor
-except ImportError:
- try:
- from PIL import Image
- from PIL import ImageFile
- from PIL import ImageFilter
- from PIL import ImageEnhance
- from PIL import ImageColor
- except ImportError:
- raise ImportError('ImageKit was unable to import the Python Imaging Library. Please confirm it`s installed and available on your current Python path.')
\ No newline at end of file
diff --git a/imagekit/management/__init__.py b/imagekit/management/__init__.py
deleted file mode 100644
index 8b13789..0000000
--- a/imagekit/management/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/imagekit/management/commands/__init__.py b/imagekit/management/commands/__init__.py
deleted file mode 100644
index 8b13789..0000000
--- a/imagekit/management/commands/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/imagekit/management/commands/ikflush.py b/imagekit/management/commands/ikflush.py
deleted file mode 100644
index c03440f..0000000
--- a/imagekit/management/commands/ikflush.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from django.db.models.loading import cache
-from django.core.management.base import BaseCommand, CommandError
-from optparse import make_option
-from imagekit.models import ImageModel
-from imagekit.specs import ImageSpec
-
-
-class Command(BaseCommand):
- help = ('Clears all ImageKit cached files.')
- args = '[apps]'
- requires_model_validation = True
- can_import_settings = True
-
- def handle(self, *args, **options):
- return flush_cache(args, options)
-
-def flush_cache(apps, options):
- """ Clears the image cache
-
- """
- apps = [a.strip(',') for a in apps]
- if apps:
- print 'Flushing cache for %s...' % ', '.join(apps)
- else:
- print 'Flushing caches...'
-
- for app_label in apps:
- app = cache.get_app(app_label)
- models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)]
-
- for model in models:
- for obj in model.objects.all():
- for spec in model._ik.specs:
- prop = getattr(obj, spec.name(), None)
- if prop is not None:
- prop._delete()
- if spec.pre_cache:
- prop._create()
diff --git a/imagekit/models.py b/imagekit/models.py
deleted file mode 100644
index 140715e..0000000
--- a/imagekit/models.py
+++ /dev/null
@@ -1,136 +0,0 @@
-import os
-from datetime import datetime
-from django.conf import settings
-from django.core.files.base import ContentFile
-from django.db import models
-from django.db.models.base import ModelBase
-from django.utils.translation import ugettext_lazy as _
-
-from imagekit import specs
-from imagekit.lib import *
-from imagekit.options import Options
-from imagekit.utils import img_to_fobj
-
-# Modify image file buffer size.
-ImageFile.MAXBLOCK = getattr(settings, 'PIL_IMAGEFILE_MAXBLOCK', 256 * 2 ** 10)
-
-# Choice tuples for specifying the crop origin.
-# These are provided for convenience.
-CROP_HORZ_CHOICES = (
- (0, _('left')),
- (1, _('center')),
- (2, _('right')),
-)
-
-CROP_VERT_CHOICES = (
- (0, _('top')),
- (1, _('center')),
- (2, _('bottom')),
-)
-
-
-class ImageModelBase(ModelBase):
- """ ImageModel metaclass
-
- This metaclass parses IKOptions and loads the specified specification
- module.
-
- """
- def __init__(cls, name, bases, attrs):
- parents = [b for b in bases if isinstance(b, ImageModelBase)]
- if not parents:
- return
- user_opts = getattr(cls, 'IKOptions', None)
- opts = Options(user_opts)
- try:
- module = __import__(opts.spec_module, {}, {}, [''])
- except ImportError:
- raise ImportError('Unable to load imagekit config module: %s' % \
- opts.spec_module)
- for spec in [spec for spec in module.__dict__.values() \
- if isinstance(spec, type) \
- and issubclass(spec, specs.ImageSpec) \
- and spec != specs.ImageSpec]:
- setattr(cls, spec.name(), specs.Descriptor(spec))
- opts.specs.append(spec)
- setattr(cls, '_ik', opts)
-
-
-class ImageModel(models.Model):
- """ Abstract base class implementing all core ImageKit functionality
-
- Subclasses of ImageModel are augmented with accessors for each defined
- image specification and can override the inner IKOptions class to customize
- storage locations and other options.
-
- """
- __metaclass__ = ImageModelBase
-
- class Meta:
- abstract = True
-
- class IKOptions:
- pass
-
- def admin_thumbnail_view(self):
- if not self._imgfield:
- return None
- prop = getattr(self, self._ik.admin_thumbnail_spec, None)
- if prop is None:
- return 'An "%s" image spec has not been defined.' % \
- self._ik.admin_thumbnail_spec
- else:
- if hasattr(self, 'get_absolute_url'):
- return u'<a href="%s"><img src="%s"></a>' % \
- (self.get_absolute_url(), prop.url)
- else:
- return u'<a href="%s"><img src="%s"></a>' % \
- (self._imgfield.url, prop.url)
- admin_thumbnail_view.short_description = _('Thumbnail')
- admin_thumbnail_view.allow_tags = True
-
- @property
- def _imgfield(self):
- return getattr(self, self._ik.image_field)
-
- def _clear_cache(self):
- for spec in self._ik.specs:
- prop = getattr(self, spec.name())
- prop._delete()
-
- def _pre_cache(self):
- for spec in self._ik.specs:
- if spec.pre_cache:
- prop = getattr(self, spec.name())
- prop._create()
-
- def save(self, clear_cache=True, *args, **kwargs):
- is_new_object = self._get_pk_val is None
- super(ImageModel, self).save(*args, **kwargs)
- if is_new_object:
- clear_cache = False
- spec = self._ik.preprocessor_spec
- if spec is not None:
- newfile = self._imgfield.storage.open(str(self._imgfield))
- img = Image.open(newfile)
- img = spec.process(img, None)
- format = img.format or 'JPEG'
- if format != 'JPEG':
- imgfile = img_to_fobj(img, format)
- else:
- imgfile = img_to_fobj(img, format,
- quality=int(spec.quality),
- optimize=True)
- content = ContentFile(imgfile.read())
- newfile.close()
- name = str(self._imgfield)
- self._imgfield.storage.delete(name)
- self._imgfield.storage.save(name, content)
- if clear_cache and self._imgfield != '':
- self._clear_cache()
- self._pre_cache()
-
- def delete(self):
- assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
- self._clear_cache()
- models.Model.delete(self)
diff --git a/imagekit/options.py b/imagekit/options.py
deleted file mode 100644
index 022cc9e..0000000
--- a/imagekit/options.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Imagekit options
-from imagekit import processors
-from imagekit.specs import ImageSpec
-
-
-class Options(object):
- """ Class handling per-model imagekit options
-
- """
- image_field = 'image'
- crop_horz_field = 'crop_horz'
- crop_vert_field = 'crop_vert'
- preprocessor_spec = None
- cache_dir = 'cache'
- save_count_as = None
- cache_filename_format = "%(filename)s_%(specname)s.%(extension)s"
- admin_thumbnail_spec = 'admin_thumbnail'
- spec_module = 'imagekit.defaults'
-
- def __init__(self, opts):
- for key, value in opts.__dict__.iteritems():
- setattr(self, key, value)
- self.specs = []
\ No newline at end of file
diff --git a/imagekit/processors.py b/imagekit/processors.py
deleted file mode 100644
index 6f6b480..0000000
--- a/imagekit/processors.py
+++ /dev/null
@@ -1,134 +0,0 @@
-""" Imagekit Image "ImageProcessors"
-
-A processor defines a set of class variables (optional) and a
-class method named "process" which processes the supplied image using
-the class properties as settings. The process method can be overridden as well allowing user to define their
-own effects/processes entirely.
-
-"""
-from imagekit.lib import *
-
-class ImageProcessor(object):
- """ Base image processor class """
- @classmethod
- def process(cls, image, obj=None):
- return image
-
-
-class Adjustment(ImageProcessor):
- color = 1.0
- brightness = 1.0
- contrast = 1.0
- sharpness = 1.0
-
- @classmethod
- def process(cls, image, obj=None):
- for name in ['Color', 'Brightness', 'Contrast', 'Sharpness']:
- factor = getattr(cls, name.lower())
- if factor != 1.0:
- image = getattr(ImageEnhance, name)(image).enhance(factor)
- return image
-
-
-class Reflection(ImageProcessor):
- background_color = '#FFFFFF'
- size = 0.0
- opacity = 0.6
-
- @classmethod
- def process(cls, image, obj=None):
- # convert bgcolor string to rgb value
- background_color = ImageColor.getrgb(cls.background_color)
- # copy orignial image and flip the orientation
- reflection = image.copy().transpose(Image.FLIP_TOP_BOTTOM)
- # create a new image filled with the bgcolor the same size
- background = Image.new("RGB", image.size, background_color)
- # calculate our alpha mask
- start = int(255 - (255 * cls.opacity)) # The start of our gradient
- steps = int(255 * cls.size) # the number of intermedite values
- increment = (255 - start) / float(steps)
- mask = Image.new('L', (1, 255))
- for y in range(255):
- if y < steps:
- val = int(y * increment + start)
- else:
- val = 255
- mask.putpixel((0, y), val)
- alpha_mask = mask.resize(image.size)
- # merge the reflection onto our background color using the alpha mask
- reflection = Image.composite(background, reflection, alpha_mask)
- # crop the reflection
- reflection_height = int(image.size[1] * cls.size)
- reflection = reflection.crop((0, 0, image.size[0], reflection_height))
- # create new image sized to hold both the original image and the reflection
- composite = Image.new("RGB", (image.size[0], image.size[1]+reflection_height), background_color)
- # paste the orignal image and the reflection into the composite image
- composite.paste(image, (0, 0))
- composite.paste(reflection, (0, image.size[1]))
- # return the image complete with reflection effect
- return composite
-
-
-class Resize(ImageProcessor):
- width = None
- height = None
- crop = False
- upscale = False
-
- @classmethod
- def process(cls, image, obj=None):
- cur_width, cur_height = image.size
- if cls.crop:
- crop_horz = getattr(obj, obj._ik.crop_horz_field, 1)
- crop_vert = getattr(obj, obj._ik.crop_vert_field, 1)
- ratio = max(float(cls.width)/cur_width, float(cls.height)/cur_height)
- resize_x, resize_y = ((cur_width * ratio), (cur_height * ratio))
- crop_x, crop_y = (abs(cls.width - resize_x), abs(cls.height - resize_y))
- x_diff, y_diff = (int(crop_x / 2), int(crop_y / 2))
- box_left, box_right = {
- 0: (0, cls.width),
- 1: (int(x_diff), int(x_diff + cls.width)),
- 2: (int(crop_x), int(resize_x)),
- }[crop_horz]
- box_upper, box_lower = {
- 0: (0, cls.height),
- 1: (int(y_diff), int(y_diff + cls.height)),
- 2: (int(crop_y), int(resize_y)),
- }[crop_vert]
- box = (box_left, box_upper, box_right, box_lower)
- image = image.resize((int(resize_x), int(resize_y)), Image.ANTIALIAS).crop(box)
- else:
- if not cls.width is None and not cls.height is None:
- ratio = min(float(cls.width)/cur_width,
- float(cls.height)/cur_height)
- else:
- if cls.width is None:
- ratio = float(cls.height)/cur_height
- else:
- ratio = float(cls.width)/cur_width
- new_dimensions = (int(round(cur_width*ratio)),
- int(round(cur_height*ratio)))
- if new_dimensions[0] > cur_width or \
- new_dimensions[1] > cur_height:
- if not cls.upscale:
- return image
- image = image.resize(new_dimensions, Image.ANTIALIAS)
- return image
-
-
-class Transpose(ImageProcessor):
- """ Rotates or flips the image
-
- Method should be one of the following strings:
- - FLIP_LEFT RIGHT
- - FLIP_TOP_BOTTOM
- - ROTATE_90
- - ROTATE_270
- - ROTATE_180
-
- """
- method = 'FLIP_LEFT_RIGHT'
-
- @classmethod
- def process(cls, image, obj=None):
- return image.transpose(getattr(Image, cls.method))
diff --git a/imagekit/specs.py b/imagekit/specs.py
deleted file mode 100644
index a6832ba..0000000
--- a/imagekit/specs.py
+++ /dev/null
@@ -1,119 +0,0 @@
-""" ImageKit image specifications
-
-All imagekit specifications must inherit from the ImageSpec class. Models
-inheriting from ImageModel will be modified with a descriptor/accessor for each
-spec found.
-
-"""
-import os
-from StringIO import StringIO
-from imagekit.lib import *
-from imagekit.utils import img_to_fobj
-from django.core.files.base import ContentFile
-
-class ImageSpec(object):
- pre_cache = False
- quality = 70
- increment_count = False
- processors = []
-
- @classmethod
- def name(cls):
- return getattr(cls, 'access_as', cls.__name__.lower())
-
- @classmethod
- def process(cls, image, obj):
- processed_image = image.copy()
- for proc in cls.processors:
- processed_image = proc.process(processed_image, obj)
- return processed_image
-
-
-class Accessor(object):
- def __init__(self, obj, spec):
- self._img = None
- self._obj = obj
- self.spec = spec
-
- def _get_imgfile(self):
- format = self._img.format or 'JPEG'
- if format != 'JPEG':
- imgfile = img_to_fobj(self._img, format)
- else:
- imgfile = img_to_fobj(self._img, format,
- quality=int(self.spec.quality),
- optimize=True)
- return imgfile
-
- def _create(self):
- if self._exists():
- return
- # process the original image file
- fp = self._obj._imgfield.storage.open(self._obj._imgfield.name)
- fp.seek(0)
- fp = StringIO(fp.read())
- try:
- self._img = self.spec.process(Image.open(fp), self._obj)
- # save the new image to the cache
- content = ContentFile(self._get_imgfile().read())
- self._obj._imgfield.storage.save(self.name, content)
- except IOError:
- pass
-
- def _delete(self):
- self._obj._imgfield.storage.delete(self.name)
-
- def _exists(self):
- return self._obj._imgfield.storage.exists(self.name)
-
- def _basename(self):
- filename, extension = \
- os.path.splitext(os.path.basename(self._obj._imgfield.name))
- return self._obj._ik.cache_filename_format % \
- {'filename': filename,
- 'specname': self.spec.name(),
- 'extension': extension.lstrip('.')}
-
- @property
- def name(self):
- return os.path.join(self._obj._ik.cache_dir, self._basename())
-
- @property
- def url(self):
- self._create()
- if self.spec.increment_count:
- fieldname = self._obj._ik.save_count_as
- if fieldname is not None:
- current_count = getattr(self._obj, fieldname)
- setattr(self._obj, fieldname, current_count + 1)
- self._obj.save(clear_cache=False)
- return self._obj._imgfield.storage.url(self.name)
-
- @property
- def file(self):
- self._create()
- return self._obj._imgfield.storage.open(self.name)
-
- @property
- def image(self):
- if self._img is None:
- self._create()
- if self._img is None:
- self._img = Image.open(self.file)
- return self._img
-
- @property
- def width(self):
- return self.image.size[0]
-
- @property
- def height(self):
- return self.image.size[1]
-
-
-class Descriptor(object):
- def __init__(self, spec):
- self._spec = spec
-
- def __get__(self, obj, type=None):
- return Accessor(obj, self._spec)
diff --git a/imagekit/tests.py b/imagekit/tests.py
deleted file mode 100644
index 8c2eb5e..0000000
--- a/imagekit/tests.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import os
-import tempfile
-import unittest
-from django.conf import settings
-from django.core.files.base import ContentFile
-from django.db import models
-from django.test import TestCase
-
-from imagekit import processors
-from imagekit.models import ImageModel
-from imagekit.specs import ImageSpec
-from imagekit.lib import Image
-
-
-class ResizeToWidth(processors.Resize):
- width = 100
-
-class ResizeToHeight(processors.Resize):
- height = 100
-
-class ResizeToFit(processors.Resize):
- width = 100
- height = 100
-
-class ResizeCropped(ResizeToFit):
- crop = ('center', 'center')
-
-class TestResizeToWidth(ImageSpec):
- access_as = 'to_width'
- processors = [ResizeToWidth]
-
-class TestResizeToHeight(ImageSpec):
- access_as = 'to_height'
- processors = [ResizeToHeight]
-
-class TestResizeCropped(ImageSpec):
- access_as = 'cropped'
- processors = [ResizeCropped]
-
-class TestPhoto(ImageModel):
- """ Minimal ImageModel class for testing """
- image = models.ImageField(upload_to='images')
-
- class IKOptions:
- spec_module = 'imagekit.tests'
-
-
-class IKTest(TestCase):
- """ Base TestCase class """
- def setUp(self):
- # create a test image using tempfile and PIL
- self.tmp = tempfile.TemporaryFile()
- Image.new('RGB', (800, 600)).save(self.tmp, 'JPEG')
- self.tmp.seek(0)
- self.p = TestPhoto()
- self.p.image.save(os.path.basename('test.jpg'),
- ContentFile(self.tmp.read()))
- self.p.save()
- # destroy temp file
- self.tmp.close()
-
- def test_setup(self):
- self.assertEqual(self.p.image.width, 800)
- self.assertEqual(self.p.image.height, 600)
-
- def test_to_width(self):
- self.assertEqual(self.p.to_width.width, 100)
- self.assertEqual(self.p.to_width.height, 75)
-
- def test_to_height(self):
- self.assertEqual(self.p.to_height.width, 133)
- self.assertEqual(self.p.to_height.height, 100)
-
- def test_crop(self):
- self.assertEqual(self.p.cropped.width, 100)
- self.assertEqual(self.p.cropped.height, 100)
-
- def test_url(self):
- tup = (settings.MEDIA_URL, self.p._ik.cache_dir, 'test_to_width.jpg')
- self.assertEqual(self.p.to_width.url, "%s%s/%s" % tup)
-
- def tearDown(self):
- # make sure image file is deleted
- path = self.p.image.path
- self.p.delete()
- self.failIf(os.path.isfile(path))
diff --git a/imagekit/utils.py b/imagekit/utils.py
deleted file mode 100644
index 352d40f..0000000
--- a/imagekit/utils.py
+++ /dev/null
@@ -1,15 +0,0 @@
-""" ImageKit utility functions """
-
-import tempfile
-
-def img_to_fobj(img, format, **kwargs):
- tmp = tempfile.TemporaryFile()
- if format != 'JPEG':
- try:
- img.save(tmp, format, **kwargs)
- return
- except KeyError:
- pass
- img.save(tmp, format, **kwargs)
- tmp.seek(0)
- return tmp
diff --git a/media/js/survey.js b/media/js/survey.js
index 30305dc..268d655 100644
--- a/media/js/survey.js
+++ b/media/js/survey.js
@@ -29,12 +29,12 @@
}
function redirectSurvey(){
- window.location = "{{ settings.URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23" + document.getElementById("surveyChooser").value;
+ window.location = "{{ URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23" + document.getElementById("surveyChooser").value;
document.getElementById("progressTableContent").style.display='hidden'
}
function redirectYear(){
- window.location = "{{ settings.URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23"
+ window.location = "{{ URL_ROOT }}/survey/" + document.getElementById("expeditionChooser").value + "%23"
}
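survey.js is served through the template engine, and the variable it reads changes from {{ settings.URL_ROOT }} to {{ URL_ROOT }}, i.e. the value is now expected directly in the template context (settings.py is modified by this commit but not shown here). A hypothetical context-processor sketch of how such a variable is commonly supplied:

    # Hypothetical sketch only: make URL_ROOT available to every template.
    from django.conf import settings

    def url_root(request):
        return {'URL_ROOT': settings.URL_ROOT}

    # settings.py would then list the dotted path to url_root under the
    # template engine's 'context_processors' option.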
diff --git a/parsers/logbooks.py b/parsers/logbooks.py
index e5817a6..224b2cd 100644
--- a/parsers/logbooks.py
+++ b/parsers/logbooks.py
@@ -121,7 +121,7 @@ def ParseDate(tripdate, year):
day, month, year = int(mdategoof.group(1)), int(mdategoof.group(2)), int(mdategoof.group(4)) + yadd
else:
assert False, tripdate
- return datetime.date(year, month, day)
+ return make_aware(datetime.datetime(year, month, day), get_current_timezone())
# 2006, 2008 - 2010
def Parselogwikitxt(year, expedition, txt):
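ParseDate() now returns a timezone-aware datetime rather than a date, in step with the DateField to DateTimeField switches in core/models.py and core/models_survex.py. A small sketch of the aware-datetime construction (it needs a configured Django settings module to know the current timezone):

    # Sketch: wrap a bare year/month/day as an aware datetime, as the parser now does.
    import datetime
    from django.utils.timezone import get_current_timezone, make_aware

    def to_aware(year, month, day):
        naive = datetime.datetime(year, month, day)        # midnight, naive
        return make_aware(naive, get_current_timezone())   # attach the active zone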
diff --git a/parsers/people.py b/parsers/people.py
index 34a5ff3..28a036a 100644
--- a/parsers/people.py
+++ b/parsers/people.py
@@ -87,7 +87,7 @@ def LoadPersonsExpos():
lastname = ""
lookupAttribs={'first_name':firstname, 'last_name':(lastname or "")}
- nonLookupAttribs={'is_vfho':personline[header["VfHO member"]], 'fullname':fullname}
+ nonLookupAttribs={'is_vfho':bool(personline[header["VfHO member"]]), 'fullname':fullname}
person, created = save_carefully(models.Person, lookupAttribs, nonLookupAttribs)
parseMugShotAndBlurb(personline=personline, header=header, person=person)
@@ -100,26 +100,6 @@ def LoadPersonsExpos():
nonLookupAttribs = {'nickname':nickname, 'is_guest':(personline[header["Guest"]] == "1")}
save_carefully(models.PersonExpedition, lookupAttribs, nonLookupAttribs)
-
- # this fills in those people for whom 2008 was their first expo
- #print "Loading personexpeditions 2008"
- #expoers2008 = """Edvin Deadman,Kathryn Hopkins,Djuke Veldhuis,Becka Lawson,Julian Todd,Natalie Uomini,Aaron Curtis,Tony Rooke,Ollie Stevens,Frank Tully,Martin Jahnke,Mark Shinwell,Jess Stirrups,Nial Peters,Serena Povia,Olly Madge,Steve Jones,Pete Harley,Eeva Makiranta,Keith Curtis""".split(",")
- #expomissing = set(expoers2008)
- #for name in expomissing:
- # firstname, lastname = name.split()
- # is_guest = name in ["Eeva Makiranta", "Keith Curtis"]
- # print "2008:", name
- # persons = list(models.Person.objects.filter(first_name=firstname, last_name=lastname))
- # if not persons:
- # person = models.Person(first_name=firstname, last_name = lastname, is_vfho = False, mug_shot = "")
- # #person.Sethref()
- # person.save()
- # else:
- # person = persons[0]
- # expedition = models.Expedition.objects.get(year="2008")
- # personexpedition = models.PersonExpedition(person=person, expedition=expedition, nickname="", is_guest=is_guest)
- # personexpedition.save()
-
# used in other referencing parser functions
# expedition name lookup cached for speed (it's a very big list)
Gpersonexpeditionnamelookup = { }
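The parsers/survex.py diff below hoists its regular expressions into module-level re.compile() calls (line_leg_regex, regex_comment, regex_ref, regex_star, ...) so each pattern is compiled once rather than on every line of every .svx file. A minimal sketch of the pattern:

    # Sketch: compile once at import time, reuse per line.
    import re

    LINE_LEG_REGEX = re.compile(r"[\d\-+.]+$")   # same pattern as line_leg_regex below

    def looks_like_leg_field(token):
        return bool(LINE_LEG_REGEX.match(token))

    assert looks_like_leg_field("123.4")
    assert not looks_like_leg_field("abc")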
diff --git a/parsers/survex.py b/parsers/survex.py
index 294de73..f80f9f2 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -5,9 +5,14 @@ import troggle.settings as settings
from subprocess import call, Popen, PIPE
from troggle.parsers.people import GetPersonExpeditionNameLookup
+from django.utils.timezone import get_current_timezone
+from django.utils.timezone import make_aware
+
import re
import os
+from datetime import datetime
+line_leg_regex = re.compile(r"[\d\-+.]+$")
def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
ls = sline.lower().split()
@@ -53,8 +58,8 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment):
survexleg.compass = 1000
survexleg.clino = -90.0
else:
- assert re.match(r"[\d\-+.]+$", lcompass), ls
- assert re.match(r"[\d\-+.]+$", lclino) and lclino != "-", ls
+ assert line_leg_regex.match(lcompass), ls
+ assert line_leg_regex.match(lclino) and lclino != "-", ls
survexleg.compass = float(lcompass)
survexleg.clino = float(lclino)
@@ -80,11 +85,16 @@ def LoadSurvexEquate(survexblock, sline):
def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
pass
-
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
+regex_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
+regex_ref = re.compile(r'.*?ref.*?(\d+)\s*#\s*(\d+)')
+regex_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
+regex_team = re.compile(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)")
+regex_team_member = re.compile(r" and | / |, | & | \+ |^both$|^none$(?i)")
+
def RecursiveLoad(survexblock, survexfile, fin, textlines):
iblankbegins = 0
text = [ ]
@@ -92,37 +102,47 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
teammembers = [ ]
# uncomment to print out all files during parsing
- print("Reading file: " + survexblock.survexfile.path)
+ print(" - Reading file: " + survexblock.survexfile.path)
+ stamp = datetime.now()
+ lineno = 0
while True:
svxline = fin.readline().decode("latin1")
if not svxline:
+ print(' - Not survex')
return
textlines.append(svxline)
+ lineno += 1
+
+ # print(' - Line: %d' % lineno)
+
# break the line at the comment
- sline, comment = re.match(r"([^;]*?)\s*(?:;\s*(.*))?\n?$", svxline.strip()).groups()
+ sline, comment = regex_comment.match(svxline.strip()).groups()
# detect ref line pointing to the scans directory
- mref = comment and re.match(r'.*?ref.*?(\d+)\s*#\s*(\d+)', comment)
+ mref = comment and regex_ref.match(comment)
if mref:
refscan = "%s#%s" % (mref.group(1), mref.group(2))
survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
if survexscansfolders:
survexblock.survexscansfolder = survexscansfolders[0]
#survexblock.refscandir = "%s/%s%%23%s" % (mref.group(1), mref.group(1), mref.group(2))
- survexblock.save()
+ survexblock.save()
continue
if not sline:
continue
# detect the star command
- mstar = re.match(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$', sline)
+ mstar = regex_star.match(sline)
if not mstar:
if "from" in stardata:
LoadSurvexLineLeg(survexblock, stardata, sline, comment)
+ # print(' - From: ')
+ #print(stardata)
elif stardata["type"] == "passage":
LoadSurvexLinePassage(survexblock, stardata, sline, comment)
+ # print(' - Passage: ')
#Missing "station" in stardata.
continue
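
The reading loop first splits each .svx line into data and trailing comment with regex_comment, then tests for a star command with regex_star. A self-contained sketch of that split (the helper name is illustrative):

import re

regex_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
regex_star = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')

def split_svx_line(svxline):
    # Returns (command, argument, comment); command is None for data lines.
    sline, comment = regex_comment.match(svxline.strip()).groups()
    mstar = regex_star.match(sline) if sline else None
    if mstar:
        return mstar.group(1).lower(), mstar.group(2), comment
    return None, sline, comment

assert split_svx_line("*begin gnomecave ; 2018 survey") == ("begin", "gnomecave", "2018 survey")
assert split_svx_line("1 2 5.50 010 -05") == (None, "1 2 5.50 010 -05", None)
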
@@ -131,7 +151,19 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
cmd = cmd.lower()
if re.match("include$(?i)", cmd):
includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
- includesurvexfile = models.SurvexFile(path=includepath, cave=survexfile.cave)
+ print(' - Include file found including - ' + includepath)
+ # Try to find the cave in the DB if not use the string as before
+ path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
+ if path_match:
+ pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
+ print(pos_cave)
+ cave = models.getCaveByReference(pos_cave)
+ if not cave:
+ cave = survexfile.cave
+ else:
+ print('No match for %s' % includepath)
+ cave = survexfile.cave
+ includesurvexfile = models.SurvexFile(path=includepath, cave=cave)
includesurvexfile.save()
includesurvexfile.SetDirectory()
if includesurvexfile.exists():
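
The include handling now tries to attach the included file to a real Cave by inspecting the path, and only falls back to the parent file's cave when that fails. Roughly the same decision in isolation, with get_cave_by_reference standing in for models.getCaveByReference (assumed, per the diff, to return a Cave or a false value):

import re

cave_path_regex = re.compile(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/")

def cave_for_include(includepath, parent_cave, get_cave_by_reference):
    # e.g. "caves-1623/204/somefile" -> reference "1623-204"
    m = cave_path_regex.search(includepath)
    if m:
        ref = "%s-%s" % (m.group(1), m.group(2))
        cave = get_cave_by_reference(ref)
        if cave:
            return cave
    # No recognisable cave in the path, or no match in the database:
    # keep the cave of the including file, as the old code always did.
    return parent_cave
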
@@ -141,6 +173,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
elif re.match("begin$(?i)", cmd):
if line:
name = line.lower()
+ #print(' - Begin found for: ' + name)
survexblockdown = models.SurvexBlock(name=name, begin_char=fin.tell(), parent=survexblock, survexpath=survexblock.survexpath+"."+name, cave=survexblock.cave, survexfile=survexfile, totalleglength=0.0)
survexblockdown.save()
textlinesdown = [ ]
@@ -154,11 +187,16 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
else:
survexblock.text = "".join(textlines)
survexblock.save()
+ # print(' - End found: ')
+ endstamp = datetime.now()
+ timetaken = endstamp - stamp
+ # print(' - Time to process: ' + str(timetaken))
return
elif re.match("date$(?i)", cmd):
if len(line) == 10:
- survexblock.date = re.sub(r"\.", "-", line)
+ #print(' - Date found: ' + line)
+ survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
expeditions = models.Expedition.objects.filter(year=line[:4])
if expeditions:
assert len(expeditions) == 1
@@ -167,9 +205,11 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
survexblock.save()
elif re.match("team$(?i)", cmd):
- mteammember = re.match(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)", line)
+ pass
+ # print(' - Team found: ')
+ mteammember = regex_team.match(line)
if mteammember:
- for tm in re.split(r" and | / |, | & | \+ |^both$|^none$(?i)", mteammember.group(2)):
+ for tm in regex_team_member.split(mteammember.group(2)):
if tm:
personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
if (personexpedition, tm) not in teammembers:
@@ -181,6 +221,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
personrole.save()
elif cmd == "title":
+ #print(' - Title found: ')
survextitle = models.SurvexTitle(survexblock=survexblock, title=line.strip('"'), cave=survexblock.cave)
survextitle.save()
@@ -189,8 +230,11 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
pass
elif cmd == "data":
+ #print(' - Data found: ')
ls = line.lower().split()
stardata = { "type":ls[0] }
+ #print(' - Star data: ', stardata)
+ #print(ls)
for i in range(0, len(ls)):
stardata[stardataparamconvert.get(ls[i], ls[i])] = i - 1
if ls[0] in ["normal", "cartesian", "nosurvey"]:
@@ -201,33 +245,47 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
assert ls[0] == "passage", line
elif cmd == "equate":
+ #print(' - Equate found: ')
LoadSurvexEquate(survexblock, line)
elif cmd == "fix":
+ #print(' - Fix found: ')
survexblock.MakeSurvexStation(line.split()[0])
else:
+ #print(' - Stuff')
if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
"calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)
+ endstamp = datetime.now()
+ timetaken = endstamp - stamp
+ # print(' - Time to process: ' + str(timetaken))
def ReloadSurvexCave(survex_cave, area):
- print(survex_cave, area)
- cave = models.Cave.objects.get(kataster_number=survex_cave, area__short_name=area)
- print(cave)
- #cave = models.Cave.objects.get(kataster_number=survex_cave)
+ print(' - Area: ' + str(area) + ' Cave: ' + str(survex_cave))
+ filt_cave = models.Cave.objects.filter(kataster_number=survex_cave, area__short_name=area)[:1]
+
+ if len(filt_cave) < 1:
+ filt_cave = models.Cave.objects.filter(unofficial_number=survex_cave, area__short_name=area)[:1]
+
+ cave = filt_cave[0]
+ print(' - ' + str(cave))
cave.survexblock_set.all().delete()
cave.survexfile_set.all().delete()
cave.survexdirectory_set.all().delete()
-
+
+ file_stamp_start = datetime.now()
survexfile = models.SurvexFile(path="caves-" + cave.kat_area() + "/" + survex_cave + "/" + survex_cave, cave=cave)
survexfile.save()
survexfile.SetDirectory()
-
+
survexblockroot = models.SurvexBlock(name="root", survexpath="caves-" + cave.kat_area(), begin_char=0, cave=cave, survexfile=survexfile, totalleglength=0.0)
survexblockroot.save()
fin = survexfile.OpenFile()
+ file_stamp_end = datetime.now()
+ file_time = file_stamp_end - file_stamp_start
+ print(' - Files time to process: ' + str(file_time))
textlines = [ ]
RecursiveLoad(survexblockroot, survexfile, fin, textlines)
survexblockroot.text = "".join(textlines)
@@ -268,10 +326,18 @@ def LoadAllSurvexBlocks():
print(" - Reloading all caves")
caves = models.Cave.objects.all()
for cave in caves:
+ rec_stamp_start = datetime.now()
if cave.kataster_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves-" + cave.kat_area(), cave.kataster_number)):
if cave.kataster_number not in ['40']:
- print("loading", cave, cave.kat_area())
- ReloadSurvexCave(cave.kataster_number, cave.kat_area())
+ print(" - Loading " + str(cave) + " " + cave.kat_area())
+ #ReloadSurvexCave(cave.kataster_number, cave.kat_area())
+ rec_stamp_end = datetime.now()
+ if cave.unofficial_number and os.path.isdir(os.path.join(settings.SURVEX_DATA, "caves-" + cave.kat_area(), cave.unofficial_number)):
+ print(" - Loading " + str(cave) + " " + cave.kat_area())
+ #ReloadSurvexCave(cave.unofficial_number, cave.kat_area())
+ timetaken = rec_stamp_end - rec_stamp_start
+ # print(' - Time to process: ' + str(timetaken))
+ # print('--------')
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")
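
ReloadSurvexCave now resolves the cave with filter() by kataster number and falls back to the unofficial number, instead of a bare get() that raised on any mismatch. A sketch of that lookup; note the patch still indexes [0], so an entirely unknown number raises IndexError, whereas this version returns None:

def find_cave(Cave, survex_cave, area):
    # Official kataster number first; [:1] keeps the query cheap and
    # sidesteps MultipleObjectsReturned.
    matches = Cave.objects.filter(kataster_number=survex_cave,
                                  area__short_name=area)[:1]
    if len(matches) < 1:
        matches = Cave.objects.filter(unofficial_number=survex_cave,
                                      area__short_name=area)[:1]
    return matches[0] if matches else None
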
@@ -286,7 +352,7 @@ def LoadPos():
posfile = open("%s%s.pos" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME))
posfile.readline() #Drop header
for line in posfile.readlines():
- r = poslineregex.match(line)
+ r = poslineregex.match(line)
if r:
x, y, z, name = r.groups()
try:
diff --git a/parsers/surveys.py b/parsers/surveys.py
index efab536..0eed6f0 100644
--- a/parsers/surveys.py
+++ b/parsers/surveys.py
@@ -1,12 +1,7 @@
import sys, os, types, logging, stat
-#sys.path.append('C:\\Expo\\expoweb')
-#from troggle import *
-#os.environ['DJANGO_SETTINGS_MODULE']='troggle.settings'
import settings
from troggle.core.models import *
from PIL import Image
-#import settings
-#import core.models as models
import csv
import re
import datetime
@@ -45,21 +40,18 @@ def readSurveysFromCSV():
logging.info("Deleting all scanned images")
ScannedImage.objects.all().delete()
-
-
+
logging.info("Deleting all survey objects")
Survey.objects.all().delete()
-
-
+
logging.info("Beginning to import surveys from "+str(os.path.join(settings.SURVEYS, "Surveys.csv"))+"\n"+"-"*60+"\n")
-
+
for survey in surveyreader:
- #I hate this, but some surveys have a letter eg 2000#34a. The next line deals with that.
+ # I hate this, but some surveys have a letter eg 2000#34a. The next line deals with that.
walletNumberLetter = re.match(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)',survey[header['Survey Number']])
- # print(walletNumberLetter.groups())
+ # print(walletNumberLetter.groups())
year=survey[header['Year']]
-
surveyobj = Survey(
expedition = Expedition.objects.filter(year=year)[0],
wallet_number = walletNumberLetter.group('number'),
@@ -73,7 +65,6 @@ def readSurveysFromCSV():
pass
surveyobj.save()
-
logging.info("added survey " + survey[header['Year']] + "#" + surveyobj.wallet_number + "\r")
# dead
@@ -141,7 +132,7 @@ def parseSurveyScans(expedition, logfile=None):
yearPath=os.path.join(settings.SURVEY_SCANS, "surveyscans", expedition.year)
print("No folder found for " + expedition.year + " at:- " + yearPath)
-# dead
+
def parseSurveys(logfile=None):
try:
readSurveysFromCSV()
@@ -271,8 +262,9 @@ def SetTunnelfileInfo(tunnelfile):
fin.close()
mtype = re.search("<(fontcolours|sketch)", ttext)
- assert mtype, ff
- tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
+ #assert mtype, ff
+ if mtype:
+ tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
tunnelfile.npaths = len(re.findall("<skpath", ttext))
tunnelfile.save()
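
readSurveysFromCSV splits wallet identifiers such as "2000#34a" into a number and an optional letter using named groups. The same split in isolation:

import re

wallet_regex = re.compile(r'(?P<number>\d*)(?P<letter>[a-zA-Z]*)')

def split_wallet(value):
    # "34a" -> ("34", "a"); "34" -> ("34", "")
    m = wallet_regex.match(value)
    return m.group('number'), m.group('letter')

assert split_wallet("34a") == ("34", "a")
assert split_wallet("34") == ("34", "")
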
diff --git a/profiles/urls.py b/profiles/urls.py
index d10894d..6937a24 100644
--- a/profiles/urls.py
+++ b/profiles/urls.py
@@ -27,7 +27,7 @@ from django.conf.urls import *
from profiles import views
-urlpatterns = patterns('',
+urlpatterns = [
url(r'^select/$',
views.select_profile,
name='profiles_select_profile'),
@@ -43,4 +43,4 @@ urlpatterns = patterns('',
url(r'^$',
views.profile_list,
name='profiles_profile_list'),
- )
+]
diff --git a/profiles/utils.py b/profiles/utils.py
index c2dfd61..4f42415 100644
--- a/profiles/utils.py
+++ b/profiles/utils.py
@@ -14,8 +14,7 @@ try:
except ImportError: # django >= 1.7
SiteProfileNotAvailable = type('SiteProfileNotAvailable', (Exception,), {})
-from django.db.models import get_model
-
+from django.apps import apps
def get_profile_model():
"""
@@ -28,7 +27,7 @@ def get_profile_model():
if (not hasattr(settings, 'AUTH_PROFILE_MODULE')) or \
(not settings.AUTH_PROFILE_MODULE):
raise SiteProfileNotAvailable
- profile_mod = get_model(*settings.AUTH_PROFILE_MODULE.split('.'))
+ profile_mod = apps.get_model(*settings.AUTH_PROFILE_MODULE.split('.'))
if profile_mod is None:
raise SiteProfileNotAvailable
return profile_mod
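
django.db.models.get_model was removed in Django 1.9; the app registry (django.apps.apps) is the supported replacement. A hedged sketch of the lookup get_profile_model performs, assuming a configured settings module and an AUTH_PROFILE_MODULE of the form "app_label.ModelName" (the exception here is a plain stand-in for SiteProfileNotAvailable):

from django.apps import apps
from django.conf import settings

def load_profile_model():
    module = getattr(settings, 'AUTH_PROFILE_MODULE', None)
    if not module:
        raise LookupError("AUTH_PROFILE_MODULE is not configured")
    app_label, model_name = module.split('.')
    # apps.get_model itself raises LookupError if the model is unknown.
    return apps.get_model(app_label, model_name)
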
diff --git a/settings.py b/settings.py
index 68b5ff1..4470361 100644
--- a/settings.py
+++ b/settings.py
@@ -8,7 +8,6 @@ BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Django settings for troggle project.
DEBUG = True
-TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [u'expo.survex.com']
@@ -56,6 +55,8 @@ SVX_URL = urlparse.urljoin(URL_ROOT , '/survex/')
# top-level survex file basename (without .svx)
SURVEX_TOPNAME = "1623"
+KAT_AREAS = ['1623', '1624', '1626', '1627']
+
DEFAULT_LOGBOOK_PARSER = "Parseloghtmltxt"
DEFAULT_LOGBOOK_FILE = "logbook.html"
@@ -96,20 +97,34 @@ SMART_APPEND_SLASH = True
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'a#vaeozn0)uz_9t_%v5n#tj)m+%ace6b_0(^fj!355qki*v)j2'
-# List of callables that know how to import templates from various sources.
-TEMPLATE_LOADERS = (
- 'django.template.loaders.filesystem.Loader',
- 'django.template.loaders.app_directories.Loader',
-# 'django.template.loaders.eggs.load_template_source',
-)
+TEMPLATES = [
+ {
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': [
+ os.path.join(PYTHON_PATH, 'templates')
+ ],
+ 'APP_DIRS': True,
+ 'OPTIONS': {
+ 'context_processors': [
+ 'django.contrib.auth.context_processors.auth',
+ 'django.template.context_processors.debug',
+ 'django.template.context_processors.i18n',
+ 'django.template.context_processors.media',
+ 'django.template.context_processors.static',
+ 'django.template.context_processors.tz',
+ 'django.contrib.messages.context_processors.messages',
+ 'django.template.context_processors.request',
+ #'core.context.troggle_context'
+ ]
+ },
+ },
+]
if django.VERSION[0] == 1 and django.VERSION[1] < 4:
authmodule = 'django.core.context_processors.auth'
else:
authmodule = 'django.contrib.auth.context_processors.auth'
-TEMPLATE_CONTEXT_PROCESSORS = ( authmodule, "core.context.troggle_context", )
-
LOGIN_REDIRECT_URL = '/'
INSTALLED_APPS = (
@@ -122,14 +137,13 @@ INSTALLED_APPS = (
'django.contrib.messages',
'django.contrib.staticfiles',
#'troggle.photologue',
- #'troggle.reversion',
- #'django_evolution',
'tinymce',
'registration',
'troggle.profiles',
'troggle.core',
'troggle.flatpages',
- 'troggle.imagekit',
+ 'imagekit',
+ 'django_extensions',
)
MIDDLEWARE_CLASSES = (
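
The TEMPLATES list replaces TEMPLATE_DEBUG, TEMPLATE_LOADERS and TEMPLATE_CONTEXT_PROCESSORS from Django 1.8 onward; DIRS plus APP_DIRS=True reproduces the old filesystem-then-app-directories loader order, and the context processors move under OPTIONS. A small sketch of rendering through the configured backend so those processors actually run; this is not the actual view, only an illustration using the this_expedition variable renamed later in this commit:

from django.template.loader import render_to_string

def render_expedition_page(request, expo):
    # Passing request= makes the request-dependent context processors
    # (auth, messages, request) available to the template.
    return render_to_string("expedition.html", {"this_expedition": expo},
                            request=request)
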
diff --git a/templates/base.html b/templates/base.html
index 666ff7a..e160b76 100644
--- a/templates/base.html
+++ b/templates/base.html
@@ -64,8 +64,8 @@
<div id="related">
{% block related %}
<script language="javascript">
- $('#related').remove()
- /*This is a hack to stop a line appearing because of the empty div border*/
+ $('#related').remove()
+ /*This is a hack to stop a line appearing because of the empty div border*/
</script>
{% endblock %}
</div>
diff --git a/templates/expedition.html b/templates/expedition.html
index b5b58a8..8feeac6 100644
--- a/templates/expedition.html
+++ b/templates/expedition.html
@@ -2,15 +2,15 @@
{% load wiki_markup %}
{% load link %}
-{% block title %}Expedition {{expedition.name}}{% endblock %}
-{% block editLink %}<a href={{expedition.get_admin_url}}>Edit expedition {{expedition|wiki_to_html_short}}</a>{% endblock %}
+{% block title %}Expedition {{this_expedition.name}}{% endblock %}
+{% block editLink %}<a href={{this_expedition.get_admin_url}}>Edit expedition {{expedition|wiki_to_html_short}}</a>{% endblock %}
{% block related %}
{% endblock %}
{% block content %}
-<h2>{{expedition.name}}</h2>
+<h2>{{this_expedition.name}}</h2>
<p><b>Other years:</b>
{% for otherexpedition in expeditions %}
@@ -29,7 +29,7 @@ an "S" for a survey trip. The colours are the same for people on the same trip.
<table class="expeditionpersonlist">
<tr>
<th>Caver</th>
-{% for expeditionday in expedition.expeditionday_set.all %}
+{% for expeditionday in this_expedition.expeditionday_set.all %}
<th>
{{expeditionday.date.day}}
</th>
@@ -63,7 +63,7 @@ an "S" for a survey trip. The colours are the same for people on the same trip.
<form action="" method="GET"><input type="submit" name="reload" value="Reload"></form>
<h3>Logbooks and survey trips per day</h3>
-<a href="{% url "newLogBookEntry" expeditionyear=expedition.year %}">New logbook entry</a>
+<a href="{% url "newLogBookEntry" expeditionyear=this_expedition.year %}">New logbook entry</a>
<table class="expeditionlogbooks">
<tr><th>Date</th><th>Logged trips</th><th>Surveys</th></tr>
{% regroup dateditems|dictsort:"date" by date as dates %}
diff --git a/urls.py b/urls.py
index 2a30faf..911b872 100644
--- a/urls.py
+++ b/urls.py
@@ -1,18 +1,19 @@
from django.conf.urls import *
from django.conf import settings
+from django.conf.urls.static import static
from core.views import * # flat import
from core.views_other import *
from core.views_caves import *
from core.views_survex import *
from core.models import *
+from flatpages.views import *
from django.views.generic.edit import UpdateView
from django.contrib import admin
from django.views.generic.list import ListView
from django.contrib import admin
admin.autodiscover()
-
# type url probably means it's used.
# HOW DOES THIS WORK:
@@ -20,7 +21,7 @@ admin.autodiscover()
# <reference to python function in 'core' folder>,
# <name optional argument for URL reversing (doesn't do much)>)
-actualurlpatterns = patterns('',
+actualurlpatterns = [
url(r'^testingurl/?$' , views_caves.millenialcaves, name="testing"),
@@ -77,11 +78,10 @@ actualurlpatterns = patterns('',
url(r'^cave/(?P<cave_id>[^/]+)/(?P<year>\d\d\d\d)-(?P<qm_id>\d*)(?P<grade>[ABCDX]?)?$', views_caves.qm, name="qm"),
- url(r'^prospecting_guide/$', views_caves.prospecting),
+ url(r'^prospecting_guide/$', views_caves.prospecting),
url(r'^logbooksearch/(.*)/?$', views_logbooks.logbookSearch),
-
url(r'^statistics/?$', views_other.stats, name="stats"),
url(r'^survey/?$', surveyindex, name="survey"),
@@ -93,78 +93,70 @@ actualurlpatterns = patterns('',
url(r'^logbook(?P<year>\d\d\d\d)\.(?P<extension>.*)/?$',views_other.downloadLogbook),
url(r'^logbook/?$',views_other.downloadLogbook, name="downloadlogbook"),
url(r'^cave/(?P<cave_id>[^/]+)/qm\.csv/?$', views_other.downloadQMs, name="downloadqms"),
- (r'^downloadqms$', views_other.downloadQMs),
+ url(r'^downloadqms$', views_other.downloadQMs),
url(r'^eyecandy$', views_other.eyecandy),
- (r'^admin/doc/?', include('django.contrib.admindocs.urls')),
+ url(r'^admin/doc/?', include('django.contrib.admindocs.urls')),
#url(r'^admin/(.*)', admin.site.get_urls, name="admin"),
- (r'^admin/', include(admin.site.urls)),
+ url(r'^admin/', include(admin.site.urls)),
# don't know why this needs troggle/ in here. nice to get it out
- url(r'^troggle/media-admin/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ADMIN_DIR, 'show_indexes':True}),
+ # url(r'^troggle/media-admin/(?P<path>.*)$', static, {'document_root': settings.MEDIA_ADMIN_DIR, 'show_indexes':True}),
+
+ url(r'^accounts/', include('registration.backends.default.urls')),
+ url(r'^profiles/', include('profiles.urls')),
- (r'^accounts/', include('registration.backends.default.urls')),
- (r'^profiles/', include('profiles.urls')),
-
# (r'^personform/(.*)$', personForm),
- (r'^site_media/(?P<path>.*)$', 'django.views.static.serve',
- {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
- (r'^tinymce_media/(?P<path>.*)$', 'django.views.static.serve',
- {'document_root': settings.TINY_MCE_MEDIA_ROOT, 'show_indexes': True}),
-
-
+ url(r'^site_media/(?P<path>.*)$', static, {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
+
url(r'^survexblock/(.+)$', views_caves.survexblock, name="survexblock"),
url(r'^survexfile/(?P<survex_file>.*?)\.svx$', views_survex.svx, name="svx"),
url(r'^survexfile/(?P<survex_file>.*?)\.3d$', views_survex.threed, name="threed"),
url(r'^survexfile/(?P<survex_file>.*?)\.log$', views_survex.svxraw),
url(r'^survexfile/(?P<survex_file>.*?)\.err$', views_survex.err),
-
-
+
+
url(r'^survexfile/caves/$', views_survex.survexcaveslist, name="survexcaveslist"),
url(r'^survexfile/caves/(?P<survex_cave>.*)$', views_survex.survexcavesingle, name="survexcavessingle"),
url(r'^survexfileraw/(?P<survex_file>.*?)\.svx$', views_survex.svxraw, name="svxraw"),
-
-
- (r'^survey_files/listdir/(?P<path>.*)$', view_surveys.listdir),
- (r'^survey_files/download/(?P<path>.*)$', view_surveys.download),
+
+
+ url(r'^survey_files/listdir/(?P<path>.*)$', view_surveys.listdir),
+ url(r'^survey_files/download/(?P<path>.*)$', view_surveys.download),
#(r'^survey_files/upload/(?P<path>.*)$', view_surveys.upload),
-
-
#(r'^survey_scans/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.SURVEY_SCANS, 'show_indexes':True}),
url(r'^survey_scans/$', view_surveys.surveyscansfolders, name="surveyscansfolders"),
url(r'^survey_scans/(?P<path>[^/]+)/$', view_surveys.surveyscansfolder, name="surveyscansfolder"),
url(r'^survey_scans/(?P<path>[^/]+)/(?P<file>[^/]+(?:png|jpg|jpeg))$',
view_surveys.surveyscansingle, name="surveyscansingle"),
-
+
url(r'^tunneldata/$', view_surveys.tunneldata, name="tunneldata"),
url(r'^tunneldataraw/(?P<path>.+?\.xml)$', view_surveys.tunnelfile, name="tunnelfile"),
url(r'^tunneldataraw/(?P<path>.+?\.xml)/upload$',view_surveys.tunnelfileupload, name="tunnelfileupload"),
-
+
#url(r'^tunneldatainfo/(?P<path>.+?\.xml)$', view_surveys.tunnelfileinfo, name="tunnelfileinfo"),
-
- (r'^photos/(?P<path>.*)$', 'django.views.static.serve',
- {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
-
+
+ # url(r'^photos/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
+
url(r'^prospecting/(?P<name>[^.]+).png$', prospecting_image, name="prospecting_image"),
-# (r'^gallery/(?P<path>.*)$', 'django.views.static.serve',
-# {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
+# (r'^gallery/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.PHOTOS_ROOT, 'show_indexes':True}),
# for those silly ideas
url(r'^experimental.*$', views_logbooks.experimental, name="experimental"),
-
+
#url(r'^trip_report/?$',views_other.tripreport,name="trip_report")
- url(r'^(.*)_edit$', 'flatpages.views.editflatpage', name="editflatpage"),
- url(r'^(.*)$', 'flatpages.views.flatpage', name="flatpage"),
-)
+ url(r'^(.*)_edit$', editflatpage, name="editflatpage"),
+ url(r'^(.*)$', flatpage, name="flatpage"),
+]
#Allow prefix to all urls
-urlpatterns = patterns ('',
- ('^%s' % settings.DIR_ROOT, include(actualurlpatterns))
-)
+urlpatterns = [
+ url('^%s' % settings.DIR_ROOT, include(actualurlpatterns))
+] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
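
patterns() is gone (removed in Django 1.10): urlpatterns is now a plain list of url() entries, and static assets are served through django.conf.urls.static.static() instead of string-named 'django.views.static.serve' routes. A minimal, self-contained example of that shape (placeholder view, not one of troggle's):

from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse

def placeholder_view(request):
    return HttpResponse("ok")

urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^$', placeholder_view, name="home"),
    # static() only emits patterns when DEBUG is True; in production the
    # web server is expected to serve STATIC_ROOT directly.
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
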
diff --git a/utils.py b/utils.py
index d5deeda..ffb9f66 100644
--- a/utils.py
+++ b/utils.py
@@ -46,7 +46,7 @@ def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):
instance, created=objectType.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)
if not created and not instance.new_since_parsing:
- for k, v in nonLookupAttribs.items(): #overwrite the existing attributes from the logbook text (except date and title)
+ for k, v in list(nonLookupAttribs.items()): #overwrite the existing attributes from the logbook text (except date and title)
setattr(instance, k, v)
instance.save()
@@ -112,7 +112,7 @@ re_subs = [(re.compile(r"\<b[^>]*\>(.*?)\</b\>", re.DOTALL), r"'''\1'''"),
def html_to_wiki(text, codec = "utf-8"):
if type(text) == str:
- text = unicode(text, codec)
+ text = str(text, codec)
text = re.sub("</p>", r"", text)
text = re.sub("<p>$", r"", text)
text = re.sub("<p>", r"\n\n", text)
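
The utils.py edits are Python 3 housekeeping: list(dict.items()) is the usual 2to3 rewrite (the items view alone would also work here), and str() replaces the removed unicode() builtin. Pulled together, the get_or_create pattern that save_carefully wraps looks roughly like this; new_since_parsing is troggle's own guard flag, the rest of the names are generic:

def save_carefully(model, lookup_attribs=None, non_lookup_attribs=None):
    lookup_attribs = lookup_attribs or {}
    non_lookup_attribs = non_lookup_attribs or {}
    # Identify the object by its lookup fields; create it with the
    # remaining fields only if it does not exist yet.
    instance, created = model.objects.get_or_create(
        defaults=non_lookup_attribs, **lookup_attribs)
    if not created and not instance.new_since_parsing:
        # Refresh parsed fields unless the object was hand-edited since
        # the last parse run.
        for k, v in list(non_lookup_attribs.items()):
            setattr(instance, k, v)
        instance.save()
    return instance, created
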