path: root/parsers/caves.py
author     Philip Sargent <philip.sargent@klebos.com>    2020-07-01 22:49:38 +0100
committer  Philip Sargent <philip.sargent@klebos.com>    2020-07-01 22:49:38 +0100
commit     df434cd39909d177f98dec5a7575f61ea701c102 (patch)
tree       3e37aee9a8ab1e4e2515170c774c4fa7f6b20514 /parsers/caves.py
parent     8cc768e5b6398e4f3fe3211b3f8dc9712e58dd93 (diff)
SurvexBlocks now importing in detail
Diffstat (limited to 'parsers/caves.py')
-rw-r--r--  parsers/caves.py  9
1 file changed, 9 insertions, 0 deletions
diff --git a/parsers/caves.py b/parsers/caves.py
index d1e7406..2bb2ccc 100644
--- a/parsers/caves.py
+++ b/parsers/caves.py
@@ -192,6 +192,8 @@ def readcave(filename):
url = url[0],
filename = filename)
except:
+ # this slow db query happens on every cave, but on import we have all this in memory
+ # and don't need to do a db query. Fix this to speed it up!
# need to cope with duplicates
print(" ! FAILED to get only one CAVE when updating using: "+filename)
kaves = models_caves.Cave.objects.all().filter(kataster_number=kataster_number[0])
@@ -206,6 +208,8 @@ def readcave(filename):
c = k
for area_slug in areas:
+ # this slow db query happens on every cave, but on import we have all this in memory
+ # and don't need to do a db query. Fix this to speed it up!
area = models_caves.Area.objects.filter(short_name = area_slug)
if area:
newArea = area[0]
@@ -216,6 +220,8 @@ def readcave(filename):
primary = True
for slug in slugs:
try:
+ # this slow db query happens on every cave, but on import we have all this in memory
+ # and don't need to do a db query. Fix this to speed it up!
cs = models_caves.CaveSlug.objects.update_or_create(cave = c,
slug = slug,
primary = primary)
@@ -225,10 +231,13 @@ def readcave(filename):
print(message)
primary = False
+
for entrance in entrances:
slug = getXML(entrance, "entranceslug", maxItems = 1, context = context)[0]
letter = getXML(entrance, "letter", maxItems = 1, context = context)[0]
try:
+ # this slow db query happens on every entrance, but on import we have all this in memory
+ # and don't need to do a db query. Fix this to speed it up!
entrance = models_caves.Entrance.objects.get(entranceslug__slug = slug)
ce = models_caves.CaveAndEntrance.objects.update_or_create(cave = c, entrance_letter = letter, entrance = entrance)
except:
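
The comment added at each of these call sites points at the same fix: build the lookups once in memory at the start of the import run, then resolve areas and entrances from a dictionary instead of issuing a query per cave or per entrance. A minimal sketch of that idea, assuming models_caves is the alias already imported in parsers/caves.py and that an EntranceSlug model (with entrance and slug fields) sits behind the entranceslug__slug lookup; neither helper below is part of this commit:

# Hypothetical caching helpers - a sketch of the speed-up suggested in the
# comments above, not code from this commit.
# Assumes the models_caves import already present at the top of parsers/caves.py.

# Build the caches once, before iterating over the cave description files.
_area_by_short_name = {a.short_name: a for a in models_caves.Area.objects.all()}
_entrance_by_slug = {es.slug: es.entrance
                     for es in models_caves.EntranceSlug.objects.select_related("entrance")}

def cached_area(area_slug):
    # Dictionary lookup replacing Area.objects.filter(short_name=...) per cave.
    return _area_by_short_name.get(area_slug)

def cached_entrance(slug):
    # Dictionary lookup replacing Entrance.objects.get(entranceslug__slug=...) per entrance.
    return _entrance_by_slug.get(slug)

With caches like these built once per import, the loops shown above would do O(1) dictionary lookups instead of one database round trip per cave and per entrance.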