author | Philip Sargent <philip@Muscogee.localdomain> | 2020-05-13 23:11:47 +0100 |
---|---|---|
committer | Philip Sargent <philip@Muscogee.localdomain> | 2020-05-13 23:11:47 +0100 |
commit | 314d0e8b710703706d41fbc4d2567445214509f1 (patch) | |
tree | 2b060bfd3f256f90d2e9dc0b2dd3a03fe2b9c24e /databaseReset.py | |
parent | 0338889905cbb96b16f1db2404d6d89ea8af9226 (diff) | |
download | troggle-314d0e8b710703706d41fbc4d2567445214509f1.tar.gz troggle-314d0e8b710703706d41fbc4d2567445214509f1.tar.bz2 troggle-314d0e8b710703706d41fbc4d2567445214509f1.zip |
skip fast pass option added as default
Diffstat (limited to 'databaseReset.py')
-rw-r--r-- | databaseReset.py | 70 |
1 file changed, 18 insertions, 52 deletions
diff --git a/databaseReset.py b/databaseReset.py
index 46bb5d1..2400048 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -12,6 +12,7 @@ from django.core.urlresolvers import reverse
 from troggle.core.models import Cave, Entrance
 import troggle.flatpages.models
 import json
+import troggle.logbooksdump
 
 # NOTE databaseRest.py is *imported* by views_other.py as it is used in the control panel
 # presented there.
@@ -107,58 +108,9 @@ def import_tunnelfiles():
     parsers.surveys.LoadTunnelFiles()
 
 # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-def import_auto_logbooks():
-    import parsers.logbooks
-    import os
-    for pt in troggle.core.models.PersonTrip.objects.all():
-        pt.delete()
-    for lbe in troggle.core.models.LogbookEntry.objects.all():
-        lbe.delete()
-    for expedition in troggle.core.models.Expedition.objects.all():
-        directory = os.path.join(settings.EXPOWEB,
-                                 "years",
-                                 expedition.year,
-                                 "autologbook")
-        for root, dirs, filenames in os.walk(directory):
-            for filename in filenames:
-                print(os.path.join(root, filename))
-                parsers.logbooks.parseAutoLogBookEntry(os.path.join(root, filename))
-
-#Temporary function until definitive source of data transfered.
-from django.template.defaultfilters import slugify
-from django.template import Context, loader
-def dumplogbooks():
-    def get_name(pe):
-        if pe.nickname:
-            return pe.nickname
-        else:
-            return pe.person.first_name
-    for lbe in troggle.core.models.LogbookEntry.objects.all():
-        dateStr = lbe.date.strftime("%Y-%m-%d")
-        directory = os.path.join(settings.EXPOWEB,
-                                 "years",
-                                 lbe.expedition.year,
-                                 "autologbook")
-        if not os.path.isdir(directory):
-            os.mkdir(directory)
-        filename = os.path.join(directory,
-                                dateStr + "." + slugify(lbe.title)[:50] + ".html")
-        if lbe.cave:
-            print(lbe.cave.reference())
-            trip = {"title": lbe.title, "html":lbe.text, "cave": lbe.cave.reference(), "caveOrLocation": "cave"}
-        else:
-            trip = {"title": lbe.title, "html":lbe.text, "location":lbe.place, "caveOrLocation": "location"}
-        pts = [pt for pt in lbe.persontrip_set.all() if pt.personexpedition]
-        persons = [{"name": get_name(pt.personexpedition), "TU": pt.time_underground, "author": pt.is_logbook_entry_author} for pt in pts]
-        f = open(filename, "wb")
-        template = loader.get_template('dataformat/logbookentry.html')
-        context = Context({'trip': trip,
-                           'persons': persons,
-                           'date': dateStr,
-                           'expeditionyear': lbe.expedition.year})
-        output = template.render(context)
-        f.write(unicode(output).encode( "utf-8" ))
-        f.close()
+#import logbooksdump
+#def import_auto_logbooks():
+#def dumplogbooks():
 
 # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 class JobQueue():
@@ -253,6 +205,15 @@ class JobQueue():
         dbname = settings.DATABASES['default']['NAME']
         dbdefault = settings.DATABASES['default']
 
+        skipmem = False
+        if self.runlabel:
+            if self.runlabel == "":
+                skipmem = True
+            elif self.runlabel[0:2] == "F-":
+                skipmem = True
+        else:
+            skipmem = True
+
         if dbname ==":memory:":
             # just run, and save the sql file
             print "-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
@@ -260,6 +221,10 @@ class JobQueue():
             self.runqonce()
             self.memdumpsql()
             self.saveprofiles()
+        elif skipmem:
+            print "-- DATABASES.default", settings.DATABASES['default']
+            self.runqonce()
+            self.saveprofiles()
         else:
             django.db.close_old_connections() # needed if MySQL running?
             # run all the imports through :memory: first
@@ -397,6 +362,7 @@ def usage():
                  and [runlabel] is an optional string identifying this run of the script
                  in the stored profiling data 'import-profile.json'
+                 if [runlabel] is absent or begins with "F-" then it will skip the :memory: pass
 
                  caves and logbooks must be run on an empty db before the others as they
                  set up db tables used by the others.
 
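
The new behaviour is easiest to see in isolation. Below is a minimal sketch of the skip decision introduced above: only the `runlabel` argument and the `"F-"` prefix convention come from this commit; the standalone function name and the example calls are hypothetical, added purely for illustration.

    # Minimal sketch (not troggle code): the skip-fast-pass decision from JobQueue.run(),
    # pulled out as a standalone helper. The function name is hypothetical.

    def should_skip_memory_pass(runlabel):
        """Return True when the :memory: pre-pass should be skipped.

        Per the updated usage() text: skip when runlabel is absent
        (None or empty) or when it begins with "F-".
        """
        if not runlabel:
            return True
        return runlabel.startswith("F-")

    # Expected behaviour:
    #   should_skip_memory_pass(None)       -> True   (no label given, the new default)
    #   should_skip_memory_pass("F-quick")  -> True   (explicit fast run)
    #   should_skip_memory_pass("profile1") -> False  (full :memory: pass first, then the real db)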