summary | refs | log | tree | commit | diff | stats
path: root/databaseReset.py
diff options
context:
space:
mode:
author    Philip Sargent <philip.sargent@klebos.com>  2020-06-08 00:11:09 +0100
committer Philip Sargent <philip.sargent@klebos.com>  2020-06-08 00:11:09 +0100
commit  538a3b6ca839e884a541e39f517a7c64b363530a (patch)
tree    66367109b463b140cd917bf052a1fb00f2584782 /databaseReset.py
parent  9237a6262ef310d57df6e40631c7de738cdc2f05 (diff)
download  troggle-538a3b6ca839e884a541e39f517a7c64b363530a.tar.gz
          troggle-538a3b6ca839e884a541e39f517a7c64b363530a.tar.bz2
          troggle-538a3b6ca839e884a541e39f517a7c64b363530a.zip
fixed circular ref on setup & in-memory db
Diffstat (limited to 'databaseReset.py')
-rw-r--r--  databaseReset.py  222
1 file changed, 121 insertions, 101 deletions
diff --git a/databaseReset.py b/databaseReset.py
index 2d08492..c85be8c 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -9,11 +9,13 @@ os.environ['PYTHONPATH'] = settings.PYTHON_PATH
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
from django.core import management
-from django.db import connection, close_old_connections
+from django.db import connection, close_old_connections, connections
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.core.urlresolvers import reverse
+print(" 1 settings on loading databaseReset.py")
+
from troggle.core.models_caves import Cave, Entrance
import troggle.parsers.caves
#import troggle.settings
@@ -36,67 +38,117 @@ if os.geteuid() == 0:
print("This script should be run as expo not root - quitting")
exit()
+dbengine = ""
+dbname = ""
+dbdefault =""
+
expouser=settings.EXPOUSER
expouserpass=settings.EXPOUSERPASS
expouseremail=settings.EXPOUSER_EMAIL
-print(" - settings: {} ({:.5}...) <{}> on module loading".format(expouser, expouserpass, expouseremail))
-
def reinit_db():
"""Rebuild database from scratch. Deletes the file first if sqlite is used,
otherwise it drops the database and creates it.
+ Note - initial loading of troggle.sqlite will already have populated the models
+ in memory (django python models, not the database), so there is already a full load
+ of stuff known. Deleting the db file does not clear memory.
"""
- django.db.close_old_connections() # wipes an in-memory sqlite db
currentdbname = settings.DATABASES['default']['NAME']
- if settings.DATABASES['default']['NAME'] == ':memory:':
- pass
- elif settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
+ if currentdbname == ':memory:':
+ # closing connections should wipe the in-memory database
+ django.db.close_old_connections()
+ for conn in django.db.connections.all():
+ print(" ! Closing another connection to db...")
+ conn.close()
+ elif django.db.connections.databases['default']['ENGINE'] == 'django.db.backends.sqlite3':
try:
os.remove(currentdbname)
except OSError:
print(" ! OSError on removing: " + currentdbname + " (Is the file open in another app?\n")
raise
else:
- cursor = connection.cursor()
+ cursor = django.db.connection.cursor()
cursor.execute("DROP DATABASE %s" % currentdbname)
cursor.execute("CREATE DATABASE %s" % currentdbname)
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % currentdbname)
cursor.execute("USE %s" % currentdbname)
- print(" - SETTINGS: {} ({:.5}...) <{}> before calling syncuser()".format(expouser, expouserpass, expouseremail))
- syncuser()
-def syncuser():
- """Sync user - needed after reload
- """
- print(" - Synchronizing user on: " + settings.DATABASES['default']['NAME'])
+ #Sync user - needed after reload
+ print(" - Migrating: " + settings.DATABASES['default']['NAME'])
+ print(django.db.connections.databases['default']['NAME'])
+
management.call_command('migrate', interactive=False)
+ print(" - done migration on: " + settings.DATABASES['default']['NAME'])
try:
+ print(" - Setting up admin user on: " + settings.DATABASES['default']['NAME'])
+ print(django.db.connections.databases['default']['NAME'])
+ print(" - user: {} ({:.5}...) <{}> ".format(expouser, expouserpass, expouseremail))
user = User.objects.create_user(expouser, expouseremail, expouserpass)
user.is_staff = True
user.is_superuser = True
user.save()
except:
print(" ! INTEGRITY ERROR user on: " + settings.DATABASES['default']['NAME'])
+ print(django.db.connections.databases['default']['NAME'])
print(" ! You probably have not got a clean db when you thought you had.\n")
- raise
+ print(" ! Also you are probably NOT running an in-memory db now.\n")
+ memdumpsql(fn='integrityfail.sql')
+ django.db.connections.databases['default']['NAME'] = ':memory:'
+ #raise
+
+def memdumpsql(fn):
+ djconn = django.db.connection
+ from dump import _iterdump
+ with open(fn, 'w') as f:
+ for line in _iterdump(djconn):
+ f.write('%s\n' % line.encode("utf8"))
+ return True
+
+def store_dbsettings():
+ global dbengine
+ global dbname
+ global dbdefault
+ dbengine = settings.DATABASES['default']['ENGINE']
+ dbname = settings.DATABASES['default']['NAME']
+ dbdefault = settings.DATABASES['default']
+
+def restore_dbsettings():
+ settings.DATABASES['default'] = dbdefault
+ settings.DATABASES['default']['ENGINE'] = dbengine
+ settings.DATABASES['default']['NAME'] = dbname
+ django.db.connections.databases['default'] = dbdefault
+ django.db.connections.databases['default']['ENGINE'] = dbengine
+ django.db.connections.databases['default']['NAME'] = dbname
+
+def set_in_memory_dbsettings():
+ django.db.close_old_connections() # needed if MySQL running?
+ settings.DATABASES['default'] = {'ENGINE': 'django.db.backends.sqlite3',
+ 'AUTOCOMMIT': True,
+ 'ATOMIC_REQUESTS': False,
+ 'NAME': ':memory:',
+ 'CONN_MAX_AGE': 0,
+ 'TIME_ZONE': 'UTC',
+ 'OPTIONS': {},
+ 'HOST': '',
+ 'USER': '',
+ 'TEST': {'COLLATION': None, 'CHARSET': None, 'NAME': None, 'MIRROR': None},
+ 'PASSWORD': '',
+ 'PORT': ''}
+ settings.DATABASES['default']['ENGINE'] = 'django.db.backends.sqlite3'
+ settings.DATABASES['default']['NAME'] = ':memory:'
+ django.db.connections.databases['default']['ENGINE'] = 'django.db.backends.sqlite3'
+ django.db.connections.databases['default']['NAME'] = ':memory:'
-def dirsredirect():
- """Make directories that troggle requires and sets up page redirects
- """
- #should also deal with permissions here.
- #if not os.path.isdir(settings.PHOTOS_ROOT):
- #os.mkdir(settings.PHOTOS_ROOT)
- for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
- f = troggle.flatpages.models.Redirect(originalURL = oldURL, newURL = newURL)
- f.save()
def import_caves():
- print("Importing Caves")
+ print("Importing Caves to ",end="")
+ print(django.db.connections.databases['default']['NAME'])
troggle.parsers.caves.readcaves()
def import_people():
- print("Importing People (folk.csv)")
+ print("Importing People (folk.csv) to ",end="")
+ print(django.db.connections.databases['default']['NAME'])
troggle.parsers.people.LoadPersonsExpos()
def import_surveyscans():
@@ -152,9 +204,6 @@ class JobQueue():
"""A list of import operations to run. Always reports profile times
in the same order.
"""
- dbengine = ""
- dbname = ""
- dbdefault =""
def __init__(self,run):
self.runlabel = run
@@ -163,7 +212,7 @@ class JobQueue():
self.results_order=[
"date","runlabel","reinit", "caves", "people",
"logbooks", "QMs", "scans", "survexblks", "survexpos",
- "tunnel", "surveyimgs", "test", "dirsredirect", "syncuser" ]
+ "tunnel", "surveyimgs", "test" ]
for k in self.results_order:
self.results[k]=[]
self.tfile = "import_profile.json"
@@ -202,19 +251,13 @@ class JobQueue():
json.dump(self.results, f)
return True
- def memdumpsql(self):
- djconn = django.db.connection
- from dump import _iterdump
- with open('memdump.sql', 'w') as f:
- for line in _iterdump(djconn):
- f.write('%s\n' % line.encode("utf8"))
- return True
def runqonce(self):
"""Run all the jobs in the queue provided - once
"""
- print("** Running job ", self.runlabel)
+ print("** Running job ", self.runlabel,end=" to ")
+ print(django.db.connections.databases['default']['NAME'])
jobstart = time.time()
self.results["date"].pop()
self.results["date"].append(jobstart)
@@ -235,43 +278,18 @@ class JobQueue():
print("** Ended job %s - %.1f seconds total." % (self.runlabel,jobduration))
return True
- def store_dbsettings(self):
- self.dbengine = settings.DATABASES['default']['ENGINE']
- self.dbname = settings.DATABASES['default']['NAME']
- self.dbdefault = settings.DATABASES['default']
-
- def restore_dbsettings(self):
- settings.DATABASES['default'] = self.dbdefault
- settings.DATABASES['default']['ENGINE'] = self.dbengine
- settings.DATABASES['default']['NAME'] = self.dbname
-
- def set_in_memory_dbsettings(self):
- django.db.close_old_connections() # needed if MySQL running?
- settings.DATABASES['default'] = {'ENGINE': 'django.db.backends.sqlite3',
- 'AUTOCOMMIT': True,
- 'ATOMIC_REQUESTS': False,
- 'NAME': ':memory:',
- 'CONN_MAX_AGE': 0,
- 'TIME_ZONE': 'UTC',
- 'OPTIONS': {},
- 'HOST': '',
- 'USER': '',
- 'TEST': {'COLLATION': None, 'CHARSET': None, 'NAME': None, 'MIRROR': None},
- 'PASSWORD': '',
- 'PORT': ''}
- settings.DATABASES['default']['ENGINE'] = 'django.db.backends.sqlite3'
- settings.DATABASES['default']['NAME'] = ":memory:"
def append_placeholders(self):
for j in self.results_order:
self.results[j].append(None) # append a placeholder
def run_now_django_tests(self,n):
- self.store_dbsettings()
+ store_dbsettings()
# this leaves the db set to :memory: whatever it was initially
management.call_command('test', verbosity=n)
django.db.close_old_connections()
- self.restore_dbsettings()
+ restore_dbsettings()
+ # and whatever I do, it stays that way !
def skip_memory_phase(self):
if not self.runlabel:
@@ -289,36 +307,33 @@ class JobQueue():
relinquish some kind of db connection (not fixed yet)
"""
self.loadprofiles()
- self.store_dbsettings()
+ store_dbsettings()
+
+ print("-- start ", settings.DATABASES['default']['ENGINE'], settings.DATABASES['default']['NAME'])
+ print(django.db.connections.databases['default']['NAME'])
- print("-- phase 0 ", settings.DATABASES['default']['ENGINE'], settings.DATABASES['default']['NAME'])
- #print "-- DATABASES.default", settings.DATABASES['default']
- if self.dbname ==":memory:":
+ if dbname ==":memory:":
# just run, and save the sql file
self.runqonce()
- self.memdumpsql() # saved contents of scratch db, could be imported later..
+ memdumpsql('memdump.sql') # saved contents of scratch db, could be imported later..
self.saveprofiles()
elif self.skip_memory_phase():
self.runqonce()
self.saveprofiles()
else:
# run all the imports through :memory: first
- self.set_in_memory_dbsettings()
+ set_in_memory_dbsettings()
print("-- phase 1 ", settings.DATABASES['default']['ENGINE'], settings.DATABASES['default']['NAME'])
- #print("-- DATABASES.default", settings.DATABASES['default'])
- # but because the user may be expecting to add this to a db with lots of tables already there,
# the jobqueue may not start from scratch so we need to initialise the db properly first
# because we are using an empty :memory: database
# But initiating twice crashes it; so be sure to do it once only.
- # Damn. syncdb() is still calling MySQL somehow **conn_params not sqlite3. So crashes on expo server.
+ # Damn. migrate() is still calling MySQL somehow **conn_params not sqlite3. So crashes on expo server.
if ("reinit",reinit_db) not in self.queue:
reinit_db()
- if ("dirsredirect",dirsredirect) not in self.queue:
- dirsredirect()
if ("caves",import_caves) not in self.queue:
import_caves() # sometime extract the initialising code from this and put in reinit...
if ("people",import_people) not in self.queue:
@@ -327,21 +342,22 @@ class JobQueue():
django.db.close_old_connections() # maybe not needed here
self.runqonce()
- self.memdumpsql()
+ memdumpsql('memdump2.sql')
self.showprofile()
# restore the original db and import again
# if we wanted to, we could re-import the SQL generated in the first pass to be
# blazing fast. But for the present just re-import the lot.
- self.restore_dbsettings()
+ restore_dbsettings()
print("-- phase 2 ", settings.DATABASES['default']['ENGINE'], settings.DATABASES['default']['NAME'])
-
+ print(django.db.connections.databases['default']['NAME'])
+
django.db.close_old_connections() # maybe not needed here
for j in self.results_order:
self.results[j].pop() # throw away results from :memory: run
self.append_placeholders()
- django.db.close_old_connections() # magic rune. works. found by looking in django.db__init__.py
+ django.db.close_old_connections()
#django.setup() # should this be needed?
self.runqonce()
@@ -353,13 +369,7 @@ class JobQueue():
"""Prints out the time it took to run the jobqueue
"""
for k in self.results_order:
- if k =="dirsredirect":
- break
- if k =="surveyimgs":
- break
- elif k =="syncuser":
- break
- elif k =="test":
+ if k =="test":
break
elif k =="date":
print(" days ago ", end=' ')
@@ -384,6 +394,8 @@ class JobQueue():
# prints one place to the left of where you expect
if r[len(r)-1]:
s = r[i]-r[len(r)-1]
+ elif r[len(r)-2]:
+ s = r[i]-r[len(r)-2]
else:
s = 0
days = (s)/(24*60*60)
@@ -408,34 +420,35 @@ def usage():
profile - print the profile from previous runs. Import nothing.
reset - normal usage: clear database and reread everything from files - time-consuming
- caves - read in the caves (must run first after reset)
- people - read in the people from folk.csv (must run before logbooks)
+ caves - read in the caves (must run first after initialisation)
+ people - read in the people from folk.csv (must run after 'caves')
logbooks - read in the logbooks
QMs - read in the QM csv files (older caves only)
scans - the survey scans in all the wallets (must run before survex)
survex - read in the survex files - all the survex blocks but not the x/y/z positions
- survexpos - just the x/y/z Pos out of the survex files
+ survexpos - just the x/y/z Pos out of the survex files (not needed)
tunnel - read in the Tunnel files - which scans the survey scans too
- reinit - clear database (delete everything) and make empty tables. Import nothing.
- syncuser - needed after reloading database from SQL backup
autologbooks - Not used. read in autologbooks (what are these?)
dumplogbooks - Not used. write out autologbooks (not working?)
surveyimgs - Not used. read in scans by-expo, must run after "people".
and [runlabel] is an optional string identifying this run of the script
in the stored profiling data 'import-profile.json'
+
if [runlabel] is absent or begins with "F-" then it will skip the :memory: pass
caves and logbooks must be run on an empty db before the others as they
set up db tables used by the others.
- the in-memory phase is on an empty db, so always runs reinit, caves & people for this phase
+ the commands are first run on an in-memory empty database before being run on
+ the actual persistent database. This is very fast and checks for import errors.
+
+ the initial in-memory phase is on an empty db, so always runs caves & people for this phase
""")
if __name__ == "__main__":
- django.setup()
if os.geteuid() == 0:
print("Do not run as root or using sudo - file permissions for cache files and logs will break")
@@ -446,8 +459,17 @@ if __name__ == "__main__":
else:
runlabel=None
+ store_dbsettings()
+ set_in_memory_dbsettings()
+ print(" - django.setup - next")
+ try:
+ django.setup()
+ except:
+ print(" ! COMPLICATED FAILURE. Does not occur with a valid 'troggle.sqlite' database in place.")
+ raise
+ print(" - django.setup - done")
+
jq = JobQueue(runlabel)
- #jq.run_now_django_tests(1)
if len(sys.argv)==1:
usage()
@@ -455,7 +477,7 @@ if __name__ == "__main__":
elif "test" in sys.argv:
jq.enq("caves",import_caves)
jq.enq("people",import_people)
- jq.run_now_django_tests(2)
+ #jq.run_now_django_tests(2)
elif "caves" in sys.argv:
jq.enq("caves",import_caves)
elif "logbooks" in sys.argv:
@@ -466,7 +488,6 @@ if __name__ == "__main__":
jq.enq("QMs",import_QMs)
elif "reset" in sys.argv:
jq.enq("reinit",reinit_db)
- jq.enq("dirsredirect",dirsredirect)
jq.enq("caves",import_caves)
jq.enq("people",import_people)
jq.enq("scans",import_surveyscans)
@@ -493,9 +514,6 @@ if __name__ == "__main__":
# writeCaves()
elif "profile" in sys.argv:
jq.loadprofiles()
- # need to increment everything runq does
- print("!! - days before appears as 0.00 - to be fixed")
- jq.append_placeholders()
jq.showprofile()
exit()
elif "help" in sys.argv:
@@ -506,5 +524,7 @@ if __name__ == "__main__":
print("%s not recognised as a command." % sys.argv[1])
exit()
+ #jq.run_now_django_tests(1)
+
jq.run()
jq.showprofile()