author     Philip Sargent <philip@Muscogee.localdomain>    2020-04-28 18:26:08 +0100
committer  Philip Sargent <philip@Muscogee.localdomain>    2020-04-28 18:26:08 +0100
commit     b4c0c4d21922f99e99906f934d1e261cefda3915 (patch)
tree       ceb529912aa7e481937d27955deeac1a767b39aa /databaseReset.py
parent     4be8c8129183888c8a0d62536ee6009a99dc53fb (diff)
Understanding and speeding up LoadPos
Diffstat (limited to 'databaseReset.py')
-rw-r--r--    databaseReset.py    23
1 file changed, 14 insertions(+), 9 deletions(-)
diff --git a/databaseReset.py b/databaseReset.py
index cc5e20f..686b1ba 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -168,8 +168,8 @@ class JobQueue():
self.results = {}
self.results_order=[
"date","runlabel","reinit", "caves", "people",
- "logbooks", "scans", "QMs", "survexblks",
- "tunnel", "surveyimgs", "test", "dirsredirect", "syncuser", "survexpos" ]
+ "logbooks", "QMs", "survexblks", "survexpos",
+ "tunnel", "scans", "surveyimgs", "test", "dirsredirect", "syncuser" ]
for k in self.results_order:
self.results[k]=[]
self.tfile = "import_profile.json"
@@ -197,10 +197,15 @@ class JobQueue():
print "FAILURE parsing JSON file %s" % (self.tfile)
# Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
f.close()
-
+
+ for j in self.results_order:
+ self.results[j].append(None) # append a placeholder
+
print "** Running job ", self.runlabel
jobstart = time.time()
+ self.results["date"].pop()
self.results["date"].append(jobstart)
+ self.results["runlabel"].pop()
self.results["runlabel"].append(self.runlabel)
for i in self.queue:
@@ -208,6 +213,7 @@ class JobQueue():
i[1]() # looks ugly but invokes function passed in the second item in the tuple
duration = time.time()-start
print "\n*- Ended \"", i[0], "\" %.1f seconds" % duration
+ self.results[i[0]].pop() # the null item
self.results[i[0]].append(duration)
with open(self.tfile, 'w') as f:
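
The two hunks above are the core of the change: a placeholder bookkeeping pattern for the per-run timing columns. As a minimal standalone sketch (not troggle code; the column list and the bare run() function below are simplified stand-ins for JobQueue), the idea is:

import time

# Before a run, append a None placeholder to every results column so all columns
# stay the same length; when a job actually runs, pop the placeholder and append
# the real value in its place.
results_order = ["date", "runlabel", "caves", "logbooks"]
results = dict((k, []) for k in results_order)

def run(runlabel, queue):
    # queue is a list of (name, function) tuples, as in JobQueue.queue
    for k in results_order:
        results[k].append(None)               # placeholder for jobs that may not run
    results["date"].pop()
    results["date"].append(time.time())       # replace the placeholder with the real value
    results["runlabel"].pop()
    results["runlabel"].append(runlabel)
    for name, func in queue:
        start = time.time()
        func()
        results[name].pop()                   # drop the null item
        results[name].append(time.time() - start)

Columns for jobs that were not queued in a given run keep their None entry, so every list holds exactly one entry per run and the table written to import_profile.json stays aligned across runs.
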
@@ -241,9 +247,9 @@ class JobQueue():
elif k =="test":
break
elif k =="date":
- print " days ago ",
+ print " days ago ",
else:
- print '%9s (s)' % k,
+ print '%10s (s)' % k,
percen=0
r = self.results[k]
#print "min=",min
@@ -286,7 +292,7 @@ def usage():
QMs - read in the QM csv files (older caves only)
reinit - clear database (delete everything) and make empty tables. Import nothing.
scans - the survey scans in all the wallets
- survex - read in the survex files - all the survex blocks and the x/y/z positions
+ survex - read in the survex files - all the survex blocks but not the x/y/z positions
survexpos - just the x/y/z Pos out of the survex files
tunnel - read in the Tunnel files - which scans the survey scans too
@@ -326,9 +332,8 @@ if __name__ == "__main__":
jq.enq("reinit",reinit_db)
jq.enq("dirsredirect",dirsredirect)
jq.enq("caves",import_caves)
- jq.enq("people",import_people)
- jq.enq("survex",import_survexblks)
- #jq.enq("logbooks",import_logbooks)
+ jq.enq("survexblks",import_survexblks)
+ jq.enq("survexpos",import_survexpos)
elif "caves" in sys.argv:
jq.enq("caves",import_caves)
elif "logbooks" in sys.argv: