author     Philip Sargent <philip@Muscogee.localdomain>   2020-05-24 13:35:47 +0100
committer  Philip Sargent <philip@Muscogee.localdomain>   2020-05-24 13:35:47 +0100
commit     b69bdcd1262cd04208abf29480e73a3119e5feee
tree       50e82a817f31e1e4e27af24fb9f35d0cf25dcc2f
parent     49d5857b36e4b8acb0949e11718fb465c8b485c2
tidying and prep for python3
-rw-r--r--   README.txt          6
-rw-r--r--   databaseReset.py   92
-rw-r--r--   parsers/survex.py   8
-rw-r--r--   parsers/surveys.py 30
4 files changed, 75 insertions(+), 61 deletions(-)
diff --git a/README.txt b/README.txt
index 677781c..1ad17bb 100644
--- a/README.txt
+++ b/README.txt
@@ -44,12 +44,12 @@ pip install pygraphviz # fails to install
pip install pyparsing pydot # installs fine
django extension graph_models # https://django-extensions.readthedocs.io/en/latest/graph_models.html
-Or use a python3 virtual environment:
+Or use a python3 virtual environment: (python3.5 not later)
$ cd troggle
$ cd ..
-$ python3 -m venv pyth3d2
+$ python3.5 -m venv pyth35d2
(creates folder with virtual env)
-cd pyth3d2
+cd pyth35d2
bin/activate
(now install everything - not working yet..)
$ pip install -r requirements.txt
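A note on the venv recipe above: running "bin/activate" as a plain command does not change the current shell; activation has to be sourced. A minimal sketch of the intended sequence, in the same style as the README commands; the requirements.txt path is an assumption, since the README does not say where the install is run from:

    $ cd troggle/..
    $ python3.5 -m venv pyth35d2
    $ cd pyth35d2
    $ source bin/activate            # or ". bin/activate"; plain "bin/activate" has no effect
    (pyth35d2) $ pip install -r ../troggle/requirements.txt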
diff --git a/databaseReset.py b/databaseReset.py
index fc12dde..f08e3a6 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -1,20 +1,26 @@
+from __future__ import (absolute_import, division,
+ print_function)
import os
import time
import timeit
+import json
+
import settings
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
+
from django.core import management
from django.db import connection, close_old_connections
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.core.urlresolvers import reverse
+
from troggle.core.models import Cave, Entrance
+import troggle.settings
import troggle.flatpages.models
-import json
import troggle.logbooksdump
-# NOTE databaseRest.py is *imported* by views_other.py as it is used in the control panel
+# NOTE databaseReset.py is *imported* by views_other.py as it is used in the control panel
# presented there.
expouser=settings.EXPOUSER
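The new __future__ line above is the core of the python3 prep: under Python 2 it makes print a function, makes / true division, and disables implicit relative imports, which is consistent with the switch to fully qualified troggle.parsers.* imports below. A minimal illustration of the behaviour change (not troggle code):

    from __future__ import absolute_import, division, print_function

    print(7 / 2)       # 3.5 under both Python 2 and 3 (plain Python 2 would print 3)
    print("a", "b")    # prints "a b" -- print is now a function, not a statement
    # absolute_import: "import parsers.caves" is no longer resolved relative to the
    # current package, matching the change to "import troggle.parsers.caves" in this commit.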
@@ -60,52 +66,52 @@ def dirsredirect():
f.save()
def import_caves():
- import parsers.caves
+ import troggle.parsers.caves
print("Importing Caves")
- parsers.caves.readcaves()
+ troggle.parsers.caves.readcaves()
def import_people():
- import parsers.people
+ import troggle.parsers.people
print("Importing People (folk.csv)")
- parsers.people.LoadPersonsExpos()
+ troggle.parsers.people.LoadPersonsExpos()
def import_logbooks():
- import parsers.logbooks
+ import troggle.parsers.logbooks
print("Importing Logbooks")
- parsers.logbooks.LoadLogbooks()
+ troggle.parsers.logbooks.LoadLogbooks()
def import_QMs():
print("Importing QMs (old caves)")
- import parsers.QMs
+ import troggle.parsers.QMs
# import process itself runs on qm.csv in only 3 old caves, not the modern ones!
def import_survexblks():
- import parsers.survex
+ import troggle.parsers.survex
print("Importing Survex Blocks")
- parsers.survex.LoadAllSurvexBlocks()
+ troggle.parsers.survex.LoadAllSurvexBlocks()
def import_survexpos():
- import parsers.survex
+ import troggle.parsers.survex
print("Importing Survex x/y/z Positions")
- parsers.survex.LoadPos()
+ troggle.parsers.survex.LoadPos()
def import_surveyimgs():
"""This appears to store data in unused objects. The code is kept
for future re-working to manage progress against notes, plans and elevs.
"""
- import parsers.surveys
- print("Importing survey images")
- parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
+ #import troggle.parsers.surveys
+ print("NOT Importing survey images")
+ #troggle.parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
def import_surveyscans():
- import parsers.surveys
+ import troggle.parsers.surveys
print("Importing Survey Scans")
- parsers.surveys.LoadListScans()
+ troggle.parsers.surveys.LoadListScans()
def import_tunnelfiles():
- import parsers.surveys
+ import troggle.parsers.surveys
print("Importing Tunnel files")
- parsers.surveys.LoadTunnelFiles()
+ troggle.parsers.surveys.LoadTunnelFiles()
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# These functions moved to a different file - not used currently.
@@ -152,7 +158,7 @@ class JobQueue():
for j in data:
self.results[j] = data[j]
except:
- print "FAILURE parsing JSON file %s" % (self.tfile)
+ print("FAILURE parsing JSON file %s" % (self.tfile))
# Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
f.close()
for j in self.results_order:
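The loadprofiles hunk above copies previously saved job timings out of a JSON file (self.tfile) into self.results, one list of durations per job name; the change itself is only the py2-to-py3 print conversion. A rough sketch of that load pattern, with everything except self.tfile and self.results an assumption:

    import json
    import os

    def loadprofiles(self):
        # Sketch only: pull accumulated job timings back in from the JSON profile file, if any.
        if os.path.isfile(self.tfile):
            try:
                with open(self.tfile) as f:
                    data = json.load(f)
                for j in data:
                    self.results[j] = data[j]      # one list of past durations per job name
            except Exception:
                print("FAILURE parsing JSON file %s" % (self.tfile))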
@@ -176,7 +182,7 @@ class JobQueue():
"""Run all the jobs in the queue provided - once
"""
- print "** Running job ", self.runlabel
+ print("** Running job ", self.runlabel)
jobstart = time.time()
self.results["date"].pop()
self.results["date"].append(jobstart)
@@ -187,14 +193,14 @@ class JobQueue():
start = time.time()
i[1]() # looks ugly but invokes function passed in the second item in the tuple
duration = time.time()-start
- print "\n*- Ended \"", i[0], "\" %.1f seconds" % duration
+ print("\n*- Ended \"", i[0], "\" %.1f seconds" % duration)
self.results[i[0]].pop() # the null item
self.results[i[0]].append(duration)
jobend = time.time()
jobduration = jobend-jobstart
- print "** Ended job %s - %.1f seconds total." % (self.runlabel,jobduration)
+ print("** Ended job %s - %.1f seconds total." % (self.runlabel,jobduration))
return True
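run() works through a queue of (label, function) tuples, which is why i[1]() invokes the job and i[0] indexes its timing slot; this hunk only converts the progress messages to print() calls. A standalone sketch of the same timing pattern, not the troggle class itself:

    import time

    def run_jobs(queue, results):
        # Sketch: queue is a list of (label, function) tuples; record each job's duration.
        jobstart = time.time()
        for label, func in queue:
            start = time.time()
            func()                                  # same idea as i[1]() in the diff, just unpacked
            duration = time.time() - start
            print('\n*- Ended "%s" %.1f seconds' % (label, duration))
            results[label].append(duration)
        print("** Ended job - %.1f seconds total." % (time.time() - jobstart))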
@@ -221,7 +227,7 @@ class JobQueue():
else:
skipmem = True
- print "-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
+ print("-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE'])
#print "-- DATABASES.default", settings.DATABASES['default']
if dbname ==":memory:":
@@ -251,8 +257,8 @@ class JobQueue():
'PORT': ''}
- print "-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
- print "-- DATABASES.default", settings.DATABASES['default']
+ print("-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE'])
+ #print("-- DATABASES.default", settings.DATABASES['default'])
# but because the user may be expecting to add this to a db with lots of tables already there,
# the jobqueue may not start from scratch so we need to initialise the db properly first
@@ -282,7 +288,7 @@ class JobQueue():
settings.DATABASES['default'] = dbdefault
settings.DATABASES['default']['ENGINE'] = dbengine
settings.DATABASES['default']['NAME'] = dbname
- print "-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
+ print("-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE'])
django.db.close_old_connections() # maybe not needed here
for j in self.results_order:
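These hunks cover the run where the imports are pointed at a throwaway in-memory database: the real settings.DATABASES['default'] is remembered, replaced by a sqlite3 ':memory:' configuration, and restored afterwards, as the last hunk above shows; the diff itself only converts the diagnostic prints. A rough sketch of that swap-and-restore pattern; the migrate call is an assumption, since the hunks shown do not include how the empty database gets its tables:

    from django.core import management

    def run_in_memory(settings, jobs):
        # Sketch only: run the import jobs against a temporary in-memory sqlite database.
        dbdefault = settings.DATABASES['default']              # remember the real database
        dbengine = dbdefault['ENGINE']
        dbname = dbdefault['NAME']
        settings.DATABASES['default'] = {'ENGINE': 'django.db.backends.sqlite3',
                                         'NAME': ':memory:'}
        management.call_command('migrate', interactive=False)  # assumption: create the tables first
        for label, func in jobs:
            func()
        settings.DATABASES['default'] = dbdefault               # restore, as the final hunk above does
        settings.DATABASES['default']['ENGINE'] = dbengine
        settings.DATABASES['default']['NAME'] = dbname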
@@ -308,9 +314,9 @@ class JobQueue():
elif k =="test":
break
elif k =="date":
- print " days ago ",
+ print(" days ago ", end=' ')
else:
- print '%10s (s)' % k,
+ print('%10s (s)' % k, end=' ')
percen=0
r = self.results[k]
@@ -320,26 +326,30 @@ class JobQueue():
rp = r[i]
else:
rp = " - "
- print '%8s' % rp,
+ print('%8s' % rp, end=' ')
elif k =="date":
# Calculate dates as days before present
if r[i]:
if i == len(r)-1:
- print " this",
+ print(" this", end=' ')
else:
# prints one place to the left of where you expect
- days = (r[i]-r[len(r)-1])/(24*60*60)
- print '%8.2f' % days,
+ if r[len(r)-1]:
+ s = r[i]-r[len(r)-1]
+ else:
+ s = 0
+ days = (s)/(24*60*60)
+ print('%8.2f' % days, end=' ')
elif r[i]:
- print '%8.1f' % r[i],
+ print('%8.1f' % r[i], end=' ')
if i == len(r)-1 and r[i-1]:
percen = 100* (r[i] - r[i-1])/r[i-1]
if abs(percen) >0.1:
- print '%8.1f%%' % percen,
+ print('%8.1f%%' % percen, end=' ')
else:
- print " - ",
- print ""
- print "\n"
+ print(" - ", end=' ')
+ print("")
+ print("\n")
return True
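The date row in showprofile holds epoch timestamps (run() appends jobstart from time.time() to results["date"]), so "days ago" is just the difference from the newest entry divided by 86400; the new guard skips the subtraction when that newest entry is missing. A small worked example of the same arithmetic:

    # Worked example of the "days ago" arithmetic above:
    latest = 1590300000.0             # most recent run, in seconds since the epoch
    earlier = 1590040800.0            # a run exactly 3 days earlier
    days = (earlier - latest) / (24 * 60 * 60)
    print('%8.2f' % days)             # "   -3.00" -- i.e. three days before the latest run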
@@ -438,13 +448,15 @@ if __name__ == "__main__":
elif "dumplogbooks" in sys.argv: # untested in 2020
dumplogbooks()
elif "profile" in sys.argv:
+ jq.loadprofiles()
jq.showprofile()
+ exit()
elif "help" in sys.argv:
usage()
exit()
else:
usage()
- print("%s not recognised as a command." % sys.argv[1])
+ print(("%s not recognised as a command." % sys.argv[1]))
exit()
jq.run()
diff --git a/parsers/survex.py b/parsers/survex.py
index 9725ce7..38cae62 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -27,6 +27,8 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])
survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
+ # this next fails for two surface survey svx files which use / for decimal point
+ # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
if stardata["type"] == "normal":
try:
survexleg.tape = float(ls[stardata["tape"]])
@@ -34,7 +36,7 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
print("! Tape misread in", survexblock.survexfile.path)
print(" Stardata:", stardata)
print(" Line:", ls)
- message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
+ message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
models.DataIssue.objects.create(parser='survex', message=message)
survexleg.tape = 1000
try:
@@ -43,7 +45,7 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
print("! Clino misread in", survexblock.survexfile.path)
print(" Stardata:", stardata)
print(" Line:", ls)
- message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
+ message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
models.DataIssue.objects.create(parser='survex', message=message)
lclino = error
try:
@@ -52,7 +54,7 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
print("! Compass misread in", survexblock.survexfile.path)
print(" Stardata:", stardata)
print(" Line:", ls)
- message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
+ message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
models.DataIssue.objects.create(parser='survex', message=message)
lcompass = error
if lclino == "up":
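The comment added at the top of LoadSurvexLineLeg explains why float() can raise ValueError here: a couple of surface-survey .svx files write the tape as '29/09' (slash for the decimal point) or '(06.05)'; the existing handlers then substitute a placeholder (survexleg.tape = 1000, or the error value for clino and compass) and record a DataIssue, and this commit only makes those DataIssue messages distinguish tape, clino and compass. A hedged sketch, not part of this commit, of a reader that would tolerate those two forms:

    def parse_tape(token):
        # Sketch only (not in this commit): accept '12.3', '(06.05)' and '29/09' style readings.
        token = token.strip().strip('()')       # '(06.05)' -> '06.05'
        if '/' in token:                        # '29/09' uses '/' as the decimal point, i.e. 29.09
            token = token.replace('/', '.', 1)
        return float(token)

    # parse_tape('29/09')   -> 29.09
    # parse_tape('(06.05)') -> 6.05
    # parse_tape('12.3')    -> 12.3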
diff --git a/parsers/surveys.py b/parsers/surveys.py
index ec6298c..942c0a5 100644
--- a/parsers/surveys.py
+++ b/parsers/surveys.py
@@ -82,14 +82,14 @@ def get_or_create_placeholder(year):
# logging.info("added survey " + survey[header['Year']] + "#" + surveyobj.wallet_number + "\r")
# dead
-def listdir(*directories):
- try:
- return os.listdir(os.path.join(settings.SURVEYS, *directories))
- except:
- import urllib.request, urllib.parse, urllib.error
- url = settings.SURVEYS + reduce(lambda x, y: x + "/" + y, ["listdir"] + list(directories))
- folders = urllib.request.urlopen(url.replace("#", "%23")).readlines()
- return [folder.rstrip(r"/") for folder in folders]
+# def listdir(*directories):
+ # try:
+ # return os.listdir(os.path.join(settings.SURVEYS, *directories))
+ # except:
+ # import urllib.request, urllib.parse, urllib.error
+ # url = settings.SURVEYS + reduce(lambda x, y: x + "/" + y, ["listdir"] + list(directories))
+ # folders = urllib.request.urlopen(url.replace("#", "%23")).readlines()
+ # return [folder.rstrip(r"/") for folder in folders]
# add survey scans
# def parseSurveyScans(expedition, logfile=None):
@@ -171,13 +171,13 @@ def listdir(*directories):
# parseSurveyScans(expedition)
# dead
-def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
- file=Image.open(filePath)
- print(filePath)
- if 'interlace' in file.info:
- return file.info['interlace']
- else:
- return False
+# def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
+ # file=Image.open(filePath)
+ # print(filePath)
+ # if 'interlace' in file.info:
+ # return file.info['interlace']
+ # else:
+ # return False
# handles url or file, so we can refer to a set of scans on another server