lengths[j]:
- lengths[j]=len(i[j])
- j+=1
-
- print ("-"*(lengths[0]+lengths[1]+4))
- for i in alldata:
- print (("%-"+str(lengths[0])+"s - %-"+str(lengths[1])+"s") % (i[0],i[1]))
- if len(i)>2:
- for j in i[2:]:print (("%"+str(lengths[0]+9)+"s* %s") % (" ",j))
+ j = 0
+ while j < len(i):
+ if len(i[j]) > lengths[j]:
+ lengths[j] = len(i[j])
+ j += 1
+
+ print ("-" * (lengths[0] + lengths[1] + 4))
+ for i in alldata:
+ print (("%-" + str(lengths[0]) + "s - %-" + str(
+ lengths[1]) + "s") % (i[0], i[1]))
+ if len(i) > 2:
+ for j in i[2:]: print (("%" + str(lengths[
+ 0] + 9) + "s* %s") % (" ", j))
print
- def cmd_vars(self,*args):
+ def cmd_vars(self, *args):
'''-2|vars|Show variables'''
- print ("variables\r\n"+"-"*79)
- for i,j in self.configvars.items():
- value=self.parfmt(repr(getattr(self,j)),52)
- print ("| %20s | %52s |" % (i,value[0]))
- for k in value[1:]:print ("| %20s | %52s |" % ("",k))
- if len(value)>1:print("| %20s | %52s |" % ("",""))
- print ("-"*79)
-
- def parfmt(self,txt,width):
- res=[]
- pos=0
+ print ("variables\r\n" + "-" * 79)
+ for i, j in self.configvars.items():
+ value = self.parfmt(repr(getattr(self, j)), 52)
+ print ("| %20s | %52s |" % (i, value[0]))
+ for k in value[1:]: print ("| %20s | %52s |" % ("", k))
+ if len(value) > 1:
+ print("| %20s | %52s |" % ("", ""))
+ print ("-" * 79)
+
+ def parfmt(self, txt, width):
+ res = []
+ pos = 0
while True:
- a=txt[pos:pos+width]
- if not a:break
+ a = txt[pos:pos + width]
+ if not a:
+ break
res.append(a)
- pos+=width
+ pos += width
return res
-
- def cmd_set(self,*args):
+
+ def cmd_set(self, *args):
'''-1|set [variable_name] [value]|Set configuration variable value|Values are an expressions (100 | string.lower('ABC') | etc.'''
- value=" ".join(args[1:])
+ value = " ".join(args[1:])
if args[0] not in self.configvars:
- setattr(self,"var_{0}".format(args[0]),eval(value))
- setattr(self,"var_{0}".format(args[0]),eval(value))
-
- def cmd_clearscreen(self,numlines=50):
+ setattr(self, "var_{0}".format(args[0]), eval(value))
+ setattr(self, "var_{0}".format(args[0]), eval(value))
+
+ def cmd_clearscreen(self, numlines=50):
'''---Clear the console.
---'''
if os.name == "posix":
@@ -413,75 +430,81 @@ style choices:
'''---
Fallback for other operating systems.
---'''
- print '\n'*numlines
+ print '\n' * numlines
+
class dalShell(console):
def __init__(self):
pass
-
- def shell(self,db):
- console.__init__(self,prompt=">>> ",banner='dal interactive shell')
+
+ def shell(self, db):
+ console.__init__(self, prompt=">>> ", banner='dal interactive shell')
self.execCmd(db)
+
class setCopyDB():
def __init__(self):
'''---
non source or target specific vars
- ---'''
- self.strModel=None
- self.dalPath=None
- self.db=None
+ ---'''
+ self.strModel = None
+ self.dalPath = None
+ self.db = None
'''---
source vars
- ---'''
- self.sourceModel=None
- self.sourceFolder=None
- self.sourceConnectionString=None
- self.sourcedbType=None
- self.sourcedbName=None
+ ---'''
+ self.sourceModel = None
+ self.sourceFolder = None
+ self.sourceConnectionString = None
+ self.sourcedbType = None
+ self.sourcedbName = None
'''---
target vars
---'''
- self.targetdbType=None
- self.targetdbName=None
- self.targetModel=None
- self.targetFolder=None
- self.targetConnectionString=None
- self.truncate=False
+ self.targetdbType = None
+ self.targetdbName = None
+ self.targetModel = None
+ self.targetFolder = None
+ self.targetConnectionString = None
+ self.truncate = False
def _getDal(self):
- mDal=None
+ mDal = None
if self.dalPath is not None:
global DAL
- sys.path.append(self.dalPath)
- mDal=__import__('dal',globals={},locals={},fromlist=['DAL'],level=0)
- DAL=mDal.DAL
- return mDal
-
- def instDB(self,storageFolder,storageConnectionString,autoImport):
- self.db=DAL(storageConnectionString,folder=os.path.abspath(storageFolder),auto_import=autoImport)
+ sys.path.append(self.dalPath)
+ mDal = __import__(
+ 'dal', globals={}, locals={}, fromlist=['DAL'], level=0)
+ DAL = mDal.DAL
+ return mDal
+
+ def instDB(self, storageFolder, storageConnectionString, autoImport):
+ self.db = DAL(storageConnectionString, folder=os.path.abspath(
+ storageFolder), auto_import=autoImport)
return self.db
-
- def delete_DB_tables(self,storageFolder,storageType):
- print 'delete_DB_tablesn\n\t{0}\n\t{1}'.format(storageFolder,storageType)
- dataFiles=[storageType,"sql.log"]
+
+ def delete_DB_tables(self, storageFolder, storageType):
+ print 'delete_DB_tablesn\n\t{0}\n\t{1}'.format(
+ storageFolder, storageType)
+ dataFiles = [storageType, "sql.log"]
try:
for f in os.listdir(storageFolder):
- if ".table" in f:
- fTable="{0}/{1}".format(storageFolder,f)
- os.remove(fTable)
- print('deleted {0}'.format(fTable))
- for dFile in dataFiles:
- os.remove("{0}/{1}".format(storageFolder,dFile))
- print('deleted {0}'.format("{0}/{1}".format(storageFolder,dFile)))
+ if ".table" in f:
+ fTable = "{0}/{1}".format(storageFolder, f)
+ os.remove(fTable)
+ print('deleted {0}'.format(fTable))
+ for dFile in dataFiles:
+ os.remove("{0}/{1}".format(storageFolder, dFile))
+ print('deleted {0}'.format(
+ "{0}/{1}".format(storageFolder, dFile)))
except Exception, errObj:
- print(str(errObj))
-
- def truncatetables(self,tables=[]):
- if len(tables)!=0:
+ print(str(errObj))
+
+ def truncatetables(self, tables=[]):
+ if len(tables) != 0:
try:
- print 'table value: {0}'.format(tables)
- for tbl in self.db.tables:
+ print 'table value: {0}'.format(tables)
+ for tbl in self.db.tables:
for mTbl in tables:
if mTbl.startswith(tbl):
self.db[mTbl].truncate()
@@ -490,64 +513,61 @@ class setCopyDB():
else:
try:
for tbl in self.db.tables:
- self.db[tbl].truncate()
+ self.db[tbl].truncate()
except Exception, err:
- print('EXCEPTION: {0}'.format(err))
-
+ print('EXCEPTION: {0}'.format(err))
+
def copyDB(self):
- other_db=DAL("{0}://{1}".format(self.targetdbType,self.targetdbName),folder=self.targetFolder)
-
+ other_db = DAL("{0}://{1}".format(
+ self.targetdbType, self.targetdbName), folder=self.targetFolder)
+
print 'creating tables...'
-
+
for table in self.db:
- other_db.define_table(table._tablename,*[field for field in table])
+ other_db.define_table(
+ table._tablename, *[field for field in table])
'''
should there be an option to truncAte target DB?
if yes, then change args to allow for choice
and set self.trancate to the art value
-
+
if self.truncate==True:
other_db[table._tablename].truncate()
'''
-
+
print 'exporting data...'
- self.db.export_to_csv_file(open('tmp.sql','wb'))
-
+ self.db.export_to_csv_file(open('tmp.sql', 'wb'))
+
print 'importing data...'
- other_db.import_from_csv_file(open('tmp.sql','rb'))
+ other_db.import_from_csv_file(open('tmp.sql', 'rb'))
other_db.commit()
print 'done!'
print 'Attention: do not run this program again or you end up with duplicate records'
- def createfolderPath(self,folder):
+ def createfolderPath(self, folder):
try:
- if folder!=None:os.makedirs(folder)
+ if folder is not None:
+ os.makedirs(folder)
except Exception, err:
- pass
+ pass
if __name__ == '__main__':
- oCopy=setCopyDB()
- db=None
- targetDB=None
- dbfolder=None
- clean=False
- model=None
- truncate=False
+ oCopy = setCopyDB()
+ db = None
+ targetDB = None
+ dbfolder = None
+ clean = False
+ model = None
+ truncate = False
- parser=argparse.ArgumentParser(description='\
+ parser = argparse.ArgumentParser(description='\
samplecmd line:\n\
-f ./blueLite/db_storage -i -y sqlite://storage.sqlite -Y sqlite://storage2.sqlite -d ./blueLite/pyUtils/sql/blueSQL -t True',
- epilog = '')
- reqGroup=parser.add_argument_group('Required arguments')
- reqGroup.add_argument('-f','--sourceFolder'\
- ,required=True\
- ,help="path to the 'source' folder of the 'source' DB")
- reqGroup.add_argument('-F','--targetFolder'\
- ,required=False\
- ,help="path to the 'target' folder of the 'target' DB")
- reqGroup.add_argument('-y','--sourceConnectionString'\
- ,required=True\
- ,help="source db connection string ()\n\
+ epilog='')
+ reqGroup = parser.add_argument_group('Required arguments')
+ reqGroup.add_argument('-f', '--sourceFolder', required=True, help="path to the 'source' folder of the 'source' DB")
+ reqGroup.add_argument('-F', '--targetFolder', required=False, help="path to the 'target' folder of the 'target' DB")
+ reqGroup.add_argument('-y', '--sourceConnectionString', required=True, help="source db connection string ()\n\
------------------------------------------------\n\
\
sqlite://storage.db\n\
@@ -561,19 +581,16 @@ ingres://username:password@localhost/test\n\
informix://username:password@test\n\
\
------------------------------------------------")
- reqGroup.add_argument('-Y','--targetConnectionString'\
- ,required=True\
- ,help="target db type (sqlite,mySql,etc.)")
- autoImpGroup=parser.add_argument_group('optional args (auto_import)')
- autoImpGroup.add_argument('-a','--autoimport'\
- ,required=False\
- ,help='set to True to bypass loading of the model')
-
+ reqGroup.add_argument('-Y', '--targetConnectionString', required=True,
+ help="target db type (sqlite,mySql,etc.)")
+ autoImpGroup = parser.add_argument_group('optional args (auto_import)')
+ autoImpGroup.add_argument('-a', '--autoimport', required=False, help='set to True to bypass loading of the model')
+
"""
-
- *** removing -m/-M options for now --> i need a
+
+ *** removing -m/-M options for now --> i need a
better regex to match db.define('bla')...with optional db.commit()
-
+
modelGroup=parser.add_argument_group('optional args (create model)')
modelGroup.add_argument('-m','--sourcemodel'\
,required=False\
@@ -581,72 +598,64 @@ informix://username:password@test\n\
modelGroup.add_argument('-M','--targetmodel'\
,required=False\
,help='to create a model from an existing model, point to the target model')
-
+
"""
-
- miscGroup=parser.add_argument_group('optional args/tasks')
- miscGroup.add_argument('-i','--interactive'\
- ,required=False\
- ,action='store_true'\
- ,help='run in interactive mode')
- miscGroup.add_argument('-d','--dal'\
- ,required=False\
- ,help='path to dal.py')
- miscGroup.add_argument('-t','--truncate'\
- ,choices=['True','False']\
- ,help='delete the records but *not* the table of the SOURCE DB')
- miscGroup.add_argument('-b','--tables'\
- ,required=False\
- ,type=list\
- ,help='optional list (comma delimited) of SOURCE tables to truncate, defaults to all')
- miscGroup.add_argument('-c','--clean'\
- ,required=False\
- ,help='delete the DB,tables and the log file, WARNING: this is unrecoverable')
-
- args=parser.parse_args()
- db=None
- mDal=None
+ miscGroup = parser.add_argument_group('optional args/tasks')
+ miscGroup.add_argument('-i', '--interactive', required=False, action='store_true', help='run in interactive mode')
+ miscGroup.add_argument(
+ '-d', '--dal', required=False, help='path to dal.py')
+ miscGroup.add_argument('-t', '--truncate', choices=['True', 'False'], help='delete the records but *not* the table of the SOURCE DB')
+ miscGroup.add_argument('-b', '--tables', required=False, type=list, help='optional list (comma delimited) of SOURCE tables to truncate, defaults to all')
+ miscGroup.add_argument('-c', '--clean', required=False, help='delete the DB,tables and the log file, WARNING: this is unrecoverable')
+
+ args = parser.parse_args()
+ db = None
+ mDal = None
try:
- oCopy.sourceFolder=args.sourceFolder
- oCopy.targetFolder=args.sourceFolder
- sourceItems=string.split(args.sourceConnectionString,'://')
- oCopy.sourcedbType=sourceItems[0]
- oCopy.sourcedbName=sourceItems[1]
- targetItems=string.split(args.targetConnectionString,'://')
- oCopy.targetdbType=targetItems[0]
- oCopy.targetdbName=targetItems[1]
+ oCopy.sourceFolder = args.sourceFolder
+ oCopy.targetFolder = args.sourceFolder
+ sourceItems = string.split(args.sourceConnectionString, '://')
+ oCopy.sourcedbType = sourceItems[0]
+ oCopy.sourcedbName = sourceItems[1]
+ targetItems = string.split(args.targetConnectionString, '://')
+ oCopy.targetdbType = targetItems[0]
+ oCopy.targetdbName = targetItems[1]
except Exception, err:
print('EXCEPTION: {0}'.format(err))
if args.dal:
- try:
- autoImport=True
- if args.autoimport:autoImport=args.autoimport
+ try:
+ autoImport = True
+ if args.autoimport:
+ autoImport = args.autoimport
#sif not DAL in globals:
#if not sys.path.__contains__():
- oCopy.dalPath=args.dal
- mDal=oCopy._getDal()
- db=oCopy.instDB(args.sourceFolder,args.sourceConnectionString,autoImport)
+ oCopy.dalPath = args.dal
+ mDal = oCopy._getDal()
+ db = oCopy.instDB(args.sourceFolder, args.sourceConnectionString,
+ autoImport)
except Exception, err:
- print('EXCEPTION: could not set DAL\n{0}'.format(err))
+ print('EXCEPTION: could not set DAL\n{0}'.format(err))
if args.truncate:
try:
if args.truncate:
- if args.tables:tables=string.split(string.strip(args.tables),',')
- else:oCopy.truncatetables([])
+ if args.tables:
+ tables = string.split(string.strip(args.tables), ',')
+ else:
+ oCopy.truncatetables([])
except Exception, err:
print('EXCEPTION: could not truncate tables\n{0}'.format(err))
try:
- if args.clean:oCopy.delete_DB_tables(oCopy.targetFolder,oCopy.targetType)
+ if args.clean:
+ oCopy.delete_DB_tables(oCopy.targetFolder, oCopy.targetType)
except Exception, err:
print('EXCEPTION: could not clean db\n{0}'.format(err))
-
"""
*** goes with -m/-M options... removed for now
-
+
if args.sourcemodel:
try:
oCopy.sourceModel=args.sourcemodel
@@ -658,25 +667,26 @@ source model: {0}\n\
target model: {1}\n\
{2}'.format(args.sourcemodel,args.targetmodel,err))
"""
-
+
if args.sourceFolder:
try:
- oCopy.sourceFolder=os.path.abspath(args.sourceFolder)
+ oCopy.sourceFolder = os.path.abspath(args.sourceFolder)
oCopy.createfolderPath(oCopy.sourceFolder)
except Exception, err:
- print('EXCEPTION: could not create folder path\n{0}'.format(err))
- else:oCopy.dbStorageFolder=os.path.abspath(os.getcwd())
+ print('EXCEPTION: could not create folder path\n{0}'.format(err))
+ else:
+ oCopy.dbStorageFolder = os.path.abspath(os.getcwd())
if args.targetFolder:
try:
- oCopy.targetFolder=os.path.abspath(args.targetFolder)
+ oCopy.targetFolder = os.path.abspath(args.targetFolder)
oCopy.createfolderPath(oCopy.targetFolder)
except Exception, err:
- print('EXCEPTION: could not create folder path\n{0}'.format(err))
+ print('EXCEPTION: could not create folder path\n{0}'.format(err))
if not args.interactive:
- try:
+ try:
oCopy.copyDB()
except Exception, err:
- print('EXCEPTION: could not make a copy of the database\n{0}'.format(err))
+ print('EXCEPTION: could not make a copy of the database\n{0}'.format(err))
else:
- s=dalShell()
+ s = dalShell()
s.shell(db)
diff --git a/scripts/cpplugin.py b/scripts/cpplugin.py
index fbb311c4..652b8747 100644
--- a/scripts/cpplugin.py
+++ b/scripts/cpplugin.py
@@ -1,26 +1,32 @@
-import sys, glob, os, shutil
-name=sys.argv[1]
-app=sys.argv[2]
-dest=sys.argv[3]
-a=glob.glob('applications/%(app)s/*/plugin_%(name)s.*' % dict(app=app,name=name))
-b=glob.glob('applications/%(app)s/*/plugin_%(name)s/*' % dict(app=app,name=name))
+import sys
+import glob
+import os
+import shutil
+name = sys.argv[1]
+app = sys.argv[2]
+dest = sys.argv[3]
+a = glob.glob(
+ 'applications/%(app)s/*/plugin_%(name)s.*' % dict(app=app, name=name))
+b = glob.glob(
+ 'applications/%(app)s/*/plugin_%(name)s/*' % dict(app=app, name=name))
for f in a:
print 'cp %s ...' % f,
- shutil.copyfile(f,os.path.join('applications',dest,*f.split('/')[2:]))
+ shutil.copyfile(f, os.path.join('applications', dest, *f.split('/')[2:]))
print 'done'
for f in b:
print 'cp %s ...' % f,
path = f.split('/')
- for i in range(3,len(path)):
- try: os.mkdir(os.path.join('applications',dest,*path[2:i]))
- except: pass
- path = os.path.join('applications',dest,*f.split('/')[2:])
+ for i in range(3, len(path)):
+ try:
+ os.mkdir(os.path.join('applications', dest, *path[2:i]))
+ except:
+ pass
+ path = os.path.join('applications', dest, *f.split('/')[2:])
if os.path.isdir(f):
if not os.path.exists(path):
- shutil.copytree(f,path)
+ shutil.copytree(f, path)
else:
- shutil.copyfile(f,path)
+ shutil.copyfile(f, path)
print 'done'
-
diff --git a/scripts/dbsessions2trash.py b/scripts/dbsessions2trash.py
index 7226166c..a8ccd69d 100644
--- a/scripts/dbsessions2trash.py
+++ b/scripts/dbsessions2trash.py
@@ -10,17 +10,16 @@ DB_URI = 'sqlite://sessions.sqlite'
EXPIRATION_MINUTES = 60
SLEEP_MINUTES = 5
-while 1: # Infinite loop
- now = time() # get current Unix timestamp
+while 1: # Infinite loop
+ now = time() # get current Unix timestamp
for row in db().select(db.web2py_session_welcome.ALL):
t = row.modified_datetime
# Convert to a Unix timestamp
- t = mktime(t.timetuple())+1e-6*t.microsecond
+ t = mktime(t.timetuple()) + 1e-6 * t.microsecond
if now - t > EXPIRATION_MINUTES * 60:
del db.web2py_session_welcome[row.id]
- db.commit() # Write changes to database
+ db.commit() # Write changes to database
sleep(SLEEP_MINUTES * 60)
-
diff --git a/scripts/dict_diff.py b/scripts/dict_diff.py
index b3af81e1..f11b41ce 100644
--- a/scripts/dict_diff.py
+++ b/scripts/dict_diff.py
@@ -6,11 +6,11 @@
@license: MIT
@since: 2011-06-17
-Usage: dict_diff [OPTION]... dict1 dict2
+Usage: dict_diff [OPTION]... dict1 dict2
Show the differences for two dictionaries.
-h, --help Display this help message.
-
+
dict1 and dict2 are two web2py dictionary files to compare. These are the files
located in the "languages" directory of a web2py app. The tools show the
differences between the two files.
@@ -22,34 +22,38 @@ import getopt
import os.path
import sys
+
def main(argv):
"""Parse the arguments and start the main process."""
- try:
+ try:
opts, args = getopt.getopt(argv, "h", ["help"])
except getopt.GetoptError:
exit_with_parsing_error()
for opt, arg in opts:
arg = arg # To avoid a warning from Pydev
if opt in ("-h", "--help"):
- usage()
+ usage()
sys.exit()
if len(args) == 2:
params = list(get_dicts(*args))
params.extend(get_dict_names(*args))
compare_dicts(*params)
else:
- exit_with_parsing_error()
+ exit_with_parsing_error()
-def exit_with_parsing_error():
+
+def exit_with_parsing_error():
"""Report invalid arguments and usage."""
print("Invalid argument(s).")
usage()
sys.exit(2)
+
def usage():
"""Display the documentation"""
print(__doc__)
+
def get_dicts(dict_path1, dict_path2):
"""
Parse the dictionaries.
@@ -58,7 +62,8 @@ def get_dicts(dict_path1, dict_path2):
@return: The two dictionaries as a sequence.
"""
- return eval(open(dict_path1).read()), eval(open(dict_path2).read())
+ return eval(open(dict_path1).read()), eval(open(dict_path2).read())
+
def get_dict_names(dict1_path, dict2_path):
"""
@@ -75,7 +80,8 @@ def get_dict_names(dict1_path, dict2_path):
dict1_name = "dict1"
dict2_name = "dict2"
return dict1_name, dict2_name
-
+
+
def compare_dicts(dict1, dict2, dict1_name, dict2_name):
"""
Compare the two dictionaries. Print out the result.
@@ -100,7 +106,8 @@ def compare_dicts(dict1, dict2, dict1_name, dict2_name):
has_value_differences = True
if not has_value_differences:
print " None"
-
+
+
def print_key_diff(key_diff, dict1_name, dict2_name):
"""
Prints the keys in the first dictionary and are in the second dictionary.
@@ -116,6 +123,6 @@ def print_key_diff(key_diff, dict1_name, dict2_name):
else:
print " None"
print
-
+
if __name__ == "__main__":
main(sys.argv[1:]) # Start the process (without the application name)
diff --git a/scripts/extract_mysql_models.py b/scripts/extract_mysql_models.py
index b39c27c6..82efb9a5 100644
--- a/scripts/extract_mysql_models.py
+++ b/scripts/extract_mysql_models.py
@@ -24,45 +24,48 @@ import subprocess
import re
import sys
data_type_map = dict(
- varchar = 'string',
- int = 'integer',
- integer = 'integer',
- tinyint = 'integer',
- smallint = 'integer',
- mediumint = 'integer',
- bigint = 'integer',
- float = 'double',
- double = 'double',
- char = 'string',
- decimal = 'integer',
- date = 'date',
+ varchar='string',
+ int='integer',
+ integer='integer',
+ tinyint='integer',
+ smallint='integer',
+ mediumint='integer',
+ bigint='integer',
+ float='double',
+ double='double',
+ char='string',
+ decimal='integer',
+ date='date',
#year = 'date',
- time = 'time',
- timestamp = 'datetime',
- datetime = 'datetime',
- binary = 'blob',
- blob = 'blob',
- tinyblob = 'blob',
- mediumblob = 'blob',
- longblob = 'blob',
- text = 'text',
- tinytext = 'text',
- mediumtext = 'text',
- longtext = 'text',
- )
+ time='time',
+ timestamp='datetime',
+ datetime='datetime',
+ binary='blob',
+ blob='blob',
+ tinyblob='blob',
+ mediumblob='blob',
+ longblob='blob',
+ text='text',
+ tinytext='text',
+ mediumtext='text',
+ longtext='text',
+)
+
def mysql(database_name, username, password):
p = subprocess.Popen(['mysql',
'--user=%s' % username,
- '--password=%s'% password,
+ '--password=%s' % password,
'--execute=show tables;',
database_name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
sql_showtables, stderr = p.communicate()
- tables = [re.sub('\|\s+([^\|*])\s+.*', '\1', x) for x in sql_showtables.split()[1:]]
- connection_string = "legacy_db = DAL('mysql://%s:%s@localhost/%s')"%(username, password, database_name)
+ tables = [re.sub(
+ '\|\s+([^\|*])\s+.*', '\1', x) for x in sql_showtables.split()[1:]]
+ connection_string = "legacy_db = DAL('mysql://%s:%s@localhost/%s')" % (
+ username, password, database_name)
legacy_db_table_web2py_code = []
for table_name in tables:
#get the sql create statement
@@ -71,37 +74,40 @@ def mysql(database_name, username, password):
'--password=%s' % password,
'--skip-add-drop-table',
'--no-data', database_name,
- table_name], stdin=subprocess.PIPE, stdout=subprocess.PIPE,stderr=subprocess.PIPE)
- sql_create_stmnt,stderr = p.communicate()
- if 'CREATE' in sql_create_stmnt:#check if the table exists
+ table_name], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ sql_create_stmnt, stderr = p.communicate()
+ if 'CREATE' in sql_create_stmnt: # check if the table exists
#remove garbage lines from sql statement
sql_lines = sql_create_stmnt.split('\n')
- sql_lines = [x for x in sql_lines if not(x.startswith('--') or x.startswith('/*') or x =='')]
+ sql_lines = [x for x in sql_lines if not(
+ x.startswith('--') or x.startswith('/*') or x == '')]
#generate the web2py code from the create statement
web2py_table_code = ''
- table_name = re.search('CREATE TABLE .(\S+). \(', sql_lines[0]).group(1)
+ table_name = re.search(
+ 'CREATE TABLE .(\S+). \(', sql_lines[0]).group(1)
fields = []
for line in sql_lines[1:-1]:
if re.search('KEY', line) or re.search('PRIMARY', line) or re.search(' ID', line) or line.startswith(')'):
continue
hit = re.search('(\S+)\s+(\S+)(,| )( .*)?', line)
- if hit!=None:
+ if hit is not None:
name, d_type = hit.group(1), hit.group(2)
- d_type = re.sub(r'(\w+)\(.*',r'\1',d_type)
- name = re.sub('`','',name)
- web2py_table_code += "\n Field('%s','%s'),"%(name,data_type_map[d_type])
- web2py_table_code = "legacy_db.define_table('%s',%s\n migrate=False)"%(table_name,web2py_table_code)
+ d_type = re.sub(r'(\w+)\(.*', r'\1', d_type)
+ name = re.sub('`', '', name)
+ web2py_table_code += "\n Field('%s','%s')," % (
+ name, data_type_map[d_type])
+ web2py_table_code = "legacy_db.define_table('%s',%s\n migrate=False)" % (table_name, web2py_table_code)
legacy_db_table_web2py_code.append(web2py_table_code)
#----------------------------------------
#write the legacy db to file
- legacy_db_web2py_code = connection_string+"\n\n"
- legacy_db_web2py_code += "\n\n#--------\n".join(legacy_db_table_web2py_code)
+ legacy_db_web2py_code = connection_string + "\n\n"
+ legacy_db_web2py_code += "\n\n#--------\n".join(
+ legacy_db_table_web2py_code)
return legacy_db_web2py_code
regex = re.compile('(.*?):(.*?)@(.*)')
-if len(sys.argv)<2 or not regex.match(sys.argv[1]):
+if len(sys.argv) < 2 or not regex.match(sys.argv[1]):
print 'USAGE:\n\n extract_mysql_models.py username:password@data_basename\n\n'
else:
m = regex.match(sys.argv[1])
- print mysql(m.group(3),m.group(1),m.group(2))
-
+ print mysql(m.group(3), m.group(1), m.group(2))
diff --git a/scripts/extract_pgsql_models.py b/scripts/extract_pgsql_models.py
index a6b80016..79a1699f 100644
--- a/scripts/extract_pgsql_models.py
+++ b/scripts/extract_pgsql_models.py
@@ -46,19 +46,21 @@ KWARGS = ('type', 'length', 'default', 'required', 'ondelete',
import sys
-def query(conn, sql,*args):
+def query(conn, sql, *args):
"Execute a SQL query and return rows as a list of dicts"
cur = conn.cursor()
ret = []
try:
- if DEBUG: print >> sys.stderr, "QUERY: ", sql % args
+ if DEBUG:
+ print >> sys.stderr, "QUERY: ", sql % args
cur.execute(sql, args)
for row in cur:
dic = {}
for i, value in enumerate(row):
field = cur.description[i][0]
dic[field] = value
- if DEBUG: print >> sys.stderr, "RET: ", dic
+ if DEBUG:
+ print >> sys.stderr, "RET: ", dic
ret.append(dic)
return ret
finally:
@@ -75,7 +77,8 @@ def get_tables(conn, schema=SCHEMA):
def get_fields(conn, table):
"Retrieve field list for a given table"
- if DEBUG: print >> sys.stderr, "Processing TABLE", table
+ if DEBUG:
+ print >> sys.stderr, "Processing TABLE", table
rows = query(conn, """
SELECT column_name, data_type,
is_nullable,
@@ -90,13 +93,13 @@ def get_fields(conn, table):
def define_field(conn, table, field, pks):
"Determine field type, default value, references, etc."
- f={}
+ f = {}
ref = references(conn, table, field['column_name'])
if ref:
f.update(ref)
elif field['column_default'] and \
- field['column_default'].startswith("nextval") and \
- field['column_name'] in pks:
+ field['column_default'].startswith("nextval") and \
+ field['column_name'] in pks:
# postgresql sequence (SERIAL) and primary key!
f['type'] = "'id'"
elif field['data_type'].startswith('character'):
@@ -109,7 +112,7 @@ def define_field(conn, table, field, pks):
f['type'] = "'boolean'"
elif field['data_type'] in ('integer', 'smallint', 'bigint'):
f['type'] = "'integer'"
- elif field['data_type'] in ('double precision', 'real' ):
+ elif field['data_type'] in ('double precision', 'real'):
f['type'] = "'double'"
elif field['data_type'] in ('timestamp', 'timestamp without time zone'):
f['type'] = "'datetime'"
@@ -124,17 +127,17 @@ def define_field(conn, table, field, pks):
elif field['data_type'] in ('bytea', ):
f['type'] = "'blob'"
elif field['data_type'] in ('point', 'lseg', 'polygon', 'unknown', 'USER-DEFINED'):
- f['type'] = "" # unsupported?
+ f['type'] = "" # unsupported?
else:
raise RuntimeError("Data Type not supported: %s " % str(field))
try:
if field['column_default']:
- if field['column_default']=="now()":
+ if field['column_default'] == "now()":
d = "request.now"
- elif field['column_default']=="true":
+ elif field['column_default'] == "true":
d = "True"
- elif field['column_default']=="false":
+ elif field['column_default'] == "false":
d = "False"
else:
d = repr(eval(field['column_default']))
@@ -142,7 +145,8 @@ def define_field(conn, table, field, pks):
except (ValueError, SyntaxError):
pass
except Exception, e:
- raise RuntimeError("Default unsupported '%s'" % field['column_default'])
+ raise RuntimeError(
+ "Default unsupported '%s'" % field['column_default'])
if not field['is_nullable']:
f['notnull'] = "True"
@@ -203,40 +207,40 @@ def references(conn, table, field):
AND information_schema.key_column_usage.column_name=%s
AND information_schema.table_constraints.constraint_type='FOREIGN KEY'
;""", table, field)
- if len(rows1)==1:
+ if len(rows1) == 1:
rows2 = query(conn, """
SELECT table_name, column_name, *
FROM information_schema.constraint_column_usage
WHERE constraint_name=%s
""", rows1[0]['constraint_name'])
row = None
- if len(rows2)>1:
- row = rows2[int(rows1[0]['ordinal_position'])-1]
+ if len(rows2) > 1:
+ row = rows2[int(rows1[0]['ordinal_position']) - 1]
keyed = True
- if len(rows2)==1:
+ if len(rows2) == 1:
row = rows2[0]
keyed = False
if row:
- if keyed: # THIS IS BAD, DON'T MIX "id" and primarykey!!!
+ if keyed: # THIS IS BAD, DON'T MIX "id" and primarykey!!!
ref = {'type': "'reference %s.%s'" % (row['table_name'],
row['column_name'])}
else:
ref = {'type': "'reference %s'" % (row['table_name'],)}
- if rows1[0]['delete_rule']!="NO ACTION":
+ if rows1[0]['delete_rule'] != "NO ACTION":
ref['ondelete'] = repr(rows1[0]['delete_rule'])
return ref
elif rows2:
raise RuntimeError("Unsupported foreign key reference: %s" %
- str(rows2))
+ str(rows2))
elif rows1:
raise RuntimeError("Unsupported referential constraint: %s" %
- str(rows1))
+ str(rows1))
def define_table(conn, table):
"Output single table definition"
- fields = get_fields(conn, table)
+ fields = get_fields(conn, table)
pks = primarykeys(conn, table)
print "db.define_table('%s'," % (table, )
for field in fields:
@@ -244,11 +248,11 @@ def define_table(conn, table):
fdef = define_field(conn, table, field, pks)
if fname not in pks and is_unique(conn, table, field):
fdef['unique'] = "True"
- if fdef['type']=="'id'" and fname in pks:
+ if fdef['type'] == "'id'" and fname in pks:
pks.pop(pks.index(fname))
print " Field('%s', %s)," % (fname,
- ', '.join(["%s=%s" % (k, fdef[k]) for k in KWARGS
- if k in fdef and fdef[k]]))
+ ', '.join(["%s=%s" % (k, fdef[k]) for k in KWARGS
+ if k in fdef and fdef[k]]))
if pks:
print " primarykey=[%s]," % ", ".join(["'%s'" % pk for pk in pks])
print " migrate=migrate)"
@@ -280,5 +284,3 @@ if __name__ == "__main__":
)
# Start model code generation:
define_db(cnn, db, host, port, user, passwd)
-
-
diff --git a/scripts/fixws.py b/scripts/fixws.py
index 8a1e7433..706a413d 100755
--- a/scripts/fixws.py
+++ b/scripts/fixws.py
@@ -1,4 +1,6 @@
-import sys, glob
+import sys
+import glob
+
def read_fileb(filename, mode='rb'):
f = open(filename, mode)
@@ -7,6 +9,7 @@ def read_fileb(filename, mode='rb'):
finally:
f.close()
+
def write_fileb(filename, value, mode='wb'):
f = open(filename, mode)
try:
@@ -18,7 +21,7 @@ for filename in glob.glob(sys.argv[1]):
data1 = read_fileb(filename)
write_fileb(filename + '.bak2', data1)
data2lines = read_fileb(filename).strip().split('\n')
- data2 = '\n'.join([line.rstrip().replace('\t',' '*2) for line in data2lines])+'\n'
+ data2 = '\n'.join([line.rstrip(
+ ).replace('\t', ' ' * 2) for line in data2lines]) + '\n'
write_fileb(filename, data2)
- print filename, len(data1)-len(data2)
-
+ print filename, len(data1) - len(data2)
diff --git a/scripts/layout_make.py b/scripts/layout_make.py
index 84c86910..fe8baf34 100755
--- a/scripts/layout_make.py
+++ b/scripts/layout_make.py
@@ -5,10 +5,11 @@ import sys
import re
from BeautifulSoup import BeautifulSoup as BS
+
def head(styles):
title = '{{=response.title or request.application}}'
items = '\n'.join(["{{response.files.append(URL(request.application,'static','%s'))}}" % (style) for style in styles])
- loc=""""""
- return "\n%s\n%s\n{{include 'web2py_ajax.html'}}\n%s" % (title,items,loc)
+ return "\n%s\n%s\n{{include 'web2py_ajax.html'}}\n%s" % (title, items, loc)
+
def content():
return """{{=response.flash or ''}}
{{include}}"""
+
def process(folder):
- indexfile = open(os.path.join(folder,'index.html'),'rb')
+ indexfile = open(os.path.join(folder, 'index.html'), 'rb')
try:
soup = BS(indexfile.read())
finally:
indexfile.close()
styles = [x['href'] for x in soup.findAll('link')]
- soup.find('head').contents=BS(head(styles))
+ soup.find('head').contents = BS(head(styles))
try:
- soup.find('h1').contents=BS('{{=response.title or request.application}}')
- soup.find('h2').contents=BS("{{=response.subtitle or '=response.subtitle'}}")
+ soup.find(
+ 'h1').contents = BS('{{=response.title or request.application}}')
+ soup.find('h2').contents = BS(
+ "{{=response.subtitle or '=response.subtitle'}}")
except:
pass
- for match in (soup.find('div',id='menu'),
- soup.find('div',{'class':'menu'}),
- soup.find('div',id='nav'),
- soup.find('div',{'class':'nav'})):
+ for match in (soup.find('div', id='menu'),
+ soup.find('div', {'class': 'menu'}),
+ soup.find('div', id='nav'),
+ soup.find('div', {'class': 'nav'})):
if match:
- match.contents=BS('{{=MENU(response.menu)}}')
+ match.contents = BS('{{=MENU(response.menu)}}')
break
- done=False
- for match in (soup.find('div',id='content'),
- soup.find('div',{'class':'content'}),
- soup.find('div',id='main'),
- soup.find('div',{'class':'main'})):
+ done = False
+ for match in (soup.find('div', id='content'),
+ soup.find('div', {'class': 'content'}),
+ soup.find('div', id='main'),
+ soup.find('div', {'class': 'main'})):
if match:
- match.contents=BS(content())
- done=True
+ match.contents = BS(content())
+ done = True
break
if done:
page = soup.prettify()
- page = re.compile("\s*\{\{=response\.flash or ''\}\}\s*",re.MULTILINE)\
- .sub("{{=response.flash or ''}}",page)
+ page = re.compile("\s*\{\{=response\.flash or ''\}\}\s*", re.MULTILINE)\
+ .sub("{{=response.flash or ''}}", page)
print page
else:
- raise Exception, "Unable to convert"
+ raise Exception("Unable to convert")
-if __name__=='__main__':
- if len(sys.argv)<2:
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
print """USAGE:
1) start a new web2py application
2) Download a sample free layout from the web into the static/ folder of
@@ -96,4 +101,3 @@ if __name__=='__main__':
print 'Folder %s does not exist' % sys.argv[1]
else:
process(sys.argv[1])
-
diff --git a/scripts/make_min_web2py.py b/scripts/make_min_web2py.py
index 4c3fec52..9fd0c19c 100644
--- a/scripts/make_min_web2py.py
+++ b/scripts/make_min_web2py.py
@@ -38,51 +38,60 @@ gluon/contrib/pyrtf/
gluon/contrib/pysimplesoap/
"""
-import sys, os, shutil, glob
+import sys
+import os
+import shutil
+import glob
+
def main():
global REQUIRED, IGNORED
-
- if len(sys.argv)<2:
+
+ if len(sys.argv) < 2:
print USAGE
-
+
# make target folder
target = sys.argv[1]
os.mkdir(target)
-
+
# change to os specificsep
- REQUIRED = REQUIRED.replace('/',os.sep)
- IGNORED = IGNORED.replace('/',os.sep)
-
+ REQUIRED = REQUIRED.replace('/', os.sep)
+ IGNORED = IGNORED.replace('/', os.sep)
# make a list of all files to include
- files = [x.strip() for x in REQUIRED.split('\n') \
- if x and not x[0]=='#']
- ignore = [x.strip() for x in IGNORED.split('\n') \
- if x and not x[0]=='#']
-
+ files = [x.strip() for x in REQUIRED.split('\n')
+ if x and not x[0] == '#']
+ ignore = [x.strip() for x in IGNORED.split('\n')
+ if x and not x[0] == '#']
+
def accept(filename):
for p in ignore:
if filename.startswith(p):
return False
return True
- pattern = os.path.join('gluon','*.py')
+ pattern = os.path.join('gluon', '*.py')
while True:
newfiles = [x for x in glob.glob(pattern) if accept(x)]
- if not newfiles: break
+ if not newfiles:
+ break
files += newfiles
- pattern = os.path.join(pattern[:-3],'*.py')
+ pattern = os.path.join(pattern[:-3], '*.py')
# copy all files, make missing folder, build default.py
files.sort()
- defaultpy = os.path.join('applications','welcome','controllers','default.py')
+ defaultpy = os.path.join(
+ 'applications', 'welcome', 'controllers', 'default.py')
for f in files:
dirs = f.split(os.path.sep)
- for i in range(1,len(dirs)):
- try: os.mkdir(target+os.sep+os.path.join(*dirs[:i]))
- except OSError: pass
- if f==defaultpy:
- open(os.path.join(target,f),'w').write('def index(): return "hello"\n')
+ for i in range(1, len(dirs)):
+ try:
+ os.mkdir(target + os.sep + os.path.join(*dirs[:i]))
+ except OSError:
+ pass
+ if f == defaultpy:
+ open(os.path.join(
+ target, f), 'w').write('def index(): return "hello"\n')
else:
- shutil.copyfile(f,os.path.join(target,f))
-
-if __name__=='__main__': main()
+ shutil.copyfile(f, os.path.join(target, f))
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/rmorphans.py b/scripts/rmorphans.py
index d297939b..8b50ef61 100644
--- a/scripts/rmorphans.py
+++ b/scripts/rmorphans.py
@@ -6,18 +6,18 @@ paths2 = []
while paths:
path = paths.pop()
for filename in os.listdir(path):
- fullname = os.path.join(path,filename)
+ fullname = os.path.join(path, filename)
if os.path.isdir(fullname):
paths.append(fullname)
else:
extension = filename.split('.')[-1]
- if extension.lower() in ('png','gif','jpg','jpeg','js','css'):
- paths1.append((filename,fullname))
- if extension.lower() in ('css','js','py','html'):
+ if extension.lower() in ('png', 'gif', 'jpg', 'jpeg', 'js', 'css'):
+ paths1.append((filename, fullname))
+ if extension.lower() in ('css', 'js', 'py', 'html'):
paths2.append(fullname)
-for filename,fullname in paths1:
+for filename, fullname in paths1:
for otherfullname in paths2:
- if open(otherfullname).read().find(filename)>=0:
+ if open(otherfullname).read().find(filename) >= 0:
break
else:
print fullname
diff --git a/scripts/sessions2trash.py b/scripts/sessions2trash.py
index 5357659f..11e685fc 100755
--- a/scripts/sessions2trash.py
+++ b/scripts/sessions2trash.py
@@ -162,7 +162,7 @@ class SessionFile(object):
def last_visit_default(self):
return datetime.datetime.fromtimestamp(
- os.stat(self.filename)[stat.ST_MTIME])
+ os.stat(self.filename)[stat.ST_MTIME])
def __str__(self):
return self.filename
@@ -175,7 +175,7 @@ def total_seconds(delta):
Args:
delta: datetime.timedelta instance.
"""
- return (delta.microseconds + (delta.seconds + (delta.days * 24 * 3600)) * \
+ return (delta.microseconds + (delta.seconds + (delta.days * 24 * 3600)) *
10 ** 6) / 10 ** 6
@@ -186,25 +186,25 @@ def main():
parser = OptionParser(usage=usage)
parser.add_option('-f', '--force',
- action='store_true', dest='force', default=False,
- help=('Ignore session expiration. '
- 'Force expiry based on -x option or auth.settings.expiration.')
- )
+ action='store_true', dest='force', default=False,
+ help=('Ignore session expiration. '
+ 'Force expiry based on -x option or auth.settings.expiration.')
+ )
parser.add_option('-o', '--once',
- action='store_true', dest='once', default=False,
- help='Delete sessions, then exit.',
- )
+ action='store_true', dest='once', default=False,
+ help='Delete sessions, then exit.',
+ )
parser.add_option('-s', '--sleep',
- dest='sleep', default=SLEEP_MINUTES * 60, type="int",
- help='Number of seconds to sleep between executions. Default 300.',
- )
+ dest='sleep', default=SLEEP_MINUTES * 60, type="int",
+ help='Number of seconds to sleep between executions. Default 300.',
+ )
parser.add_option('-v', '--verbose',
- default=0, action='count',
- help="print verbose output, a second -v increases verbosity")
+ default=0, action='count',
+ help="print verbose output, a second -v increases verbosity")
parser.add_option('-x', '--expiration',
- dest='expiration', default=None, type="int",
- help='Expiration value for sessions without expiration (in seconds)',
- )
+ dest='expiration', default=None, type="int",
+ help='Expiration value for sessions without expiration (in seconds)',
+ )
(options, unused_args) = parser.parse_args()
diff --git a/scripts/standalone_exe_cxfreeze.py b/scripts/standalone_exe_cxfreeze.py
index 9eb90d14..a21451ff 100644
--- a/scripts/standalone_exe_cxfreeze.py
+++ b/scripts/standalone_exe_cxfreeze.py
@@ -30,35 +30,35 @@ if sys.platform == 'win32':
base_modules.remove('macpath')
buildOptions = dict(
- compressed = True,
- excludes = ["macpath","PyQt4"],
- includes = base_modules,
- include_files=[
- 'applications',
- 'ABOUT',
- 'LICENSE',
- 'VERSION',
- 'logging.example.conf',
- 'options_std.py',
- 'app.example.yaml',
- 'queue.example.yaml',
- ],
- # append any extra module by extending the list below -
- # "contributed_modules+["lxml"]"
- packages = contributed_modules,
- )
+ compressed=True,
+ excludes=["macpath", "PyQt4"],
+ includes=base_modules,
+ include_files=[
+ 'applications',
+ 'ABOUT',
+ 'LICENSE',
+ 'VERSION',
+ 'logging.example.conf',
+ 'options_std.py',
+ 'app.example.yaml',
+ 'queue.example.yaml',
+ ],
+ # append any extra module by extending the list below -
+ # "contributed_modules+["lxml"]"
+ packages=contributed_modules,
+)
setup(
- name = "Web2py",
- version=web2py_version,
- author="Massimo DiPierro",
- description="web2py web framework",
- license = "LGPL v3",
- options = dict(build_exe = buildOptions),
- executables = [Executable("web2py.py",
- base=base,
- compress = True,
- icon = "web2py.ico",
- targetName="web2py.exe",
- copyDependentFiles = True)],
- )
+ name="Web2py",
+ version=web2py_version,
+ author="Massimo DiPierro",
+ description="web2py web framework",
+ license="LGPL v3",
+ options=dict(build_exe=buildOptions),
+ executables=[Executable("web2py.py",
+ base=base,
+ compress=True,
+ icon="web2py.ico",
+ targetName="web2py.exe",
+ copyDependentFiles=True)],
+)
diff --git a/scripts/sync_languages.py b/scripts/sync_languages.py
index 8f1f8d6d..ff36c779 100755
--- a/scripts/sync_languages.py
+++ b/scripts/sync_languages.py
@@ -14,26 +14,27 @@ sys.path.insert(0, '.')
file = sys.argv[1]
apps = sys.argv[2:]
+
def sync_language(d, data):
- ''' this function makes sure a translated string will be prefered over an untranslated
- string when syncing languages between apps. when both are translated, it prefers the
+    ''' this function makes sure a translated string will be preferred over an untranslated
+ string when syncing languages between apps. when both are translated, it prefers the
latter app, as did the original script
'''
-
+
for key in data:
# if this string is not in the allready translated data, add it
if key not in d:
d[key] = data[key]
# see if there is a translated string in the original list, but not in the new list
- elif (
- ((d[key] != '') or (d[key] != key)) and
- ((data[key] == '') or (data[key] == key))
- ):
+ elif (
+ ((d[key] != '') or (d[key] != key)) and
+ ((data[key] == '') or (data[key] == key))
+ ):
d[key] = d[key]
# any other case (wether there is or there isn't a translated string)
else:
d[key] = data[key]
-
+
return d
d = {}
@@ -45,7 +46,7 @@ for app in apps:
data = eval(langfile.read())
finally:
langfile.close()
-
+
d = sync_language(d, data)
path = 'applications/%s/' % apps[-1]
@@ -68,4 +69,3 @@ for app in oapps:
path2 = 'applications/%s/' % app
file2 = os.path.join(path2, 'languages', '%s.py' % file)
shutil.copyfile(file1, file2)
-
diff --git a/scripts/tickets2db.py b/scripts/tickets2db.py
index cfcb15b3..ecb1b4cd 100755
--- a/scripts/tickets2db.py
+++ b/scripts/tickets2db.py
@@ -15,7 +15,7 @@ SLEEP_MINUTES = 5
errors_path = os.path.join(request.folder, 'errors')
try:
- db_string = open(os.path.join(request.folder, 'private', 'ticket_storage.txt')).read().replace('\r','').replace('\n','').strip()
+ db_string = open(os.path.join(request.folder, 'private', 'ticket_storage.txt')).read().replace('\r', '').replace('\n', '').strip()
except:
db_string = 'sqlite://storage.db'
@@ -23,7 +23,8 @@ db_path = os.path.join(request.folder, 'databases')
tk_db = DAL(db_string, folder=db_path, auto_import=True)
ts = TicketStorage(db=tk_db)
-tk_table = ts._get_table(db=tk_db, tablename=ts.tablename, app=request.application)
+tk_table = ts._get_table(
+ db=tk_db, tablename=ts.tablename, app=request.application)
hashes = {}
@@ -46,6 +47,5 @@ while 1:
)
tk_db.commit()
os.unlink(filename)
-
- time.sleep(SLEEP_MINUTES * 60)
+ time.sleep(SLEEP_MINUTES * 60)
diff --git a/scripts/tickets2email.py b/scripts/tickets2email.py
index 2a557282..8fe4e896 100755
--- a/scripts/tickets2email.py
+++ b/scripts/tickets2email.py
@@ -42,8 +42,8 @@ while 1:
error = RestrictedError()
error.load(request, request.application, file)
- mail.send(to=administrator_email, subject='new web2py ticket', message=error.traceback)
+ mail.send(to=administrator_email,
+ subject='new web2py ticket', message=error.traceback)
os.unlink(os.path.join(path, file))
time.sleep(SLEEP_MINUTES * 60)
-
diff --git a/scripts/update_web2py.py b/scripts/update_web2py.py
index c5606966..a570ced6 100644
--- a/scripts/update_web2py.py
+++ b/scripts/update_web2py.py
@@ -1,9 +1,9 @@
-#!/usr/bin/env python
+#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
crontab -e
-* 3 * * * root path/to/this/file
+* 3 * * * root path/to/this/file
"""
USER = 'www-data'
@@ -14,7 +14,7 @@ import os
import urllib
import zipfile
-if len(sys.argv)>1 and sys.argv[1] == 'nightly':
+if len(sys.argv) > 1 and sys.argv[1] == 'nightly':
version = 'http://web2py.com/examples/static/nightly/web2py_src.zip'
else:
version = 'http://web2py.com/examples/static/web2py_src.zip'
@@ -23,11 +23,11 @@ realpath = os.path.realpath(__file__)
path = os.path.dirname(os.path.dirname(os.path.dirname(realpath)))
os.chdir(path)
try:
- old_version = open('web2py/VERSION','r').read().strip()
+ old_version = open('web2py/VERSION', 'r').read().strip()
except IOError:
old_version = ''
-open(TMPFILENAME,'wb').write(urllib.urlopen(version).read())
+open(TMPFILENAME, 'wb').write(urllib.urlopen(version).read())
new_version = zipfile.ZipFile(TMPFILENAME).read('web2py/VERSION').strip()
-if new_version>old_version:
- os.system('sudo -u %s unzip -o %s' % (USER,TMPFILENAME))
+if new_version > old_version:
+ os.system('sudo -u %s unzip -o %s' % (USER, TMPFILENAME))
os.system('apachectl restart | apache2ctl restart')
diff --git a/scripts/zip_static_files.py b/scripts/zip_static_files.py
index 67d342f0..57054e5b 100644
--- a/scripts/zip_static_files.py
+++ b/scripts/zip_static_files.py
@@ -7,6 +7,7 @@
import os
import gzip
+
def zip_static(filelist=[]):
tsave = 0
for fi in filelist:
@@ -24,17 +25,18 @@ def zip_static(filelist=[]):
if zatime == atime and zmtime == mtime:
print 'skipping %s, already gzipped to the latest version' % os.path.basename(fi)
continue
- print 'gzipping %s to %s' % (os.path.basename(fi), os.path.basename(gfi))
+ print 'gzipping %s to %s' % (
+ os.path.basename(fi), os.path.basename(gfi))
f_in = open(fi, 'rb')
f_out = gzip.open(gfi, 'wb')
f_out.writelines(f_in)
f_out.close()
f_in.close()
- os.utime(gfi, (atime,mtime))
+ os.utime(gfi, (atime, mtime))
saved = fstats.st_size - os.stat(gfi).st_size
- tsave+= saved
+ tsave += saved
- print 'saved %s KB' % (int(tsave)/1000.0)
+ print 'saved %s KB' % (int(tsave) / 1000.0)
if __name__ == '__main__':
ALLOWED_EXTS = ['.css', '.js']