Compare commits

..

36 Commits

Author SHA1 Message Date
mdipierro
d566e4f444 simplified grid console logic and style 2012-09-04 17:58:52 -05:00
mdipierro
2174677f6f R-2.0.7 2012-09-04 15:32:27 -05:00
mdipierro
fb544a832c fixed issue 976 with quoting in pg8000 2012-09-04 15:09:02 -05:00
mdipierro
d9121967e7 fixed problem with headers in components 2012-09-04 15:07:09 -05:00
mdipierro
f2247bc5b6 prevent exception in admin, thanks Marin 2012-09-04 14:47:13 -05:00
mdipierro
ee0cffc944 fixed update with compute fields (again) and components in markmin 2012-09-04 14:43:38 -05:00
mdipierro
a45e08c43b try fetchall except, back in executesql, thanks Carlos 2012-09-03 16:15:06 -05:00
mdipierro
9c931025cc executesql(fetch=False) 2012-09-03 14:35:01 -05:00
mdipierro
07a97d62b8 minor changes in web2py.css 2012-09-03 10:18:15 -05:00
mdipierro
65fec492f0 fixed input-xlarge class issue, thanks Anthony 2012-09-03 08:55:54 -05:00
mdipierro
da7d3c6dbd updated markmin docs 2012-09-03 08:54:45 -05:00
mdipierro
73c66c142d fixed error in admin login 2012-09-03 08:47:10 -05:00
mdipierro
29c513e5a3 faster custom_import, thanks Michele 2012-09-03 08:17:05 -05:00
mdipierro
72bb9d3513 executesql allows fields and/or colnames to be independently specified, thanks Anthony and Niphlod 2012-09-03 08:12:40 -05:00
mdipierro
90c0e0ff7e CACHED_REGEXES_MAX_SIZE, thanks Anthony 2012-09-03 08:09:49 -05:00
mdipierro
7b24ce3f41 fixed backward compatibility issue in dal with __int__, thanks Dominic 2012-09-02 22:34:24 -05:00
mdipierro
d61748466e conditional session connect only 2012-09-02 22:30:33 -05:00
mdipierro
c17c28e42b routes_in = [('/welcome','/welcome',dict(web2py_disable_session = True))] 2012-09-02 15:09:32 -05:00
mdipierro
524a65d0a9 routes_in = (regex, value, custom_env) 2012-09-02 15:03:25 -05:00
mdipierro
51d8932252 allow navbar without define_tables, thanks Anthony 2012-09-02 14:52:52 -05:00
mdipierro
7b655465bb faster re.compile in validators 2012-09-02 12:26:16 -05:00
mdipierro
71c44e62b8 conditional models with cached re.compile, thanks Anthony 2012-09-02 12:12:59 -05:00
mdipierro
2295b93f32 capitalized regex in dal.py 2012-09-02 11:58:56 -05:00
mdipierro
04d0b82268 less regex in dal.py 2012-09-02 11:50:35 -05:00
mdipierro
dffb2eada2 removed one re.compile in rewrite filter_url 2012-09-02 11:12:46 -05:00
mdipierro
c109fa727c optional re.compile of generic_patterns 2012-09-02 11:05:19 -05:00
mdipierro
91c9c6fb28 R-2.0.6 2012-09-01 22:35:40 -05:00
mdipierro
e4f8896b7f fixed bug in tickets2emails, thanks Niphlod 2012-09-01 22:33:46 -05:00
mdipierro
a558af3b09 fixed bug in language file that corrupts files on language update, thanks kverdecia2 2012-09-01 22:32:12 -05:00
mdipierro
918d3bd6df fixed typo, thanks Mart 2012-09-01 07:01:38 -05:00
mdipierro
e4b6fba5ca R-2.0.5 2012-08-31 16:28:40 -05:00
mdipierro
41caa71ab0 scheduler validators, thanks Niphlod 2012-08-31 16:15:59 -05:00
mdipierro
f8786e5b6d 2.0.5 2012-08-31 16:11:56 -05:00
mdipierro
78b5f4f8aa better timezone logic 2012-08-31 16:04:13 -05:00
mdipierro
a0e4154f26 better timezone logic 2012-08-31 16:00:23 -05:00
mdipierro
3f7749cf20 R-2.0.4 2012-08-31 15:38:37 -05:00
26 changed files with 486 additions and 299 deletions

View File

@@ -23,6 +23,7 @@
- Support for Google App Engine projections, thanks Christian
- Field(... 'upload', default=path) now accepts a path to a local file as default value, if user does not upload a file. Relative path looks inside current application folder, thanks Marin
- executesql(...,fields=,columns=) allows parsing of results in Rows, thanks Anthony
- Rows.find(lambda row: bool(), limitby=(0,1))
### Auth improvements

View File

@@ -29,14 +29,14 @@ update:
wget -O gluon/contrib/simplejsonrpc.py http://rad2py.googlecode.com/hg/ide2py/simplejsonrpc.py
echo "remember that pymysql was tweaked"
src:
echo 'Version 2.0.3 ('`date +%Y-%m-%d\ %H:%M:%S`') stable' > VERSION
echo 'Version 2.0.7 ('`date +%Y-%m-%d\ %H:%M:%S`') stable' > VERSION
### rm -f all junk files
make clean
### clean up basic apps
rm -f routes.py
rm -f applications/*/sessions/*
rm -f applications/*/sessions/*
rm -f applications/*/errors/* | echo 'too many files'
rm -f applications/*/cache/*
rm -f applications/*/cache/*
rm -f applications/admin/databases/*
rm -f applications/welcome/databases/*
rm -f applications/examples/databases/*

View File

@@ -1 +1 @@
Version 2.0.3 (2012-08-31 14:27:45) stable
Version 2.0.7 (2012-09-04 17:58:46) stable

View File

@@ -244,11 +244,15 @@ def site():
DIV(T('Unable to download app because:'),PRE(str(e)))
redirect(URL(r=request))
fname = form_update.vars.url
elif form_update.accepted and form_update.vars.file:
fname = request.vars.file.filename
f = request.vars.file.file
else:
session.flash = 'No file uploaded and no URL specified'
redirect(URL(r=request))
if f:
appname = cleanpath(form_update.vars.name)
installed = app_install(appname, f,

View File

@@ -16,6 +16,8 @@
'are not used yet': 'are not used yet',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'arguments': 'arguments',
'at char %s': 'at char %s',
'at line %s': 'at line %s',
'back': '<<back',
'can be a git repo': 'can be a git repo',
'Change admin password': 'Change admin password',
@@ -58,6 +60,7 @@
'exposes': 'exposes',
'exposes:': 'exposes:',
'extends': 'extends',
'failed to compile file because:': 'failed to compile file because:',
'file does not exist': 'file does not exist',
'file saved on %s': 'file saved on %s',
'filter': 'filter',
@@ -70,7 +73,9 @@
'inspect attributes': 'inspect attributes',
'Install': 'Install',
'Installed applications': 'Installed applications',
'invalid password.': 'invalid password.',
'Key bindings': 'Key bindings',
'Language files (static strings) updated': 'Language files (static strings) updated',
'languages': 'languages',
'Languages': 'Languages',
'Last saved on:': 'Last saved on:',
@@ -85,6 +90,7 @@
'modules': 'modules',
'New application wizard': 'New application wizard',
'New simple application': 'New simple application',
'online designer': 'online designer',
'Overwrite installed app': 'Overwrite installed app',
'Pack all': 'Pack all',
'Peeking at file': 'Peeking at file',
@@ -107,6 +113,7 @@
'Save via Ajax': 'Save via Ajax',
'Saved file hash:': 'Saved file hash:',
'session': 'session',
'session expired': 'session expired',
'shell': 'shell',
'Site': 'Site',
'Start wizard': 'Start wizard',

View File

@@ -55,7 +55,7 @@ function doClickSave() {
prepareDataForSave('saved_on', jQuery("input[name='saved_on']").val()),
prepareDataForSave('saved_on', jQuery("input[name='saved_on']").val()),
prepareDataForSave('from_ajax','true')));
// console.info(area.textarea.value);
// console.info(area.textarea.value);
jQuery("input[name='saved_on']").attr('style','background-color:yellow');
jQuery("input[name='saved_on']").val('saving now...')
jQuery.ajax({
@@ -65,13 +65,13 @@ function doClickSave() {
dataType: "json",
data: dataForPost[0],
timeout: 5000,
beforeSend: function(xhr) {
beforeSend: function(xhr) {
xhr.setRequestHeader('web2py-component-location',document.location);
xhr.setRequestHeader('web2py-component-element','doClickSave');},
success: function(json,text,xhr){
success: function(json,text,xhr){
// show flash message (if any)
var flash=xhr.getResponseHeader('web2py-component-flash');
// show flash message (if any)
var flash=xhr.getResponseHeader('web2py-component-flash');
if (flash) jQuery('.flash').html(decodeURIComponent(flash)).slideDown();
else jQuery('.flash').hide();

View File

@@ -2,7 +2,9 @@
<h2>web2py&trade; {{=T('Web Framework')}}</h2>
<h3>{{=T('Login to the Administrative Interface')}}</h3>
<div>
{{if request.is_https or request.is_local:}}
<div class="form">
<form action="{{=URL(r=request)}}" method="post">
<div><input type="hidden" name="send" value="{{=send}}"/></div>
<table>
@@ -11,4 +13,6 @@
</table>
</form>
</div>
{{else:}}
<p class="help">{{=T('ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.')}}</p>
{{pass}}

File diff suppressed because one or more lines are too long

View File

@@ -60,6 +60,7 @@
'edit profile': 'modifica profilo',
'Edit This App': 'Modifica questa applicazione',
'Email and SMS': 'Email and SMS',
'Email non valida': 'Email non valida',
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
'Errors': 'Errors',
'export as csv file': 'esporta come file CSV',
@@ -95,6 +96,7 @@
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'Logged in': 'Logged in',
'Logged out': 'Logged out',
'login': 'accesso',
'Login': 'Login',
'logout': 'uscita',
@@ -113,6 +115,7 @@
'new record inserted': 'nuovo record inserito',
'next 100 rows': 'prossime 100 righe',
'No databases in this application': 'Nessun database presente in questa applicazione',
'Non può essere vuoto': 'Non può essere vuoto',
'not authorized': 'non autorizzato',
'Object or table name': 'Object or table name',
'Online examples': 'Vedere gli esempi',
@@ -166,12 +169,14 @@
'This App': 'This App',
'This is a copy of the scaffolding application': "Questa è una copia dell'applicazione di base (scaffold)",
'Timestamp': 'Ora (timestamp)',
'too short': 'too short',
'Twitter': 'Twitter',
'unable to parse csv file': 'non riesco a decodificare questo file CSV',
'Update': 'Update',
'Update:': 'Aggiorna:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Per costruire richieste (query) più complesse si usano (...)&(...) come "e" (AND), (...)|(...) come "o" (OR), e ~(...) come negazione (NOT).',
'User %(id)s Logged-in': 'User %(id)s Logged-in',
'User %(id)s Logged-out': 'User %(id)s Logged-out',
'User %(id)s Registered': 'User %(id)s Registered',
'User ID': 'ID Utente',
'Verify Password': 'Verify Password',

View File

@@ -15,9 +15,9 @@ td,th {text-align:left; padding:2px 5px 2px 5px}
th {vertical-align:middle; border-right:1px solid white}
td {vertical-align:top}
form table tr td label {text-align:left}
p,table,ol,ul {padding:0; margin: 0.5em 0}
p,table,ol,ul {padding:0; margin: 0.75em 0}
p {text-align:justify}
ol, ul {list-style-position:inside}
ol, ul {list-style-position:outside; margin-left:2em}
li {margin-bottom:0.5em}
span,input,select,textarea,button,label,a {display:inline}
img {border:0}
@@ -39,13 +39,6 @@ input[type=text],input[type=password],select{width:300px; margin-right:5px}
/* Sticky footer begin */
.wrapper {
min-height:100%;
height:auto !important;
height:100%;
margin:0 auto -8em; /* set last value to footer height plus footer vertical padding */
}
.main {
padding:20px 0 50px 0;
}

View File

@@ -129,7 +129,7 @@
</section><!--/main-->
<!-- Footer ================================================== -->
<footer class="footer">
<footer class="footer" id="footer">
{{block footer}} <!-- this is default footer -->
<div class="footer-content">
<div class="copyright pull-left">{{=T('Copyright')}} &#169; {{=request.now.year}}</div>

View File

@@ -40,7 +40,6 @@ __all__ = ['Cache', 'lazy_cache']
DEFAULT_TIME_EXPIRE = 300
class CacheAbstract(object):
"""
Abstract class for cache implementations.

View File

@@ -91,6 +91,18 @@ def _TEST():
_TEST()
"""
# Process-wide cache of compiled regular expressions, bounded so it cannot
# grow without limit when regex strings are generated dynamically.
CACHED_REGEXES = {}
CACHED_REGEXES_MAX_SIZE = 1000

def re_compile(regex):
    """Return a compiled pattern for *regex*, memoized in CACHED_REGEXES.

    When the cache reaches CACHED_REGEXES_MAX_SIZE entries it is emptied
    wholesale (a simple reset policy, cheaper than LRU bookkeeping) before
    the new pattern is compiled and stored.
    """
    compiled = CACHED_REGEXES.get(regex)
    if compiled is None:
        if len(CACHED_REGEXES) >= CACHED_REGEXES_MAX_SIZE:
            CACHED_REGEXES.clear()
        compiled = CACHED_REGEXES[regex] = re.compile(regex)
    return compiled
class mybuiltin(object):
"""
NOTE could simple use a dict and populate it,
@@ -355,7 +367,11 @@ def build_environment(request, response, session, store_current=True):
environment.update((k,getattr(v, k)) for k in v.__all__)
if not request.env:
request.env = Storage()
# Enable standard conditional models (i.e., /*.py, /[controller]/*.py, and
# /[controller]/[function]/*.py)
response.models_to_run = [r'^\w+\.py$', r'^%s/\w+\.py$' % request.controller,
r'^%s/%s/\w+\.py$' % (request.controller, request.function)]
t = environment['T'] = translator(request)
c = environment['cache'] = Cache(request)
if store_current:
@@ -485,9 +501,13 @@ def run_models_in(environment):
path = pjoin(folder, 'models')
models = listdir(path, '^\w+\.py$',0,sort=False)
compiled=False
paths = (path, pjoin(path,c), pjoin(path,c,f))
n = len(path) + 1
for model in models:
if not os.path.split(model)[0] in paths and c!='appadmin':
regex = environment['response'].models_to_run
if isinstance(regex, list):
regex = re_compile('|'.join(regex))
file = model[n:].replace(os.path.sep, '/').replace('.pyc', '.py')
if not regex.search(file) and c!= 'appadmin':
continue
elif compiled:
code = read_pyc(model)
@@ -581,10 +601,13 @@ def run_view_in(environment):
folder = request.folder
path = pjoin(folder, 'compiled')
badv = 'invalid view (%s)' % view
patterns = response.generic_patterns or []
regex = re.compile('|'.join(map(fnmatch.translate, patterns)))
short_action = '%(controller)s/%(function)s.%(extension)s' % request
allow_generic = patterns and regex.search(short_action)
if response.generic_patterns:
patterns = response.generic_patterns
regex = re_compile('|'.join(map(fnmatch.translate, patterns)))
short_action = '%(controller)s/%(function)s.%(extension)s' % request
allow_generic = regex.search(short_action)
else:
allow_generic = False
if not isinstance(view, str):
ccode = parse_template(view, pjoin(folder, 'views'),
context=environment)

View File

@@ -544,6 +544,7 @@ regex_list=re.compile('^(?:(?:(#{1,6})|(?:(\.+|\++|\-+)(\.)?))\s+)?(.*)$')
regex_bq_headline=re.compile('^(?:(\.+|\++|\-+)(\.)?\s+)?(-{3}-*)$')
regex_tq=re.compile('^(-{3}-*)(?::(?P<c>[a-zA-Z][_a-zA-Z\-\d]*)(?:\[(?P<p>[a-zA-Z][_a-zA-Z\-\d]*)\])?)?$')
regex_proto = re.compile(r'(?<!["\w>/=])(?P<p>\w+):(?P<k>\w+://[\w\d\-+=?%&/:.]+)', re.M)
regex_auto = re.compile(r'(?<!["\w>/=])(?P<k>\w+://[^\s\'\"\]\}\)]+)',re.M)
regex_auto = re.compile(r'(?<!["\w>/=])(?P<k>\w+://[\w\d\-+_=?%&/:.]+)',re.M)
regex_link=re.compile(r'('+LINK+r')|\[\[(?P<s>.+?)\]\]')
regex_link_level2=re.compile(r'^(?P<t>\S.*?)?(?:\s+\[(?P<a>.+?)\])?(?:\s+(?P<k>\S+))?(?:\s+(?P<p>popup))?\s*$')
@@ -620,7 +621,7 @@ def render(text,
- class_prefix is a prefix for ALL classes in markmin text. E.g. if class_prefix='my_'
then for ``test``:cls class will be changed to "my_cls" (default value is '')
- id_prefix is prefix for ALL ids in markmin text (default value is 'markmin_'). E.g.:
-- [[id]] will be converted to <a name="markmin_id"></a>
-- [[id]] will be converted to <div class="anchor" id="markmin_id"></div>
-- [[link #id]] will be converted to <a href="#markmin_id">link</a>
-- ``test``:cls[id] will be converted to <code class="cls" id="markmin_id">test</code>
@@ -832,22 +833,11 @@ def render(text,
# this is experimental @{function/args}
# turns into a digitally signed URL
def u1(match,URL=URL):
a,c,f,args = match.group('a','c','f','args')
a,c,f,args = match.group('a','c','f','args')
return URL(a=a or None,c=c or None,f = f or None,
args=args.split('/'), scheme=True, host=True)
text = regex_URL.sub(u1,text)
if environment:
def u2(match, environment=environment):
f = environment.get(match.group('a'), match.group(0))
if callable(f):
try:
f = f(match.group('b'))
except Exception, e:
f = 'ERROR: %s' % e
return str(f)
text = regex_env.sub(u2, text)
if latex == 'google':
text = regex_dd.sub('``\g<latex>``:latex ', text)
@@ -1314,12 +1304,24 @@ def render(text,
return '<code%s%s>%s</code>' % (cls, id, escape(code[beg:end]))
text = regex_expand_meta.sub(expand_meta, text)
text = text.translate(ttab_out)
if environment:
def u2(match, environment=environment):
f = environment.get(match.group('a'), match.group(0))
if callable(f):
try:
f = f(match.group('b'))
except Exception, e:
f = 'ERROR: %s' % e
return str(f)
text = regex_env.sub(u2, text)
return text
def markmin2html(text, extra={}, allowed={}, sep='p',
def markmin2html(text, extra={}, allowed={}, sep='p',
autolinks='default', protolinks='default',
class_prefix='', id_prefix='markmin_', pretty_print=False):
return render(text, extra, allowed, sep,
class_prefix='', id_prefix='markmin_', pretty_print=False):
return render(text, extra, allowed, sep,
autolinks=autolinks, protolinks=protolinks,
class_prefix=class_prefix, id_prefix=id_prefix,
pretty_print=pretty_print)
@@ -1403,3 +1405,4 @@ if __name__ == '__main__':
print " file.markmin [file.css] - process file.markmin + built in file.css (optional)"
print " file.markmin [@path_to/css] - process file.markmin + link path_to/css (optional)"
run_doctests()

View File

@@ -66,12 +66,14 @@ class _BaseImporter(object):
help the management of this aspect.
"""
def __init__(self):
self._STANDARD_PYTHON_IMPORTER = _STANDARD_PYTHON_IMPORTER
def __call__(self, name, globals=None, locals=None,
fromlist=None, level=-1):
"""
The import method itself.
"""
return _STANDARD_PYTHON_IMPORTER(name,
return self._STANDARD_PYTHON_IMPORTER(name,
globals,
locals,
fromlist,
@@ -226,7 +228,8 @@ class _Web2pyImporter(_BaseImporter):
"""
global DEBUG
super(_Web2pyImporter, self).__init__()
self.super_class = super(_Web2pyImporter, self)
self.super_class.__init__()
self.web2py_path = web2py_path
self.__web2py_path_os_path_sep = self.web2py_path+os.path.sep
self.__web2py_path_os_path_sep_len = len(self.__web2py_path_os_path_sep)
@@ -284,16 +287,16 @@ class _Web2pyImporter(_BaseImporter):
globals, locals, fromlist, level)
else:
# import like "from x import a, b, ..."
return super(_Web2pyImporter, self) \
return self.super_class \
.__call__(modules_prefix+"."+name,
globals, locals, fromlist, level)
except ImportError, e:
try:
return super(_Web2pyImporter, self).__call__(name, globals, locals,
return self.super_class.__call__(name, globals, locals,
fromlist, level)
except ImportError, e1:
raise e
return super(_Web2pyImporter, self).__call__(name, globals, locals,
return self.super_class.__call__(name, globals, locals,
fromlist, level)
def __import__dot(self, prefix, name, globals, locals, fromlist,

View File

@@ -227,15 +227,21 @@ thread = threading.local()
# internal representation of tables with field
# <table>.<field>, tables and fields may only be [a-zA-Z0-9_]
regex_type = re.compile('^([\w\_\:]+)')
regex_dbname = re.compile('^(\w+)(\:\w+)*')
regex_safe = re.compile('^\w+$')
regex_table_field = re.compile('^(\w+)\.(\w+)$')
regex_content = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
regex_cleanup_fn = re.compile('[\'"\s;]+')
string_unpack=re.compile('(?<!\|)\|(?!\|)')
regex_python_keywords = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
regex_select_as_parser = re.compile("\s+AS\s+(\S+)")
REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[a-zA-Z]\w*$')
# list of drivers will be built on the fly
# and lists only what is available
@@ -434,7 +440,7 @@ def AND(a,b):
def IDENTITY(x): return x
def varquote_aux(name,quotestr='%s'):
return name if regex_safe.match(name) else quotestr % name
return name if REGEX_W.match(name) else quotestr % name
if 'google' in DRIVERS:
@@ -1397,7 +1403,7 @@ class BaseAdapter(ConnectionPool):
if isinstance(item,SQLALL):
new_fields += item._table
elif isinstance(item,str):
if regex_table_field.match(item):
if REGEX_TABLE_DOT_FIELD.match(item):
tablename,fieldname = item.split('.')
append(db[tablename][fieldname])
else:
@@ -1419,7 +1425,7 @@ class BaseAdapter(ConnectionPool):
tablenames = tables(query)
for field in fields:
if isinstance(field, basestring) \
and regex_table_field.match(field):
and REGEX_TABLE_DOT_FIELD.match(field):
tn,fn = field.split('.')
field = self.db[tn][fn]
for tablename in tables(field):
@@ -1762,7 +1768,7 @@ class BaseAdapter(ConnectionPool):
elif field_type == 'blob' and not blob_decode:
return value
else:
key = regex_type.match(field_type).group(0)
key = REGEX_TYPE.match(field_type).group(0)
return self.parsemap[key](value,field_type)
def parse_reference(self, value, field_type):
@@ -1793,18 +1799,18 @@ class BaseAdapter(ConnectionPool):
def parse_datetime(self, value, field_type):
if not isinstance(value, datetime.datetime):
if '+' in value:
value,tz = value.split('+')
value = str(value)
date_part,time_part,timezone = value[:10],value[11:19],value[19:]
if '+' in timezone:
ms,tz = timezone.split('+')
h,m = tz.split(':')
dt = datetime.timedelta(seconds=3600*int(h)+60*int(m))
elif '-' in value:
value,tz = value.split('-')
elif '-' in timezone:
ms,tz = timezone.split('-')
h,m = tz.split(':')
dt = -datetime.timedelta(seconds=3600*int(h)+60*int(m))
else:
dt = None
date_part, time_part = (
str(value).replace('T',' ')+' ').split(' ',1)
(y, m, d) = map(int,date_part.split('-'))
time_parts = time_part and time_part.split(':')[:3] or (0,0,0)
while len(time_parts)<3: time_parts.append(0)
@@ -1877,7 +1883,7 @@ class BaseAdapter(ConnectionPool):
new_rows = []
tmps = []
for colname in colnames:
if not regex_table_field.match(colname):
if not REGEX_TABLE_DOT_FIELD.match(colname):
tmps.append(None)
else:
(tablename, fieldname) = colname.split('.')
@@ -1933,7 +1939,7 @@ class BaseAdapter(ConnectionPool):
self.parse_value(value,
fields[j].type,blob_decode)
new_column_name = \
regex_select_as_parser.search(colname)
REGEX_SELECT_AS_PARSER.search(colname)
if not new_column_name is None:
column_name = new_column_name.groups(0)
setattr(new_row,column_name[0],value)
@@ -2283,6 +2289,8 @@ class MySQLAdapter(BaseAdapter):
def concat_add(self,table):
return '; ALTER TABLE %s ADD ' % table
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}):
@@ -2295,7 +2303,7 @@ class MySQLAdapter(BaseAdapter):
self.db_codec = db_codec
self.find_or_make_work_folder()
ruri = uri.split('://',1)[1]
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, \
"Invalid URI string in DAL: %s" % self.uri
@@ -2371,9 +2379,9 @@ class PostgreSQLAdapter(BaseAdapter):
if self.driver_name == 'psycopg2':
return psycopg2_adapt(obj).getquoted()
elif self.driver_name == 'pg8000':
return str(obj).replace("%","%%").replace("'","''")
return "'%s'" % str(obj).replace("%","%%").replace("'","''")
else:
return str(obj).replace("'","''")
return "'%s'" % str(obj).replace("'","''")
def sequence_name(self,table):
return '%s_id_Seq' % table
@@ -2407,6 +2415,8 @@ class PostgreSQLAdapter(BaseAdapter):
# % (table._tablename, table._fieldname, table._sequence_name))
self.execute(query)
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}, srid=4326):
@@ -2420,7 +2430,7 @@ class PostgreSQLAdapter(BaseAdapter):
self.srid = srid
self.find_or_make_work_folder()
ruri = uri.split('://',1)[1]
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, "Invalid URI string in DAL"
user = credential_decoder(m.group('user'))
@@ -2631,6 +2641,8 @@ class NewPostgreSQLAdapter(PostgreSQLAdapter):
class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
drivers = ('zxJDBC',)
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}):
@@ -2643,7 +2655,7 @@ class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
self.db_codec = db_codec
self.find_or_make_work_folder()
ruri = uri.split('://',1)[1]
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, "Invalid URI string in DAL"
user = credential_decoder(m.group('user'))
@@ -2778,6 +2790,7 @@ class OracleAdapter(BaseAdapter):
def after_connection(self):
self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
def execute(self, command, args=None):
@@ -2905,6 +2918,10 @@ class MSSQLAdapter(BaseAdapter):
return '0'
return None
REGEX_DSN = re.compile('^(?P<dsn>.+)$')
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}, fake_connect=False, srid=4326):
@@ -2921,7 +2938,7 @@ class MSSQLAdapter(BaseAdapter):
ruri = uri.split('://',1)[1]
if '@' not in ruri:
try:
m = re.compile('^(?P<dsn>.+)$').match(ruri)
m = self.REGEX_DSN.match(ruri)
if not m:
raise SyntaxError, \
'Parsing uri string(%s) has no result' % self.uri
@@ -2934,7 +2951,7 @@ class MSSQLAdapter(BaseAdapter):
# was cnxn = 'DSN=%s' % dsn
cnxn = dsn
else:
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, \
"Invalid URI string in DAL: %s" % self.uri
@@ -2955,9 +2972,8 @@ class MSSQLAdapter(BaseAdapter):
# (in the form of arg1=value1&arg2=value2&...)
# Default values (drivers like FreeTDS insist on uppercase parameter keys)
argsdict = { 'DRIVER':'{SQL Server}' }
urlargs = m.group('urlargs') or ''
argpattern = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
for argmatch in argpattern.finditer(urlargs):
urlargs = m.group('urlargs') or ''
for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
@@ -3117,7 +3133,7 @@ class SybaseAdapter(MSSQLAdapter):
ruri = uri.split('://',1)[1]
if '@' not in ruri:
try:
m = re.compile('^(?P<dsn>.+)$').match(ruri)
m = self.REGEX_DSN.match(ruri)
if not m:
raise SyntaxError, \
'Parsing uri string(%s) has no result' % self.uri
@@ -3128,7 +3144,7 @@ class SybaseAdapter(MSSQLAdapter):
logger.error('NdGpatch error')
raise e
else:
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$').match(ruri)
m = self.REGEX_URI.match(uri)
if not m:
raise SyntaxError, \
"Invalid URI string in DAL: %s" % self.uri
@@ -3220,6 +3236,8 @@ class FireBirdAdapter(BaseAdapter):
return ['DELETE FROM %s;' % table._tablename,
'SET GENERATOR %s TO 0;' % table._sequence_name]
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}):
@@ -3232,7 +3250,7 @@ class FireBirdAdapter(BaseAdapter):
self.db_codec = db_codec
self.find_or_make_work_folder()
ruri = uri.split('://',1)[1]
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, "Invalid URI string in DAL: %s" % self.uri
user = credential_decoder(m.group('user'))
@@ -3277,6 +3295,8 @@ class FireBirdAdapter(BaseAdapter):
class FireBirdEmbeddedAdapter(FireBirdAdapter):
drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}):
@@ -3289,7 +3309,7 @@ class FireBirdEmbeddedAdapter(FireBirdAdapter):
self.db_codec = db_codec
self.find_or_make_work_folder()
ruri = uri.split('://',1)[1]
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, \
"Invalid URI string in DAL: %s" % self.uri
@@ -3383,6 +3403,8 @@ class InformixAdapter(BaseAdapter):
return "to_date('%s','%%Y-%%m-%%d %%H:%%M:%%S')" % obj
return None
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}):
@@ -3395,7 +3417,7 @@ class InformixAdapter(BaseAdapter):
self.db_codec = db_codec
self.find_or_make_work_folder()
ruri = uri.split('://',1)[1]
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, \
"Invalid URI string in DAL: %s" % self.uri
@@ -3584,7 +3606,7 @@ INGRES_SEQNAME='ii***lineitemsequence' # NOTE invalid database object name
# to be a delimited identifier)
class IngresAdapter(BaseAdapter):
drivers = ('ingredbi',)
drivers = ('ingresdbi',)
types = {
'boolean': 'CHAR(1)',
@@ -3762,6 +3784,9 @@ class SAPDBAdapter(BaseAdapter):
% (table._tablename, table._id.name, table._sequence_name))
self.execute(query)
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}):
@@ -3774,7 +3799,7 @@ class SAPDBAdapter(BaseAdapter):
self.db_codec = db_codec
self.find_or_make_work_folder()
ruri = uri.split('://',1)[1]
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, "Invalid URI string in DAL"
user = credential_decoder(m.group('user'))
@@ -3803,6 +3828,8 @@ class SAPDBAdapter(BaseAdapter):
class CubridAdapter(MySQLAdapter):
drivers = ('cubriddb',)
REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}):
@@ -3815,7 +3842,7 @@ class CubridAdapter(MySQLAdapter):
self.db_codec = db_codec
self.find_or_make_work_folder()
ruri = uri.split('://',1)[1]
m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, \
"Invalid URI string in DAL: %s" % self.uri
@@ -3934,6 +3961,8 @@ class UseDatabaseStoredFile:
class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
uploads_in_blob = True
REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')
def __init__(self, db, uri='google:sql://realm:domain/database',
pool_size=0, folder=None, db_codec='UTF-8',
credential_decoder=IDENTITY, driver_args={},
@@ -3947,7 +3976,7 @@ class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
self.folder = folder or pjoin('$HOME',thread.folder.split(
os.sep+'applications'+os.sep,1)[1])
ruri = uri.split("://")[1]
m = re.compile('^(?P<instance>.*)/(?P<db>.*)$').match(ruri)
m = self.REGEX_URI.match(ruri)
if not m:
raise SyntaxError, "Invalid URI string in SQLDB: %s" % self.uri
instance = credential_decoder(m.group('instance'))
@@ -4143,6 +4172,8 @@ class GoogleDatastoreAdapter(NoSQLAdapter):
def file_open(self, filename, mode='rb', lock=True): pass
def file_close(self, fileobj): pass
REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)')
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=IDENTITY, driver_args={},
adapter_args={}):
@@ -4174,7 +4205,7 @@ class GoogleDatastoreAdapter(NoSQLAdapter):
db['_lastsql'] = ''
self.db_codec = 'UTF-8'
self.pool_size = 0
match = re.compile('.*://(?P<namespace>.+)').match(uri)
match = self.REGEX_NAMESPACE.match(uri)
if match:
namespace_manager.set_namespace(match.group('namespace'))
@@ -4734,11 +4765,8 @@ def cleanup(text):
"""
validates that the given text is clean: only contains [0-9a-zA-Z_]
"""
if re.compile('[^0-9a-zA-Z_]').findall(text):
raise SyntaxError, \
'only [0-9a-zA-Z_] allowed in table and field names, received %s' \
% text
if not REGEX_ALPHANUMERIC.match(text):
raise SyntaxError, 'invalid table or field name: %s' % text
return text
class MongoDBAdapter(NoSQLAdapter):
@@ -5474,6 +5502,8 @@ class IMAPAdapter(NoSQLAdapter):
dbengine = 'imap'
REGEX_URI = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$')
def __init__(self,
db,
uri,
@@ -5521,7 +5551,7 @@ class IMAPAdapter(NoSQLAdapter):
db['_lastsql'] = ''
m = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$').match(uri)
m = self.REGEX_URI.match(uri)
user = m.group('user')
password = m.group('password')
host = m.group('host')
@@ -6364,7 +6394,8 @@ def bar_decode_integer(value):
return [int(x) for x in value.split('|') if x.strip()]
def bar_decode_string(value):
    """
    Decode a bar-encoded string list.

    Strips the enclosing '|' delimiters from *value*, splits on single
    '|' separators (REGEX_UNPACK, which skips escaped '||' pairs), and
    un-escapes each item ('||' -> '|').  Empty/whitespace items are
    dropped.  Inverse of bar_encode for string values.
    """
    # NOTE: the diff residue kept the obsolete `string_unpack` line as an
    # unconditional first return, making the new code unreachable; only
    # the REGEX_UNPACK version is kept here.
    return [x.replace('||', '|') for x in
            REGEX_UNPACK.split(value[1:-1]) if x.strip()]
class Row(object):
@@ -6379,7 +6410,7 @@ class Row(object):
def __getitem__(self, key):
key=str(key)
m = regex_table_field.match(key)
m = REGEX_TABLE_DOT_FIELD.match(key)
if key in self.get('_extra',{}):
return self._extra[key]
elif m:
@@ -6429,7 +6460,7 @@ class Row(object):
return '<Row %s>' % self.__dict__
def __int__(self):
return dict.__getitem__(self,'id')
return object.__getattribute__(self,'id')
def __eq__(self,other):
try:
@@ -6506,11 +6537,10 @@ def smart_query(fields,text):
n = str(field).lower()
if not n in field_map:
field_map[n] = field
re_constants = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
constants = {}
i = 0
while True:
m = re_constants.search(text)
m = REGEX_CONST_STRING.search(text)
if not m: break
text = text[:m.start()]+('#%i' % i)+text[m.end():]
constants[str(i)] = m.group()[1:-1]
@@ -6720,7 +6750,7 @@ class DAL(object):
try:
if is_jdbc and not uri.startswith('jdbc:'):
uri = 'jdbc:'+uri
self._dbname = regex_dbname.match(uri).group()
self._dbname = REGEX_DBNAME.match(uri).group()
if not self._dbname in ADAPTERS:
raise SyntaxError, "Error in URI '%s' or database not supported" % self._dbname
# notice that driver args or {} else driver_args
@@ -6833,8 +6863,8 @@ def index():
"""
db = self
re1 = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
re2 = re.compile('^.+\[.+\]$')
re1 = REGEX_SEARCH_PATTERN
re2 = REGEX_SQUARE_BRACKETS
def auto_table(table,base='',depth=0):
patterns = []
@@ -7037,7 +7067,7 @@ def index():
if not isinstance(tablename,str):
raise SyntaxError, "missing table name"
elif tablename.startswith('_') or hasattr(self,tablename) or \
regex_python_keywords.match(tablename):
REGEX_PYTHON_KEYWORDS.match(tablename):
raise SyntaxError, 'invalid table name: %s' % tablename
elif tablename in self.tables:
raise SyntaxError, 'table already defined: %s' % tablename
@@ -7168,21 +7198,34 @@ def index():
[{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
Added 2012-08-24 "fields" optional argument. If not None, the
results cursor returned by the DB driver will be converted to a
DAL Rows object using the db._adapter.parse() method. This requires
specifying the "fields" argument as a list of DAL Field objects
that match the fields returned from the DB. The Field objects should
be part of one or more Table objects defined on the DAL object.
The "fields" list can include one or more DAL Table objects in addition
to or instead of including Field objects, or it can be just a single
table (not in a list). In that case, the Field objects will be
extracted from the table(s).
Added 2012-08-24 "fields" and "colnames" optional arguments. If either
is provided, the results cursor returned by the DB driver will be
converted to a DAL Rows object using the db._adapter.parse() method.
The "fields" argument is a list of DAL Field objects that match the
fields returned from the DB. The Field objects should be part of one or
more Table objects defined on the DAL object. The "fields" list can
include one or more DAL Table objects in addition to or instead of
including Field objects, or it can be just a single table (not in a
list). In that case, the Field objects will be extracted from the
table(s).
The field names will be extracted from the Field objects, or optionally,
a list of field names can be provided (in tablename.fieldname format)
via the "colnames" argument. Note, the fields and colnames must be in
the same order as the fields in the results cursor returned from the DB.
Instead of specifying the "fields" argument, the "colnames" argument
can be specified as a list of field names in tablename.fieldname format.
Again, these should represent tables and fields defined on the DAL
object.
It is also possible to specify both "fields" and the associated
"colnames". In that case, "fields" can also include DAL Expression
objects in addition to Field objects. For Field objects in "fields",
the associated "colnames" must still be in tablename.fieldname format.
For Expression objects in "fields", the associated "colnames" can
be any arbitrary labels.
Note, the DAL Table objects referred to by "fields" or "colnames" can
be dummy tables and do not have to represent any real tables in the
database. Also, note that the "fields" and "colnames" must be in the
same order as the fields in the results cursor returned from the DB.
"""
adapter = self._adapter
if placeholders:
@@ -7203,8 +7246,12 @@ def index():
# convert the list for each row into a dictionary so it's
# easier to work with. row['field_name'] rather than row[0]
return [dict(zip(fields,row)) for row in data]
data = adapter.cursor.fetchall()
if fields:
try:
data = adapter.cursor.fetchall()
except:
return None
if fields or colnames:
fields = [] if fields is None else fields
if not isinstance(fields, list):
fields = [fields]
extracted_fields = []
@@ -8348,7 +8395,7 @@ class Field(Expression):
self.second = None
self.name = fieldname = cleanup(fieldname)
if not isinstance(fieldname,str) or hasattr(Table,fieldname) or \
fieldname[0] == '_' or regex_python_keywords.match(fieldname):
fieldname[0] == '_' or REGEX_PYTHON_KEYWORDS.match(fieldname):
raise SyntaxError, 'Field: invalid field name: %s' % fieldname
self.type = type if not isinstance(type, Table) else 'reference %s' % type
self.length = length if not length is None else DEFAULTLENGTH.get(self.type,512)
@@ -8402,8 +8449,8 @@ class Field(Expression):
elif not filename:
filename = file.name
filename = os.path.basename(filename.replace('/', os.sep)\
.replace('\\', os.sep))
m = re.compile('\.(?P<e>\w{1,5})$').search(filename)
.replace('\\', os.sep))
m = REGEX_STORE_PATTERN.search(filename)
extension = m and m.group('e') or 'txt'
uuid_key = web2py_uuid().replace('-', '')[-16:]
encoded_filename = base64.b16encode(filename).lower()
@@ -8456,7 +8503,7 @@ class Field(Expression):
raise http.HTTP(404)
if self.authorize and not self.authorize(row):
raise http.HTTP(403)
m = regex_content.match(name)
m = REGEX_UPLOAD_PATTERN.match(name)
if not m or not self.isattachment:
raise TypeError, 'Can\'t retrieve %s' % name
file_properties = self.retrieve_file_properties(name,path)
@@ -8481,11 +8528,11 @@ class Field(Expression):
if self.custom_retrieve_file_properties:
return self.custom_retrieve_file_properties(name, path)
try:
m = regex_content.match(name)
m = REGEX_UPLOAD_PATTERN.match(name)
if not m or not self.isattachment:
raise TypeError, 'Can\'t retrieve %s file properties' % name
filename = base64.b16decode(m.group('name'), True)
filename = regex_cleanup_fn.sub('_', filename)
filename = REGEX_CLEANUP_FN.sub('_', filename)
except (TypeError, AttributeError):
filename = name
if isinstance(self_uploadfield, str): # ## if file is in DB
@@ -8595,7 +8642,6 @@ class Query(object):
return Query(self.db,self.db._adapter.NOT,self)
regex_quotes = re.compile("'[^']*'")
def xorify(orderby):
@@ -9104,7 +9150,7 @@ class Rows(object):
for record in self:
row = []
for col in colnames:
if not regex_table_field.match(col):
if not REGEX_TABLE_DOT_FIELD.match(col):
row.append(record._extra[col])
else:
(t, f) = col.split('.')
@@ -9123,8 +9169,7 @@ class Rows(object):
def xml(self,strict=False,row_name='row',rows_name='rows'):
"""
serializes the table using sqlhtml.SQLTABLE (if present)
"""
alphanumeric = re.compile('[a-zA-Z]\w*')
"""
if strict:
ncols = len(self.colnames)
def f(row,field,indent=' '):
@@ -9138,9 +9183,8 @@ class Rows(object):
indent,
field)
elif not callable(row):
if alphanumeric.match(field):
return '%s<%s>%s</%s>' % \
(indent,field,row,field)
if REGEX_ALPHANUMERIC.match(field):
return '%s<%s>%s</%s>' % (indent,field,row,field)
else:
return '%s<extra name="%s">%s</extra>' % \
(indent,field,row)
@@ -9164,7 +9208,7 @@ class Rows(object):
def inner_loop(record, col):
(t, f) = col.split('.')
res = None
if not regex_table_field.match(col):
if not REGEX_TABLE_DOT_FIELD.match(col):
key = col
res = record._extra[col]
else:

View File

@@ -800,8 +800,8 @@ def findT(path, language='en'):
"""
must be run by the admin app
"""
filename = ospath.join(path, 'languages', language + '.py')
sentences = read_dict(filename)
lang_file = ospath.join(path, 'languages', language + '.py')
sentences = read_dict(lang_file)
mp = ospath.join(path, 'models')
cp = ospath.join(path, 'controllers')
vp = ospath.join(path, 'views')
@@ -830,8 +830,9 @@ def findT(path, language='en'):
'en' if language in ('default', 'en') else language)
if not '!langname!' in sentences:
sentences['!langname!'] = (
'English' if language in ('default', 'en') else sentences['!langcode!'])
write_dict(filename, sentences)
'English' if language in ('default', 'en')
else sentences['!langcode!'])
write_dict(lang_file, sentences)
### important to allow safe session.flash=T(....)
def lazyT_unpickle(data):

View File

@@ -91,7 +91,8 @@ from validators import CRYPT
from cache import Cache
from html import URL, xmlescape
from utils import is_valid_ip_address
from rewrite import load, url_in, thread as rwthread, try_rewrite_on_error
from rewrite import load, url_in, thread as rwthread, \
try_rewrite_on_error, fixup_missing_path_info
import newcron
__all__ = ['wsgibase', 'save_password', 'appfactory', 'HttpServer']
@@ -148,10 +149,11 @@ def copystream_progress(request, chunk_size= 10**5):
and stores progress upload status in cache.ram
X-Progress-ID:length and X-Progress-ID:uploaded
"""
if not request.env.content_length:
env = request.env
if not env.content_length:
return cStringIO.StringIO()
source = request.env.wsgi_input
size = int(request.env.content_length)
source = env.wsgi_input
size = int(env.content_length)
dest = tempfile.TemporaryFile()
if not 'X-Progress-ID' in request.vars:
copystream(source, dest, size, chunk_size)
@@ -274,7 +276,8 @@ def environ_aux(environ,request):
def parse_get_post_vars(request, environ):
# always parse variables in URL for GET, POST, PUT, DELETE, etc. in get_vars
dget = cgi.parse_qsl(request.env.query_string or '', keep_blank_values=1)
env = request.env
dget = cgi.parse_qsl(env.query_string or '', keep_blank_values=1)
for (key, value) in dget:
if key in request.get_vars:
if isinstance(request.get_vars[key], list):
@@ -290,7 +293,7 @@ def parse_get_post_vars(request, environ):
request.body = body = copystream_progress(request)
except IOError:
raise HTTP(400,"Bad Request - HTTP body is incomplete")
if (body and request.env.request_method in ('POST', 'PUT', 'BOTH')):
if (body and env.request_method in ('POST', 'PUT', 'BOTH')):
dpost = cgi.FieldStorage(fp=body,environ=environ,keep_blank_values=1)
# The same detection used by FieldStorage to detect multipart POSTs
is_multipart = dpost.type[:10] == 'multipart/'
@@ -382,26 +385,7 @@ def wsgibase(environ, responder):
# serve file if static
# ##################################################
eget = environ.get
if not eget('PATH_INFO') and eget('REQUEST_URI'):
# for fcgi, get path_info and
# query_string from request_uri
items = environ['REQUEST_URI'].split('?')
environ['PATH_INFO'] = items[0]
if len(items) > 1:
environ['QUERY_STRING'] = items[1]
else:
environ['QUERY_STRING'] = ''
elif not eget('REQUEST_URI'):
if eget('QUERY_STRING'):
environ['REQUEST_URI'] = eget('PATH_INFO') + '?' + eget('QUERY_STRING')
else:
environ['REQUEST_URI'] = eget('PATH_INFO')
if not eget('HTTP_HOST'):
environ['HTTP_HOST'] = \
eget('SERVER_NAME') + ':' + eget('SERVER_PORT')
fixup_missing_path_info(environ)
(static_file, environ) = url_in(request, environ)
if static_file:
@@ -438,6 +422,7 @@ def wsgibase(environ, responder):
is_https = env.wsgi_url_scheme \
in ['https', 'HTTPS'] or env.https=='on')
request.uuid = request.compute_uuid() # requires client
request.url = environ['PATH_INFO']
# ##################################################
# access the requested application
@@ -460,9 +445,6 @@ def wsgibase(environ, responder):
elif not request.is_local and \
exists(pjoin(request.folder,'DISABLED')):
raise HTTP(503, "<html><body><h1>Temporarily down for maintenance</h1></body></html>")
request.url = URL(r=request,
args=request.args,
extension=request.raw_extension)
# ##################################################
# build missing folders
@@ -492,9 +474,9 @@ def wsgibase(environ, responder):
# load cookies
# ##################################################
if request.env.http_cookie:
if env.http_cookie:
try:
request.cookies.load(request.env.http_cookie)
request.cookies.load(env.http_cookie)
except Cookie.CookieError, e:
pass # invalid cookies
@@ -502,7 +484,8 @@ def wsgibase(environ, responder):
# try load session or create new session file
# ##################################################
session.connect(request, response)
if not env.web2py_disable_session:
session.connect(request, response)
# ##################################################
# set no-cache headers
@@ -558,7 +541,7 @@ def wsgibase(environ, responder):
# if session not in db try store session on filesystem
# this must be done after trying to commit database!
# ##################################################
session._try_store_on_disk(request, response)
# ##################################################
@@ -566,7 +549,7 @@ def wsgibase(environ, responder):
# ##################################################
if request.cid:
rheaders = response.headers
rheaders = http_response.headers
if response.flash and \
not 'web2py-component-flash' in rheaders:
rheaders['web2py-component-flash'] = \

View File

@@ -38,6 +38,7 @@ thread = threading.local() # thread-local storage for routing params
regex_at = re.compile(r'(?<!\\)\$[a-zA-Z]\w*')
regex_anything = re.compile(r'(?<!\\)\$anything')
regex_redirect = re.compile(r'(\d+)->(.*)')
regex_full_url = re.compile(r'^(?P<scheme>http|https|HTTP|HTTPS)\://(?P<host>[^/]*)(?P<uri>.*)')
def _router_default():
"return new copy of default base router"
@@ -134,6 +135,27 @@ ROUTER_BASE_KEYS = set(
# filter_err: helper for doctest & unittest
# regex_filter_out: doctest
def fixup_missing_path_info(environ):
    """
    Normalize a WSGI/CGI environ dict in place.

    Some gateways (notably fcgi) supply only REQUEST_URI, others only
    PATH_INFO/QUERY_STRING.  Whichever side is missing is reconstructed
    from the other, and HTTP_HOST is synthesized from
    SERVER_NAME:SERVER_PORT when absent.
    """
    path_info = environ.get('PATH_INFO')
    request_uri = environ.get('REQUEST_URI')
    if not path_info and request_uri:
        # fcgi case: derive path and query string from REQUEST_URI
        parts = request_uri.split('?')
        path_info = parts[0]
        environ['PATH_INFO'] = path_info
        environ['QUERY_STRING'] = parts[1] if len(parts) > 1 else ''
    elif not request_uri:
        # rebuild REQUEST_URI from the path plus any query string
        query_string = environ.get('QUERY_STRING')
        if query_string:
            environ['REQUEST_URI'] = '%s?%s' % (path_info, query_string)
        else:
            environ['REQUEST_URI'] = path_info
    if not environ.get('HTTP_HOST'):
        environ['HTTP_HOST'] = '%s:%s' % (environ.get('SERVER_NAME'),
                                          environ.get('SERVER_PORT'))
def url_in(request, environ):
"parse and rewrite incoming URL"
if routers:
@@ -286,8 +308,8 @@ def load(routes='routes.py', app=None, data=None, rdict=None):
for sym in ('routes_app', 'routes_in', 'routes_out'):
if sym in symbols:
for (k, v) in symbols[sym]:
p[sym].append(compile_regex(k, v))
for items in symbols[sym]:
p[sym].append(compile_regex(*items))
for sym in ('routes_onerror', 'routes_apps_raw',
'error_handler','error_message', 'error_message_ticket',
'default_application','default_controller', 'default_function',
@@ -349,7 +371,7 @@ def load(routes='routes.py', app=None, data=None, rdict=None):
log_rewrite('URL rewrite is on. configuration in %s' % path)
def compile_regex(k, v):
def compile_regex(k, v, env=None):
"""
Preprocess and compile the regular expressions in routes_app/in/out
The resulting regex will match a pattern of the form:
@@ -383,7 +405,7 @@ def compile_regex(k, v):
# same for replacement pattern, but with \g
for item in regex_at.findall(v):
v = v.replace(item, r'\g<%s>' % item[1:])
return (re.compile(k, re.DOTALL), v)
return (re.compile(k, re.DOTALL), v, env or {})
def load_routers(all_apps):
"load-time post-processing of routers"
@@ -497,8 +519,9 @@ def regex_uri(e, regexes, tag, default=None):
(e.get('REMOTE_ADDR','localhost'),
e.get('wsgi.url_scheme', 'http').lower(), host,
e.get('REQUEST_METHOD', 'get').lower(), path)
for (regex, value) in regexes:
for (regex, value, custom_env) in regexes:
if regex.match(key):
e.update(custom_env)
rewritten = regex.sub(value, key)
log_rewrite('%s: [%s] [%s] -> %s' % (tag, key, value, rewritten))
return rewritten
@@ -686,7 +709,7 @@ def regex_filter_out(url, e=None):
e.get('request_method', 'get').lower(), items[0])
else:
items[0] = ':http://localhost:get %s' % items[0]
for (regex, value) in thread.routes.routes_out:
for (regex, value, tmp) in thread.routes.routes_out:
if regex.match(items[0]):
rewritten = '?'.join([regex.sub(value, items[0])] + items[1:])
log_rewrite('routes_out: [%s] -> %s' % (url, rewritten))
@@ -695,11 +718,14 @@ def regex_filter_out(url, e=None):
return url
def filter_url(url, method='get', remote='0.0.0.0', out=False, app=False, lang=None,
domain=(None,None), env=False, scheme=None, host=None, port=None):
"doctest/unittest interface to regex_filter_in() and regex_filter_out()"
regex_url = re.compile(r'^(?P<scheme>http|https|HTTP|HTTPS)\://(?P<host>[^/]*)(?P<uri>.*)')
match = regex_url.match(url)
def filter_url(url, method='get', remote='0.0.0.0',
out=False, app=False, lang=None,
domain=(None,None), env=False, scheme=None,
host=None, port=None):
"""
doctest/unittest interface to regex_filter_in() and regex_filter_out()
"""
match = regex_full_url.match(url)
urlscheme = match.group('scheme').lower()
urlhost = match.group('host').lower()
uri = match.group('uri')

View File

@@ -88,7 +88,7 @@ except:
from simplejson import loads, dumps
from gluon import DAL, Field, IS_NOT_EMPTY, IS_IN_SET, IS_NOT_IN_DB
from gluon import DAL, Field, IS_NOT_EMPTY, IS_IN_SET, IS_NOT_IN_DB, IS_INT_IN_RANGE
from gluon.utils import web2py_uuid
@@ -454,15 +454,20 @@ class Scheduler(MetaScheduler):
Field('args','text',default='[]',requires=TYPE(list)),
Field('vars','text',default='{}',requires=TYPE(dict)),
Field('enabled','boolean',default=True),
Field('start_time','datetime',default=now),
Field('start_time','datetime',default=now, requires=IS_NOT_EMPTY()),
Field('next_run_time','datetime',default=now),
Field('stop_time','datetime'),
Field('repeats','integer',default=1,comment="0=unlimited"),
Field('retry_failed', 'integer', default=0, comment="-1=unlimited"),
Field('period','integer',default=60,comment='seconds'),
Field('timeout','integer',default=60,comment='seconds'),
Field('repeats','integer',default=1,comment="0=unlimited",
requires=IS_INT_IN_RANGE(0, None)),
Field('retry_failed', 'integer', default=0, comment="-1=unlimited",
requires=IS_INT_IN_RANGE(-1, None)),
Field('period','integer',default=60,comment='seconds',
requires=IS_INT_IN_RANGE(0, None)),
Field('timeout','integer',default=60,comment='seconds',
requires=IS_INT_IN_RANGE(0, None)),
Field('sync_output', 'integer', default=0,
comment="update output every n sec: 0=never"),
comment="update output every n sec: 0=never",
requires=IS_INT_IN_RANGE(0, None)),
Field('times_run','integer',default=0,writable=False),
Field('times_failed','integer',default=0,writable=False),
Field('last_run_time','datetime',writable=False,readable=False),
@@ -870,4 +875,3 @@ def main():
if __name__=='__main__':
main()

View File

@@ -24,7 +24,7 @@ from html import FORM, INPUT, LABEL, OPTION, SELECT, BUTTON
from html import TABLE, THEAD, TBODY, TR, TD, TH, STYLE
from html import URL, truncate_string, FIELDSET
from dal import DAL, Field, Table, Row, CALLABLETYPES, smart_query, \
bar_encode, regex_table_field, Reference
bar_encode, Reference, REGEX_TABLE_DOT_FIELD
from storage import Storage
from utils import md5_hash
from validators import IS_EMPTY_OR, IS_NOT_EMPTY, IS_LIST_OF, IS_DATE, \
@@ -702,17 +702,17 @@ def formstyle_bootstrap(form, fields):
_submit = False
if isinstance(controls, INPUT):
controls['_class'] = 'input-xlarge'
controls.add_class('input-xlarge')
if controls['_type'] == 'submit':
# flag submit button
_submit = True
controls['_class'] = 'btn btn-primary'
if isinstance(controls, SELECT):
controls['_class'] = 'input-xlarge'
controls.add_class('input-xlarge')
if isinstance(controls, TEXTAREA):
controls['_class'] = 'input-xlarge'
controls.add_class('input-xlarge')
if isinstance(label, LABEL):
label['_class'] = 'control-label'
@@ -1376,8 +1376,8 @@ class SQLFORM(FORM):
# this should never happen but seems to happen to some
del fields['delete_this_record']
for field in self.table:
if not field.name in fields and field.writable==False \
and field.update is None:
if not field.name in fields and field.writable is False \
and field.update is None and field.compute is None:
if record_id and self.record:
fields[field.name] = self.record[field.name]
elif not self.table[field.name].default is None:
@@ -1498,11 +1498,10 @@ class SQLFORM(FORM):
selectfields = []
for field in fields:
name = str(field).replace('.','-')
criterion = []
options = search_options.get(field.type,None)
if options:
label = isinstance(field.label,str) and T(field.label) or field.label
selectfields.append((str(field),label))
selectfields.append(OPTION(label, _value=str(field)))
operators = SELECT(*[T(option) for option in options])
if field.type=='boolean':
value_input = SELECT(
@@ -1510,30 +1509,39 @@ class SQLFORM(FORM):
OPTION(T("False"),_value="F"),
_id="w2p_value_"+name)
else:
value_input = INPUT(_type='text',_id="w2p_value_"+name,
value_input = INPUT(_type='text',
_id="w2p_value_"+name,
_class=field.type)
new_button = INPUT(
_type="button", _value=T('New'),_class="btn",
_onclick="w2p_build_query('new','"+str(field)+"')")
_onclick="w2p_build_query('new','%s')" % field)
and_button = INPUT(
_type="button", _value=T('And'),_class="btn",
_onclick="w2p_build_query('and','"+str(field)+"')")
_onclick="w2p_build_query('and','%s')" % field)
or_button = INPUT(
_type="button", _value=T('Or'),_class="btn",
_onclick="w2p_build_query('or','"+str(field)+"')")
_onclick="w2p_build_query('or','%s')" % field)
close_button = INPUT(
_type="button", _value=T('Close'),_class="btn",
_onclick="jQuery('#w2p_query_panel').slideUp()")
criterion.extend([operators,value_input,new_button,and_button,or_button])
criteria.append(DIV(criterion, _id='w2p_field_%s' % name,
_class='w2p_query_row hidden'))
criteria.append(DIV(
operators,value_input,new_button,
and_button,or_button,close_button,
_id='w2p_field_%s' % name,
_class='w2p_query_row hidden',
_style='display:inline'))
criteria.insert(0,SELECT(
_id="w2p_query_fields",
_onchange="jQuery('.w2p_query_row').hide();jQuery('#w2p_field_'+jQuery('#w2p_query_fields').val().replace('.','-')).show();",
*[OPTION(label, _value=fname) for fname,label in selectfields]))
_style='float:left',
*selectfields))
fadd = SCRIPT("""
jQuery('#w2p_query_panel input,#w2p_query_panel select').css(
'width','auto').css('float','left');
jQuery('#w2p_query_panel input,#w2p_query_panel select').css('width','auto');
jQuery(function(){web2py_ajax_fields('#w2p_query_panel');});
function w2p_build_query(aggregator,a){
function w2p_build_query(aggregator,a) {
var b=a.replace('.','-');
var option = jQuery('#w2p_field_'+b+' select').val();
var value = jQuery('#w2p_value_'+b).val().replace('"','\\\\"');
@@ -1541,10 +1549,11 @@ class SQLFORM(FORM):
var k=jQuery('#web2py_keywords');
var v=k.val();
if(aggregator=='new') k.val(s); else k.val((v?(v+' '+ aggregator +' '):'')+s);
jQuery('#w2p_query_panel').slideUp();
}
""")
return CAT(DIV(_id="w2p_query_panel",_class='hidden',*criteria),fadd)
return CAT(
DIV(_id="w2p_query_panel",_class='hidden',*criteria),fadd)
@staticmethod
@@ -1663,8 +1672,9 @@ class SQLFORM(FORM):
session.flash = T('not authorized')
redirect(referrer)
def gridbutton(buttonclass='buttonadd',buttontext='Add',
buttonurl=url(args=[]),callback=None,delete=None,trap=True):
def gridbutton(buttonclass='buttonadd', buttontext='Add',
buttonurl=url(args=[]), callback=None,
delete=None, trap=True):
if showbuttontext:
if callback:
return A(SPAN(_class=ui.get(buttonclass)),
@@ -1854,12 +1864,6 @@ class SQLFORM(FORM):
session['_web2py_grid_referrer_'+formname] = url2(vars=request.vars)
console = DIV(_class='web2py_console %(header)s %(cornertop)s' % ui)
if create:
console.append(gridbutton(
buttonclass='buttonadd',
buttontext='Add',
buttonurl=url(args=['new',tablename])))
error = None
if searchable:
sfields = reduce(lambda a,b:a+b,
@@ -1868,13 +1872,13 @@ class SQLFORM(FORM):
search_widget = search_widget[tablename]
if search_widget=='default':
search_menu = SQLFORM.search_menu(sfields)
search_widget = lambda sfield, url: FORM(
search_widget = lambda sfield, url: DIV(FORM(
INPUT(_name='keywords',_value=request.vars.keywords,
_id='web2py_keywords',_onfocus="jQuery('#w2p_query_fields').change();jQuery('#w2p_query_panel').slideDown();"),
INPUT(_type='submit',_value=T('Search'),_class="btn"),
INPUT(_type='submit',_value=T('Clear'),_class="btn",
_onclick="jQuery('#web2py_keywords').val('');"),
search_menu,_method="GET",_action=url)
_method="GET",_action=url),search_menu)
form = search_widget and search_widget(sfields,url()) or ''
console.append(form)
keywords = request.vars.get('keywords','')
@@ -1888,6 +1892,12 @@ class SQLFORM(FORM):
error = T('Invalid query')
else:
subquery = None
if create:
console.append(gridbutton(
buttonclass='buttonadd',
buttontext='Add',
buttonurl=url(args=['new',tablename])))
if subquery:
dbset = dbset(subquery)
try:
@@ -2568,7 +2578,7 @@ class ExportClass(object):
for record in self.rows:
row = []
for col in self.rows.colnames:
if not regex_table_field.match(col):
if not REGEX_TABLE_DOT_FIELD.match(col):
row.append(record._extra[col])
else:
(t, f) = col.split('.')

View File

@@ -136,7 +136,7 @@ class StorageList(Storage):
like Storage but missing elements default to [] instead of None
"""
def __getitem__(self, key):
    """Index access delegates to __getattr__ so that missing keys
    default to [] (see the StorageList class docstring)."""
    # The diff residue kept both the buggy `self.__gteattr__(key)` call
    # (a typo: no such method, raised AttributeError) and its fix; only
    # the corrected delegation is kept.
    return self.__getattr__(key)
def __getattr__(self, key):
if key in self:
return getattr(self,key)

View File

@@ -0,0 +1,22 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Unit tests for running web2py
"""
import sys
import os
if os.path.isdir('gluon'):
sys.path.append(os.path.realpath('gluon'))
else:
sys.path.append(os.path.realpath('../'))
import unittest
from gluon.contrib.markmin.markmin2html import run_doctests
class TestMarkmin(unittest.TestCase):
def testMarkmin(self):
run_doctests()
if __name__ == '__main__':
unittest.main()

View File

@@ -839,6 +839,7 @@ class Auth(object):
table_event = None,
table_cas = None,
showid = False,
use_username = False,
login_email_validate = True,
login_userfield = None,
logout_onlogout = None,
@@ -1060,7 +1061,6 @@ class Auth(object):
request = current.request
session = current.session
auth = session.auth
self.use_username = None # None means postpone detection
self.user_groups = auth and auth.user_groups or {}
if auth and auth.last_visit and auth.last_visit + \
datetime.timedelta(days=0, seconds=auth.expiration) > request.now:
@@ -1251,21 +1251,21 @@ class Auth(object):
else:
login = A(T('Login'), _href=href('login'))
register = A(T('Register'), _href=href('register'))
retrieve_username = A(T('Forgot username?'), _href=href('retrieve_username'))
lost_password = A(T('Lost password?'), _href=href('request_reset_password'))
retrieve_username = A(
T('Forgot username?'), _href=href('retrieve_username'))
lost_password = A(
T('Lost password?'), _href=href('request_reset_password'))
bar = SPAN(s1, login, s3, _class='auth_navbar')
if not 'register' in self.settings.actions_disabled:
bar.insert(-1, s2)
bar.insert(-1, register)
if self.use_username is None:
# should always be false if auth.define_tables() is called
self.use_username = 'username' in self.table_user().fields
if self.use_username and \
not 'retrieve_username' in self.settings.actions_disabled:
if self.settings.use_username and not 'retrieve_username' \
in self.settings.actions_disabled:
bar.insert(-1, s2)
bar.insert(-1, retrieve_username)
if not 'request_reset_password' in self.settings.actions_disabled:
if not 'request_reset_password' \
in self.settings.actions_disabled:
bar.insert(-1, s2)
bar.insert(-1, lost_password)
return bar
@@ -1357,7 +1357,7 @@ class Auth(object):
writable=False,readable=False,
label=T('Modified By')))
def define_tables(self, username=False, signature=None,
def define_tables(self, username=None, signature=None,
migrate=True, fake_migrate=False):
"""
to be called unless tables are defined manually
@@ -1375,7 +1375,10 @@ class Auth(object):
db = self.db
settings = self.settings
self.use_username = username
if username is None:
username = settings.use_username
else:
settings.use_username = username
if not self.signature:
self.define_signature()
if signature==True:

View File

@@ -1376,6 +1376,7 @@ class IS_GENERIC_URL(Validator):
"""
def __init__(
self,
error_message='enter a valid URL',
@@ -1402,6 +1403,9 @@ class IS_GENERIC_URL(Validator):
"prepend_scheme='%s' is not in allowed_schemes=%s" \
% (self.prepend_scheme, self.allowed_schemes)
GENERIC_URL = re.compile(r"%[^0-9A-Fa-f]{2}|%[^0-9A-Fa-f][0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]|%$|%[0-9A-Fa-f]$|%[^0-9A-Fa-f]$")
GENERIC_URL_VALID = re.compile(r"[A-Za-z0-9;/?:@&=+$,\-_\.!~*'\(\)%#]+$")
def __call__(self, value):
"""
:param value: a string, the URL to validate
@@ -1411,12 +1415,9 @@ class IS_GENERIC_URL(Validator):
"""
try:
# if the URL does not misuse the '%' character
if not re.compile(
r"%[^0-9A-Fa-f]{2}|%[^0-9A-Fa-f][0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]|%$|%[0-9A-Fa-f]$|%[^0-9A-Fa-f]$"
).search(value):
if not self.GENERIC_URL.search(value):
# if the URL is only composed of valid characters
if re.compile(
r"[A-Za-z0-9;/?:@&=+$,\-_\.!~*'\(\)%#]+$").match(value):
if self.GENERIC_URL_VALID.match(value):
# Then split up the URL into its components and check on
# the scheme
scheme = url_split_regex.match(value).group(2)
@@ -1432,11 +1433,10 @@ class IS_GENERIC_URL(Validator):
# ports, check to see if adding a valid scheme fixes
# the problem (but only do this if it doesn't have
# one already!)
if not re.compile('://').search(value) and None\
in self.allowed_schemes:
if value.find('://')<0 and None in self.allowed_schemes:
schemeToUse = self.prepend_scheme or 'http'
prependTest = self.__call__(schemeToUse
+ '://' + value)
prependTest = self.__call__(
schemeToUse + '://' + value)
# if the prepend test succeeded
if prependTest[1] is None:
# if prepending in the output is enabled
@@ -1791,6 +1791,9 @@ class IS_HTTP_URL(Validator):
"""
GENERIC_VALID_IP = re.compile("([\w.!~*'|;:&=+$,-]+@)?\d+\.\d+\.\d+\.\d+(:\d*)*$")
GENERIC_VALID_DOMAIN = re.compile("([\w.!~*'|;:&=+$,-]+@)?(([A-Za-z0-9]+[A-Za-z0-9\-]*[A-Za-z0-9]+\.)*([A-Za-z0-9]+\.)*)*([A-Za-z]+[A-Za-z0-9\-]*[A-Za-z0-9]+)\.?(:\d*)*$")
def __init__(
self,
error_message='enter a valid URL',
@@ -1843,16 +1846,12 @@ class IS_HTTP_URL(Validator):
# if there is an authority component
if authority:
# if authority is a valid IP address
if re.compile(
"([\w.!~*'|;:&=+$,-]+@)?\d+\.\d+\.\d+\.\d+(:\d*)*$").match(authority):
if self.GENERIC_VALID_IP.match(authority):
# Then this HTTP URL is valid
return (value, None)
else:
# else if authority is a valid domain name
domainMatch = \
re.compile(
"([\w.!~*'|;:&=+$,-]+@)?(([A-Za-z0-9]+[A-Za-z0-9\-]*[A-Za-z0-9]+\.)*([A-Za-z0-9]+\.)*)*([A-Za-z]+[A-Za-z0-9\-]*[A-Za-z0-9]+)\.?(:\d*)*$"
).match(authority)
domainMatch = self.GENERIC_VALID_DOMAIN.match(authority)
if domainMatch:
# if the top-level domain really exists
if domainMatch.group(5).lower()\
@@ -1865,13 +1864,13 @@ class IS_HTTP_URL(Validator):
path = componentsMatch.group(5)
# relative case: if this is a valid path (if it starts with
# a slash)
if re.compile('/').match(path):
if path.startswith('/'):
# Then this HTTP URL is valid
return (value, None)
else:
# abbreviated case: if we haven't already, prepend a
# scheme and see if it fixes the problem
if not re.compile('://').search(value):
if value.find('://')<0:
schemeToUse = self.prepend_scheme or 'http'
prependTest = self.__call__(schemeToUse
+ '://' + value)
@@ -2521,9 +2520,11 @@ class CLEANUP(Validator):
removes special characters on validation
"""
REGEX_CLEANUP = re.compile('[^\x09\x0a\x0d\x20-\x7e]')
def __init__(self, regex='[^\x09\x0a\x0d\x20-\x7e]'):
self.regex = re.compile(regex)
def __init__(self, regex=None):
self.regex = self.REGEX_CLEANUP if regex is None \
else re.compile(regex)
def __call__(self, value):
v = self.regex.sub('',str(value).strip())
@@ -2790,11 +2791,13 @@ class IS_STRONG(object):
class IS_IN_SUBSET(IS_IN_SET):
REGEX_W = re.compile('\w+')
def __init__(self, *a, **b):
IS_IN_SET.__init__(self, *a, **b)
def __call__(self, value):
values = re.compile("\w+").findall(str(value))
values = self.REGEX_W.findall(str(value))
failures = [x for x in values if IS_IN_SET.__call__(self, x)[1]]
if failures:
return (value, translate(self.error_message))

View File

@@ -26,10 +26,8 @@ administrator_email = 'you@localhost'
while 1:
for file in os.listdir(path):
filename = os.path.join(path, file)
if not ALLOW_DUPLICATES:
fileobj = open(filename, 'r')
fileobj = open(file, 'r')
try:
file_data = fileobj.read()
finally:
@@ -42,10 +40,10 @@ while 1:
hashes[key] = 1
error = RestrictedError()
error.load(request, request.application, filename)
error.load(request, request.application, file)
mail.send(to=administrator_email, subject='new web2py ticket', message=error.traceback)
os.unlink(filename)
os.unlink(os.path.join(path, file))
time.sleep(SLEEP_MINUTES * 60)