Merge ssh://github.com/web2py/web2py

This commit is contained in:
Michele Comitini
2013-08-12 10:06:01 +02:00
18 changed files with 634 additions and 345 deletions
+1 -1
View File
@@ -1 +1 @@
Version 2.6.0-development+timestamp.2013.08.05.02.33.02
Version 2.6.0-development+timestamp.2013.08.09.16.07.32
+1 -3
View File
@@ -491,13 +491,11 @@ def enable():
if is_gae:
return SPAN(T('Not supported'), _style='color:yellow')
elif os.path.exists(filename):
os.unlink(filename)
return SPAN(T('Disable'), _style='color:green')
else:
safe_open(filename, 'wb').write(time.ctime())
safe_open(filename, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
return SPAN(T('Enable'), _style='color:red')
def peek():
""" Visualize object code """
app = get_app(request.vars.app)
+5
View File
@@ -16,6 +16,11 @@ if MULTI_USER_MODE and not is_manager():
session.flash = 'Not Authorized'
redirect(URL('default', 'site'))
from gluon.settings import settings
if not settings.is_source:
session.flash = 'Requires running web2py from source'
redirect(URL(request.application, 'default', 'site'))
forever = 10 ** 8
@@ -10,6 +10,10 @@ except ImportError:
session.flash = T('requires python-git, but not installed')
redirect(URL('default', 'site'))
from gluon.settings import settings
if not settings.is_source:
session.flash = 'Requires running web2py from source'
redirect(URL(request.application, 'default', 'site'))
def deploy():
apps = sorted(file for file in os.listdir(apath(r=request)))
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -24,6 +24,7 @@
<li><a target="_blank" href="http://www.onemewebservices.com">OneMeWebServices</a> (Canada)</li>
<li><a target="_blank" href="http://www.budgetbytes.nl">BudgetBytes</a> (The Netherlands)</li>
<li><a target="_blank" href="http://www.androsoft.pl">ANDROSoft</a> (Poland)</li>
<li><a target="_blank" href="http://itjp.net.br/">ITJP</a> (Brazil)</li>
<li><a target="_blank" href="http://emotionull.com">Emotionull</a> (Greece and Cyprus)</li>
<li><a target="_blank" href="http://zarealye.com/ca/Collect_Advantage">Zarealye, Ltd.</a> (Russia)</li>
<li><a target="_blank" href="http://www.vsa-services.com/">VSA Services</a> (Singapore)</li>
+23 -30
View File
@@ -1014,8 +1014,8 @@ def render(text,
# - is empty -> this is an <hr /> tag
# - consists '|' -> table
# - consists other characters -> blockquote
if ( lineno+1 >= strings_len or
not (s.count('-') == len(s) and len(s)>3) ):
if (lineno+1 >= strings_len or
not(s.count('-') == len(s) and len(s)>3)):
return (s, mtag, lineno)
lineno+=1
@@ -1034,7 +1034,8 @@ def render(text,
while lineno < strings_len:
s = strings[lineno].strip()
if s[:1] == '=':
if s.count('=')==len(s) and len(s)>3: # header or footer
# header or footer
if s.count('=')==len(s) and len(s)>3:
if not thead: # if thead list is empty:
thead = tout
else:
@@ -1054,16 +1055,16 @@ def render(text,
tr = '<tr class="even">'
else:
tr = '<tr class="first">' if rownum == 0 else '<tr>'
tout.append(tr+''.join(['<td%s>%s</td>'% \
(' class="num"'
if regex_num.match(f)
else '',
f.strip()
) for f in s.split('|')])+'</tr>'+pp)
tout.append(tr + ''.join(['<td%s>%s</td>' % (
' class="num"'
if regex_num.match(f) else '',
f.strip()
) for f in s.split('|')])+'</tr>'+pp)
rownum+=1
lineno+=1
t_cls = ' class="%s%s"'%(class_prefix, t_cls) if t_cls and t_cls != 'id' else ''
t_cls = ' class="%s%s"'%(class_prefix, t_cls) \
if t_cls and t_cls != 'id' else ''
t_id = ' id="%s%s"'%(id_prefix, t_id) if t_id else ''
s = ''
if thead:
@@ -1080,7 +1081,7 @@ def render(text,
else:
# parse blockquote:
bq_begin=lineno
t_mode = False # embidded table
t_mode = False # embedded table
t_cls = ''
t_id = ''
@@ -1090,13 +1091,15 @@ def render(text,
if not t_mode:
m = regex_tq.match(s)
if m:
if lineno+1 == strings_len or '|' not in strings[lineno+1]:
if (lineno+1 == strings_len or
'|' not in strings[lineno+1]):
t_cls = m.group('c') or ''
t_id = m.group('p') or ''
break
if regex_bq_headline.match(s):
if lineno+1 < strings_len and strings[lineno+1].strip():
if (lineno+1 < strings_len and
strings[lineno+1].strip()):
t_mode = True
lineno+=1
continue
@@ -1107,25 +1110,15 @@ def render(text,
lineno+=1
t_cls = ' class="%s%s"'%(class_prefix,t_cls) if t_cls and t_cls != 'id' else ''
t_id = ' id="%s%s"'%(id_prefix,t_id) if t_id else ''
t_cls = ' class="%s%s"'%(class_prefix,t_cls) \
if t_cls and t_cls != 'id' else ''
t_id = ' id="%s%s"'%(id_prefix,t_id) \
if t_id else ''
s = '<blockquote%s%s>%s</blockquote>%s' \
% (t_cls,
t_id,
render('\n'.join(strings[bq_begin:lineno]),
extra,
allowed,
'br',
URL,
environment,
latex,
autolinks,
protolinks,
class_prefix,
id_prefix,
pretty_print),
pp
)
'\n'.join(strings[bq_begin:lineno]),pp)
mtag='q'
else:
s = '<hr />'
@@ -1440,4 +1433,4 @@ if __name__ == '__main__':
print " file.markmin [file.css] - process file.markmin + built in file.css (optional)"
print " file.markmin [@path_to/css] - process file.markmin + link path_to/css (optional)"
run_doctests()
+110 -58
View File
@@ -194,7 +194,7 @@ CALLABLETYPES = (types.LambdaType, types.FunctionType,
TABLE_ARGS = set(
('migrate','primarykey','fake_migrate','format','redefine',
'singular','plural','trigger_name','sequence_name',
'singular','plural','trigger_name','sequence_name','fields',
'common_filter','polymodel','table_class','on_define','actual_name'))
SELECT_ARGS = set(
@@ -253,7 +253,7 @@ REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)(\.(?P<name>\w+))?\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
@@ -1076,16 +1076,18 @@ class BaseAdapter(ConnectionPool):
elif not key in sql_fields:
del sql_fields_current[key]
ftype = sql_fields_old[key]['type']
if self.dbengine in ('postgres',) and ftype.startswith('geometry'):
if (self.dbengine in ('postgres',) and
ftype.startswith('geometry')):
geotype, parms = ftype[:-1].split('(')
schema = parms.split(',')[0]
query = [ "SELECT DropGeometryColumn ('%(schema)s', '%(table)s', '%(field)s');" %
query = [ "SELECT DropGeometryColumn ('%(schema)s', "+
"'%(table)s', '%(field)s');" %
dict(schema=schema, table=tablename, field=key,) ]
elif self.dbengine in ('firebird',):
query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
else:
query = ['ALTER TABLE %s DROP COLUMN %s;'
% (tablename, key)]
query = ['ALTER TABLE %s DROP COLUMN %s;' %
(tablename, key)]
metadata_change = True
elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
and not (key in table.fields and
@@ -1124,8 +1126,10 @@ class BaseAdapter(ConnectionPool):
self.log('faked!\n', table)
else:
self.execute(sub_query)
# Caveat: mysql, oracle and firebird do not allow multiple alter table
# in one transaction so we must commit partial transactions and
# Caveat: mysql, oracle and firebird
# do not allow multiple alter table
# in one transaction so we must commit
# partial transactions and
# update table._dbt after alter table.
if db._adapter.commit_on_alter_table:
db.commit()
@@ -1259,12 +1263,17 @@ class BaseAdapter(ConnectionPool):
return '(%s OR %s)' % (self.expand(first), self.expand(second))
def BELONGS(self, first, second):
if isinstance(second, str):
return '(%s IN (%s))' % (self.expand(first), second[:-1])
elif not second:
return '(1=0)'
items = ','.join(self.expand(item, first.type) for item in second)
return '(%s IN (%s))' % (self.expand(first), items)
if isinstance(second, str):
return '(%s IN (%s))' % (self.expand(first), second[:-1])
if not second:
return '(1=0)'
if isinstance(second, (list,tuple,frozenset)):
second = set(second) # remove duplicates, make mutable
if isinstance(second, set) and None in second:
second.remove(None)
return self.OR(self.EQ(first, None), self.BELONGS(first, second))
items = ','.join(self.expand(item, first.type) for item in second)
return '(%s IN (%s))' % (self.expand(first), items)
def REGEXP(self, first, second):
"regular expression operator"
@@ -4364,6 +4373,7 @@ class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
self.pool_size = pool_size
self.db_codec = db_codec
self._after_connection = after_connection
if do_connect: self.find_driver(adapter_args, uri)
self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
os.sep+'applications'+os.sep,1)[1])
ruri = uri.split("://")[1]
@@ -4394,6 +4404,10 @@ class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
def execute(self, command, *a, **b):
return self.log_execute(command.decode('utf8'), *a, **b)
def find_driver(self,adapter_args,uri=None):
self.adapter_args = adapter_args
self.driver = "google"
class NoSQLAdapter(BaseAdapter):
can_select_for_update = False
@@ -5346,10 +5360,12 @@ class MongoDBAdapter(NoSQLAdapter):
d = datetime.date(2000, 1, 1)
# mongodb doesn't has a time object and so it must datetime,
# string or integer
return datetime.datetime.combine(d, value)
return datetime.datetime.combine(d, value)
elif fieldtype == "blob":
from bson import Binary
return Binary(value)
if not isinstance(value, Binary):
return Binary(value)
return value
elif (isinstance(fieldtype, basestring) and
fieldtype.startswith('list:')):
if fieldtype.startswith('list:reference'):
@@ -7760,6 +7776,8 @@ def index():
*fields,
**args
):
if not fields and 'fields' in args:
fields = args.get('fields',())
if not isinstance(tablename,str):
raise SyntaxError("missing table name")
elif hasattr(self,tablename) or tablename in self.tables:
@@ -8240,24 +8258,27 @@ class Table(object):
fieldnames.add('id')
self._id = field
virtual_fields = []
def include_new(field):
newfields.append(field)
fieldnames.add(field.name)
if field.type=='id':
self._id = field
for field in fields:
if isinstance(field, (FieldMethod, FieldVirtual)):
virtual_fields.append(field)
virtual_fields.append(field)
elif isinstance(field, Field) and not field.name in fieldnames:
if field.db is not None:
field = copy.copy(field)
newfields.append(field)
fieldnames.add(field.name)
if field.type=='id':
self._id = field
field = copy.copy(field)
include_new(field)
elif isinstance(field, dict) and 'fieldname' and \
not field['fieldname'] in fieldnames:
include_new(Field(**field))
elif isinstance(field, Table):
table = field
for field in table:
if not field.name in fieldnames and not field.type=='id':
t2 = not table._actual and self._tablename
field = field.clone(point_self_references_to=t2)
newfields.append(field)
fieldnames.add(field.name)
include_new(field.clone(point_self_references_to=t2))
elif not isinstance(field, (Field, Table)):
raise SyntaxError(
'define_table argument is not a Field or Table: %s' % field)
@@ -9431,9 +9452,6 @@ class Field(Expression):
raise http.HTTP(404)
if self.authorize and not self.authorize(row):
raise http.HTTP(403)
m = REGEX_UPLOAD_PATTERN.match(name)
if not m or not self.isattachment:
raise TypeError('Can\'t retrieve %s' % name)
file_properties = self.retrieve_file_properties(name,path)
filename = file_properties['filename']
if isinstance(self_uploadfield, str): # ## if file is in DB
@@ -9457,35 +9475,35 @@ class Field(Expression):
return (filename, stream)
def retrieve_file_properties(self, name, path=None):
m = REGEX_UPLOAD_PATTERN.match(name)
if not m or not self.isattachment:
raise TypeError('Can\'t retrieve %s file properties' % name)
self_uploadfield = self.uploadfield
if self.custom_retrieve_file_properties:
return self.custom_retrieve_file_properties(name, path)
try:
m = REGEX_UPLOAD_PATTERN.match(name)
if not m or not self.isattachment:
raise TypeError('Can\'t retrieve %s file properties' % name)
filename = base64.b16decode(m.group('name'), True)
filename = REGEX_CLEANUP_FN.sub('_', filename)
except (TypeError, AttributeError):
filename = name
if isinstance(self_uploadfield, str): # ## if file is in DB
return dict(path=None,filename=filename)
elif isinstance(self_uploadfield,Field):
return dict(path=None,filename=filename)
if m.group('name'):
try:
filename = base64.b16decode(m.group('name'), True)
filename = REGEX_CLEANUP_FN.sub('_', filename)
except (TypeError, AttributeError):
filename = name
else:
# ## if file is on filesystem
if path:
pass
elif self.uploadfolder:
filename = name
# ## if file is in DB
if isinstance(self_uploadfield, (str, Field)):
return dict(path=None,filename=filename)
# ## if file is on filesystem
if not path:
if self.uploadfolder:
path = self.uploadfolder
else:
path = pjoin(self.db._adapter.folder, '..', 'uploads')
if self.uploadseparate:
t = m.group('table')
f = m.group('field')
u = m.group('uuidkey')
path = pjoin(path,"%s.%s" % (t,f),u[:2])
return dict(path=path,filename=filename)
if self.uploadseparate:
t = m.group('table')
f = m.group('field')
u = m.group('uuidkey')
path = pjoin(path,"%s.%s" % (t,f),u[:2])
return dict(path=path,filename=filename)
def formatter(self, value):
@@ -10312,22 +10330,56 @@ class Rows(object):
rows.records = sorted(self,key=f,reverse=reverse)
return rows
def group_by_value(self, field):
def group_by_value(self, *fields, **args):
"""
regroups the rows, by one of the fields
"""
one_result = False
if 'one_result' in args:
one_result = args['one_result']
def build_fields_struct(row, fields, num, groups):
''' helper function:
'''
if num > len(fields)-1:
if one_result:
return row
else:
return [row]
key = fields[num]
value = row[key]
if value not in groups:
groups[value] = build_fields_struct(row, fields, num+1, {})
else:
struct = build_fields_struct(row, fields, num+1, groups[ value ])
# still have more grouping to do
if type(struct) == type(dict()):
groups[value].update()
# no more grouping, first only is off
elif type(struct) == type(list()):
groups[value] += struct
# no more grouping, first only on
else:
groups[value] = struct
return groups
if len(fields) == 0:
return self
# if select returned no results
if not self.records:
return {}
key = str(field)
grouped_row_group = dict()
# build the struct
for row in self:
value = row[key]
if not value in grouped_row_group:
grouped_row_group[value] = [row]
else:
grouped_row_group[value].append(row)
build_fields_struct(row, fields, 0, grouped_row_group)
return grouped_row_group
def render(self, i=None, fields=None):
+166 -5
View File
@@ -36,6 +36,11 @@ import os
import sys
import traceback
import threading
import cgi
import copy
import tempfile
from cache import CacheInRam
from fileutils import copystream
FMT = '%a, %d-%b-%Y %H:%M:%S PST'
PAST = 'Sat, 1-Jan-1971 00:00:00'
@@ -47,6 +52,15 @@ try:
except ImportError:
have_minify = False
try:
import simplejson as sj #external installed library
except:
try:
import json as sj #standard installed library
except:
import contrib.simplejson as sj #pure python library
regex_session_id = re.compile('^([\w\-]+/)?[\w\-\.]+$')
__all__ = ['Request', 'Response', 'Session']
@@ -62,6 +76,52 @@ css_inline = '<style type="text/css">\n%s\n</style>'
js_inline = '<script type="text/javascript">\n%s\n</script>'
def copystream_progress(request, chunk_size=10 ** 5):
"""
copies request.env.wsgi_input into request.body
and stores progress upload status in cache_ram
X-Progress-ID:length and X-Progress-ID:uploaded
"""
env = request.env
if not env.content_length:
return cStringIO.StringIO()
source = env.wsgi_input
try:
size = int(env.content_length)
except ValueError:
raise HTTP(400, "Invalid Content-Length header")
try: # Android requires this
dest = tempfile.NamedTemporaryFile()
except NotImplementedError: # and GAE this
dest = tempfile.TemporaryFile()
if not 'X-Progress-ID' in request.vars:
copystream(source, dest, size, chunk_size)
return dest
cache_key = 'X-Progress-ID:' + request.vars['X-Progress-ID']
cache_ram = CacheInRam(request) # same as cache.ram because meta_storage
cache_ram(cache_key + ':length', lambda: size, 0)
cache_ram(cache_key + ':uploaded', lambda: 0, 0)
while size > 0:
if size < chunk_size:
data = source.read(size)
cache_ram.increment(cache_key + ':uploaded', size)
else:
data = source.read(chunk_size)
cache_ram.increment(cache_key + ':uploaded', chunk_size)
length = len(data)
if length > size:
(data, length) = (data[:size], size)
size -= length
if length == 0:
break
dest.write(data)
if length < chunk_size:
break
dest.seek(0)
cache_ram(cache_key + ':length', None)
cache_ram(cache_key + ':uploaded', None)
return dest
class Request(Storage):
"""
@@ -81,13 +141,15 @@ class Request(Storage):
- restful()
"""
def __init__(self):
def __init__(self, env):
Storage.__init__(self)
self.env = Storage()
self.env = Storage(env)
self.env.web2py_path = global_settings.applications_parent
self.env.update(global_settings)
self.cookies = Cookie.SimpleCookie()
self.get_vars = Storage()
self.post_vars = Storage()
self.vars = Storage()
self._get_vars = None
self._post_vars = None
self._vars = None
self.folder = None
self.application = None
self.function = None
@@ -100,6 +162,105 @@ class Request(Storage):
self.is_local = False
self.global_settings = settings.global_settings
def parse_get_vars(self):
query_string = self.env.get('QUERY_STRING','')
dget = cgi.parse_qs(query_string, keep_blank_values=1)
get_vars = self._get_vars = Storage(dget)
for (key, value) in get_vars.iteritems():
if isinstance(value,list) and len(value)==1:
get_vars[key] = value[0]
def parse_post_vars(self):
env = self.env
post_vars = self._post_vars = Storage()
try:
self.body = body = copystream_progress(self)
except IOError:
raise HTTP(400, "Bad Request - HTTP body is incomplete")
#if content-type is application/json, we must read the body
is_json = env.get('content_type', '')[:16] == 'application/json'
if is_json:
try:
json_vars = sj.load(body)
except:
# incoherent request bodies can still be parsed "ad-hoc"
json_vars = {}
pass
# update vars and get_vars with what was posted as json
if isinstance(json_vars, dict):
post_vars.update(json_vars)
body.seek(0)
# parse POST variables on POST, PUT, BOTH only in post_vars
if (body and
env.request_method in ('POST', 'PUT', 'DELETE', 'BOTH') and
not is_json):
dpost = cgi.FieldStorage(fp=body, environ=env, keep_blank_values=1)
post_vars.update(dpost)
# The same detection used by FieldStorage to detect multipart POSTs
is_multipart = dpost.type[:10] == 'multipart/'
body.seek(0)
def listify(a):
return (not isinstance(a, list) and [a]) or a
try:
keys = sorted(dpost)
except TypeError:
keys = []
for key in keys:
if key is None:
continue # not sure why cgi.FieldStorage returns None key
dpk = dpost[key]
# if an element is not a file replace it with its value else leave it alone
if isinstance(dpk, list):
value = []
for _dpk in dpk:
if not _dpk.filename:
value.append(_dpk.value)
else:
value.append(_dpk)
elif not dpk.filename:
value = dpk.value
else:
value = dpk
pvalue = listify(value)
if len(pvalue):
post_vars[key] = (len(pvalue) > 1 and pvalue) or pvalue[0]
def parse_all_vars(self):
self._vars = copy.copy(self.get_vars)
for key,value in self.post_vars.iteritems():
if not key in self._vars:
self._vars[key] = value
else:
if not isinstance(self._vars[key],list):
self._vars[key] = [self._vars[key]]
self._vars[key] += value if isinstance(value,list) else [value]
@property
def get_vars(self):
"lazily parse the query string into get_vars"
if self._get_vars is None:
self.parse_get_vars()
return self._get_vars
@property
def post_vars(self):
"lazily parse the body into post_vars"
if self._post_vars is None:
self.parse_post_vars()
return self._post_vars
@property
def vars(self):
"lazily parse all get_vars and post_vars to fill vars"
if self._vars is None:
self.parse_all_vars()
return self._vars
def compute_uuid(self):
self.uuid = '%s/%s.%s.%s' % (
self.application,
+40 -163
View File
@@ -14,7 +14,6 @@ Contains:
if False: import import_all # DO NOT REMOVE PART OF FREEZE PROCESS
import gc
import cgi
import cStringIO
import Cookie
import os
@@ -25,11 +24,10 @@ import time
import datetime
import signal
import socket
import tempfile
import random
import string
import urllib2
try:
import simplejson as sj #external installed library
except:
@@ -40,7 +38,7 @@ except:
from thread import allocate_lock
from fileutils import abspath, write_file, parse_version, copystream
from fileutils import abspath, write_file
from settings import global_settings
from admin import add_path_first, create_missing_folders, create_missing_app_folders
from globals import current
@@ -97,9 +95,7 @@ from compileapp import build_environment, run_models_in, \
run_controller_in, run_view_in
from contenttype import contenttype
from dal import BaseAdapter
from settings import global_settings
from validators import CRYPT
from cache import CacheInRam
from html import URL, xmlescape
from utils import is_valid_ip_address, getipaddrinfo
from rewrite import load, url_in, THREAD_LOCAL as rwthread, \
@@ -143,10 +139,11 @@ def get_client(env):
first tries 'http_x_forwarded_for', secondly 'remote_addr'
if all fails, assume '127.0.0.1' or '::1' (running locally)
"""
g = regex_client.search(env.get('http_x_forwarded_for', ''))
eget = env.get
g = regex_client.search(eget('http_x_forwarded_for', ''))
client = (g.group() or '').split(',')[0] if g else None
if client in (None, '', 'unknown'):
g = regex_client.search(env.get('remote_addr', ''))
g = regex_client.search(eget('remote_addr', ''))
if g:
client = g.group()
elif env.http_host.startswith('['): # IPv6
@@ -158,51 +155,6 @@ def get_client(env):
return client
def copystream_progress(request, chunk_size=10 ** 5):
"""
copies request.env.wsgi_input into request.body
and stores progress upload status in cache_ram
X-Progress-ID:length and X-Progress-ID:uploaded
"""
env = request.env
if not env.content_length:
return cStringIO.StringIO()
source = env.wsgi_input
try:
size = int(env.content_length)
except ValueError:
raise HTTP(400, "Invalid Content-Length header")
try: # Android requires this
dest = tempfile.NamedTemporaryFile()
except NotImplementedError: # and GAE this
dest = tempfile.TemporaryFile()
if not 'X-Progress-ID' in request.vars:
copystream(source, dest, size, chunk_size)
return dest
cache_key = 'X-Progress-ID:' + request.vars['X-Progress-ID']
cache_ram = CacheInRam(request) # same as cache.ram because meta_storage
cache_ram(cache_key + ':length', lambda: size, 0)
cache_ram(cache_key + ':uploaded', lambda: 0, 0)
while size > 0:
if size < chunk_size:
data = source.read(size)
cache_ram.increment(cache_key + ':uploaded', size)
else:
data = source.read(chunk_size)
cache_ram.increment(cache_key + ':uploaded', chunk_size)
length = len(data)
if length > size:
(data, length) = (data[:size], size)
size -= length
if length == 0:
break
dest.write(data)
if length < chunk_size:
break
dest.seek(0)
cache_ram(cache_key + ':length', None)
cache_ram(cache_key + ':uploaded', None)
return dest
def serve_controller(request, response, session):
@@ -282,129 +234,40 @@ class LazyWSGI(object):
def start_response(self,status='200', headers=[], exec_info=None):
"""
in controller you can use::
- request.wsgi.environ
- request.wsgi.start_response
to call third party WSGI applications
"""
self.response.status = str(status).split(' ', 1)[0]
self.response.headers = dict(headers)
return lambda *args, **kargs: \
self.response.write(escape=False, *args, **kargs)
def middleware(self,*a):
def middleware(self,*middleware_apps):
"""
In you controller use::
@request.wsgi.middleware(middleware1, middleware2, ...)
to decorate actions with WSGI middleware. actions must return strings.
uses a simulated environment so it may have weird behavior in some cases
"""
def middleware(f):
def app(environ, start_response):
data = f()
start_response(self.response.status,
start_response(self.response.status,
self.response.headers.items())
if isinstance(data, list):
return data
return [data]
for item in middleware_apps:
app = item(app)
def caller(app):
def caller(app):
return app(self.environ, self.start_response)
return lambda caller=caller, app=app: caller(app)
return middleware
ISLE25 = sys.version_info[1] <= 5
def parse_get_post_vars(request, environ):
# always parse variables in URL for GET, POST, PUT, DELETE, etc. in get_vars
env = request.env
dget = cgi.parse_qsl(env.query_string or '', keep_blank_values=1)
for (key, value) in dget:
if key in request.get_vars:
if isinstance(request.get_vars[key], list):
request.get_vars[key] += [value]
else:
request.get_vars[key] = [request.get_vars[key]] + [value]
else:
request.get_vars[key] = value
request.vars[key] = request.get_vars[key]
try:
request.body = body = copystream_progress(request)
except IOError:
raise HTTP(400, "Bad Request - HTTP body is incomplete")
#if content-type is application/json, we must read the body
is_json = env.get('http_content_type', '')[:16] == 'application/json'
if is_json:
try:
json_vars = sj.load(body)
body.seek(0)
except:
# incoherent request bodies can still be parsed "ad-hoc"
json_vars = {}
pass
# update vars and get_vars with what was posted as json
if isinstance(json_vars,dict):
request.get_vars.update(json_vars)
request.vars.update(json_vars)
# parse POST variables on POST, PUT, BOTH only in post_vars
if (body and env.request_method in ('POST', 'PUT', 'DELETE', 'BOTH')):
dpost = cgi.FieldStorage(fp=body, environ=environ, keep_blank_values=1)
# The same detection used by FieldStorage to detect multipart POSTs
is_multipart = dpost.type[:10] == 'multipart/'
body.seek(0)
def listify(a):
return (not isinstance(a, list) and [a]) or a
try:
keys = sorted(dpost)
except TypeError:
keys = []
for key in keys:
if key is None:
continue # not sure why cgi.FieldStorage returns None key
dpk = dpost[key]
# if en element is not a file replace it with its value else leave it alone
if isinstance(dpk, list):
value = []
for _dpk in dpk:
if not _dpk.filename:
value.append(_dpk.value)
else:
value.append(_dpk)
elif not dpk.filename:
value = dpk.value
else:
value = dpk
pvalue = listify(value)
if key in request.vars:
gvalue = listify(request.vars[key])
if ISLE25:
value = pvalue + gvalue
elif is_multipart:
pvalue = pvalue[len(gvalue):]
else:
pvalue = pvalue[:-len(gvalue)]
request.vars[key] = value
if len(pvalue):
request.post_vars[key] = (len(pvalue) >
1 and pvalue) or pvalue[0]
if is_json and isinstance(json_vars,dict):
# update post_vars with what was posted as json
request.post_vars.update(json_vars)
def wsgibase(environ, responder):
"""
this is the gluon wsgi application. the first function called when a page
@@ -435,15 +298,15 @@ def wsgibase(environ, responder):
[a-zA-Z0-9_]
- file and sub may also contain '-', '=', '.' and '/'
"""
eget = environ.get
current.__dict__.clear()
request = Request()
request = Request(environ)
response = Response()
session = Session()
env = request.env
env.web2py_path = global_settings.applications_parent
#env.web2py_path = global_settings.applications_parent
env.web2py_version = web2py_version
env.update(global_settings)
#env.update(global_settings)
static_file = False
try:
try:
@@ -462,8 +325,7 @@ def wsgibase(environ, responder):
response.status = env.web2py_status_code or response.status
if static_file:
if environ.get('QUERY_STRING', '').startswith(
'attachment'):
if eget('QUERY_STRING', '').startswith('attachment'):
response.headers['Content-Disposition'] \
= 'attachment'
if version:
@@ -472,6 +334,7 @@ def wsgibase(environ, responder):
'Expires'] = 'Thu, 31 Dec 2037 23:59:59 GMT'
response.stream(static_file, request=request)
# ##################################################
# fill in request items
# ##################################################
@@ -485,7 +348,7 @@ def wsgibase(environ, responder):
local_hosts.add(socket.gethostname())
local_hosts.add(fqdn)
local_hosts.update([
addrinfo[4][0] for addrinfo
addrinfo[4][0] for addrinfo
in getipaddrinfo(fqdn)])
if env.server_name:
local_hosts.add(env.server_name)
@@ -508,14 +371,16 @@ def wsgibase(environ, responder):
is_local = env.remote_addr in local_hosts,
is_https = env.wsgi_url_scheme in HTTPS_SCHEMES or \
request.env.http_x_forwarded_proto in HTTPS_SCHEMES \
or env.https == 'on')
or env.https == 'on'
)
request.compute_uuid() # requires client
request.url = environ['PATH_INFO']
# ##################################################
# access the requested application
# ##################################################
disabled = pjoin(request.folder, 'DISABLED')
if not exists(request.folder):
if app == rwthread.routes.default_application \
and app != 'welcome':
@@ -530,9 +395,15 @@ def wsgibase(environ, responder):
raise HTTP(404, rwthread.routes.error_message
% 'invalid request',
web2py_error='invalid application')
elif not request.is_local and \
exists(pjoin(request.folder, 'DISABLED')):
raise HTTP(503, "<html><body><h1>Temporarily down for maintenance</h1></body></html>")
elif request.is_local and exists(disabled):
data = dict([item.strip() for item in line.split(':',1)]
for line in open(disabled) if line.strip())
if data.get('disabled','True').lower() != 'false':
if 'redirect' in data:
redirect(data['redirect'])
if 'message' in data:
raise HTTP(503, data['message'])
raise HTTP(503, "<html><body><h1>Temporarily down for maintenance</h1></body></html>")
# ##################################################
# build missing folders
@@ -544,7 +415,7 @@ def wsgibase(environ, responder):
# get the GET and POST data
# ##################################################
parse_get_post_vars(request, environ)
#parse_get_post_vars(request, environ)
# ##################################################
# expose wsgi hooks for convenience
@@ -643,11 +514,17 @@ def wsgibase(environ, responder):
# on application error, rollback database
# ##################################################
ticket = e.log(request) or 'unknown'
# log tickets before rollback if not in DB
if not request.tickets_db:
ticket = e.log(request) or 'unknown'
# rollback
if response._custom_rollback:
response._custom_rollback()
else:
BaseAdapter.close_all_instances('rollback')
# if tickets in db, reconnect and store it in db
if request.tickets_db:
ticket = e.log(request) or 'unknown'
http_response = \
HTTP(500, rwthread.routes.error_message_ticket %
+16 -11
View File
@@ -48,11 +48,19 @@ class TicketStorage(Storage):
self._store_on_disk(request, ticket_id, ticket_data)
def _store_in_db(self, request, ticket_id, ticket_data):
table = self._get_table(self.db, self.tablename, request.application)
table.insert(ticket_id=ticket_id,
ticket_data=cPickle.dumps(ticket_data),
created_datetime=request.now)
logger.error('In FILE: %(layer)s\n\n%(traceback)s\n' % ticket_data)
self.db._adapter.reconnect()
try:
table = self._get_table(self.db, self.tablename, request.application)
id = table.insert(ticket_id=ticket_id,
ticket_data=cPickle.dumps(ticket_data),
created_datetime=request.now)
self.db.commit()
message = 'In FILE: %(layer)s\n\n%(traceback)s\n'
except Exception, e:
self.db.rollback()
message =' Unable to store in FILE: %(layer)s\n\n%(traceback)s\n'
self.db.close()
logger.error(message % ticket_data)
def _store_on_disk(self, request, ticket_id, ticket_data):
ef = self._error_file(request, ticket_id, 'wb')
@@ -71,16 +79,13 @@ class TicketStorage(Storage):
def _get_table(self, db, tablename, app):
tablename = tablename + '_' + app
table = db.get(tablename, None)
if table is None:
db.rollback() # not necessary but one day
# any app may store tickets on DB
table = db.get(tablename)
if not table:
table = db.define_table(
tablename,
db.Field('ticket_id', length=100),
db.Field('ticket_data', 'text'),
db.Field('created_datetime', 'datetime'),
)
db.Field('created_datetime', 'datetime'))
return table
def load(
+3
View File
@@ -36,3 +36,6 @@ global_settings.is_jython = \
'java' in sys.platform.lower() or \
hasattr(sys, 'JYTHON_JAR') or \
str(sys.copyright).find('Jython') > 0
global_settings.is_source = os.path.exists(os.path.join(
global_settings.gluon_parent,'web2py.py'))
+49 -22
View File
@@ -24,7 +24,7 @@ from utils import web2py_uuid
from compileapp import build_environment, read_pyc, run_models_in
from restricted import RestrictedError
from globals import Request, Response, Session
from storage import Storage
from storage import Storage, List
from admin import w2p_unpack
from dal import BaseAdapter
@@ -38,7 +38,7 @@ def enable_autocomplete_and_history(adir,env):
except ImportError:
pass
else:
readline.parse_and_bind("bind ^I rl_complete"
readline.parse_and_bind("bind ^I rl_complete"
if sys.platform == 'darwin'
else "tab: complete")
history_file = os.path.join(adir,'.pythonhistory')
@@ -71,7 +71,7 @@ def exec_environment(
"""
if request is None:
request = Request()
request = Request({})
if response is None:
response = Response()
if session is None:
@@ -116,7 +116,7 @@ def env(
web2py environment.
"""
request = Request()
request = Request({})
response = Response()
session = Session()
request.application = a
@@ -131,13 +131,21 @@ def env(
request.function = f or 'index'
response.view = '%s/%s.html' % (request.controller,
request.function)
request.env.path_info = '/%s/%s/%s' % (a, c, f)
request.env.http_host = '127.0.0.1:8000'
request.env.remote_addr = '127.0.0.1'
request.env.web2py_runtime_gae = global_settings.web2py_runtime_gae
for k, v in extra_request.items():
request[k] = v
path_info = '/%s/%s/%s' % (a, c, f)
if request.args:
path_info = '%s/%s' % (path_info, '/'.join(request.args))
if request.vars:
vars = ['%s=%s' % (k,v) if v else '%s' % k
for (k,v) in request.vars.iteritems()]
path_info = '%s?%s' % (path_info, '&'.join(vars))
request.env.path_info = path_info
# Monkey patch so credentials checks pass.
@@ -178,7 +186,8 @@ def run(
import_models=False,
startfile=None,
bpython=False,
python_code=False):
python_code=False,
cronjob=False):
"""
Start interactive shell or run Python script (startfile) in web2py
controller environment. appname is formatted like:
@@ -187,7 +196,7 @@ def run(
a/c exec the controller c into the application environment
"""
(a, c, f) = parse_path_info(appname)
(a, c, f, args, vars) = parse_path_info(appname, av=True)
errmsg = 'invalid application name: %s' % appname
if not a:
die(errmsg)
@@ -219,18 +228,23 @@ def run(
if c:
import_models = True
_env = env(a, c=c, f=f, import_models=import_models)
extra_request = {}
if args:
extra_request['args'] = args
if vars:
extra_request['vars'] = vars
_env = env(a, c=c, f=f, import_models=import_models, extra_request=extra_request)
if c:
cfile = os.path.join('applications', a, 'controllers', c + '.py')
if not os.path.isfile(cfile):
cfile = os.path.join('applications', a, 'compiled',
pyfile = os.path.join('applications', a, 'controllers', c + '.py')
pycfile = os.path.join('applications', a, 'compiled',
"controllers_%s_%s.pyc" % (c, f))
if not os.path.isfile(cfile):
die(errmsg)
else:
exec read_pyc(cfile) in _env
if ((cronjob and os.path.isfile(pycfile))
or not os.path.isfile(pyfile)):
exec read_pyc(pycfile) in _env
elif os.path.isfile(pyfile):
execfile(pyfile, _env)
else:
execfile(cfile, _env)
die(errmsg)
if f:
exec ('print %s()' % f, _env)
@@ -294,13 +308,25 @@ def run(
code.interact(local=_env)
def parse_path_info(path_info):
def parse_path_info(path_info, av=False):
"""
Parse path info formatted like a/c/f where c and f are optional
and a leading / accepted.
Return tuple (a, c, f). If invalid path_info a is set to None.
If c or f are omitted they are set to None.
If av=True, parse args and vars
"""
if av:
vars = None
if '?' in path_info:
path_info, query = path_info.split('?', 2)
vars = Storage()
for var in query.split('&'):
(var, val) = var.split('=', 2) if '=' in var else (var, None)
vars[var] = val
items = List(path_info.split('/'))
args = List(items[3:]) if len(items) > 3 else None
return (items(0), items(1), items(2), args, vars)
mo = re.match(r'^/?(?P<a>\w+)(/(?P<c>\w+)(/(?P<f>\w+))?)?$',
path_info)
@@ -368,9 +394,10 @@ def test(testpath, import_models=True, verbose=False):
globs = env(a, c=c, f=f, import_models=import_models)
execfile(testfile, globs)
doctest.run_docstring_examples(obj, globs=globs,
name='%s: %s' % (os.path.basename(testfile),
name), verbose=verbose)
doctest.run_docstring_examples(
obj, globs=globs,
name='%s: %s' % (os.path.basename(testfile),
name), verbose=verbose)
if type(obj) in (types.TypeType, types.ClassType):
for attr_name in dir(obj):
@@ -398,8 +425,8 @@ def execute_from_command_line(argv=None):
parser = optparse.OptionParser(usage=get_usage())
parser.add_option('-S', '--shell', dest='shell', metavar='APPNAME',
help='run web2py in interactive shell or IPython(if installed) ' +
'with specified appname')
help='run web2py in interactive shell ' +
'or IPython(if installed) with specified appname')
msg = 'run web2py in interactive shell or bpython (if installed) with'
msg += ' specified appname (if app does not exist it will be created).'
msg += '\n Use combined with --shell'
+18 -11
View File
@@ -26,7 +26,7 @@ from html import FORM, INPUT, LABEL, OPTION, SELECT
from html import TABLE, THEAD, TBODY, TR, TD, TH, STYLE
from html import URL, truncate_string, FIELDSET
from dal import DAL, Field, Table, Row, CALLABLETYPES, smart_query, \
bar_encode, Reference, REGEX_TABLE_DOT_FIELD
bar_encode, Reference, REGEX_TABLE_DOT_FIELD, Expression
from storage import Storage
from utils import md5_hash
from validators import IS_EMPTY_OR, IS_NOT_EMPTY, IS_LIST_OF, IS_DATE, \
@@ -1454,7 +1454,7 @@ class SQLFORM(FORM):
(cStringIO.StringIO(f), 'file.txt')
else:
# this should never happen, why does it happen?
print 'f=',repr(f)
#print 'f=',repr(f)
continue
newfilename = field.store(source_file, original_filename,
field.uploadfolder)
@@ -1822,10 +1822,11 @@ class SQLFORM(FORM):
session = current.session
response = current.response
logged = session.auth and session.auth.user
wenabled = (not user_signature or logged)
wenabled = (not user_signature or logged) and not groupby
create = wenabled and create
editable = wenabled and editable
deletable = wenabled and deletable
details = details and not groupby
rows = None
def fetch_count(dbset):
@@ -1835,9 +1836,10 @@ class SQLFORM(FORM):
if groupby:
c = 'count(*)'
nrows = db.executesql(
'select count(*) from (%s);' %
'select count(*) from (%s) _tmp;' %
dbset._select(c, left=left, cacheable=True,
groupby=groupby, cache=cache_count)[:-1])[0][0]
groupby=groupby,
cache=cache_count)[:-1])[0][0]
elif left:
c = 'count(*)'
nrows = dbset.select(c, left=left, cacheable=True, cache=cache_count).first()[c]
@@ -1930,11 +1932,16 @@ class SQLFORM(FORM):
columns.append(f)
fields.append(f)
if not field_id:
field_id = tables[0]._id
if not any(str(f)==str(field_id) for f in fields):
fields = [f for f in fields]+[field_id]
if groupby is None:
field_id = tables[0]._id
elif groupby and isinstance(groupby, Field):
field_id = groupby #take the field passed as groupby
elif groupby and isinstance(groupby, Expression):
field_id = groupby.first #take the first groupby field
table = field_id.table
tablename = table._tablename
if not any(str(f)==str(field_id) for f in fields):
fields = [f for f in fields]+[field_id]
if upload == '<default>':
upload = lambda filename: url(args=['download', filename])
if request.args(-2) == 'download':
@@ -2175,8 +2182,8 @@ class SQLFORM(FORM):
order = request.vars.order or ''
if sortable:
if order and not order == 'None':
tablename, fieldname = order.split('~')[-1].split('.', 1)
sort_field = db[tablename][fieldname]
otablename, ofieldname = order.split('~')[-1].split('.', 1)
sort_field = db[otablename][ofieldname]
exception = sort_field.type in ('date', 'datetime', 'time')
if exception:
orderby = (order[:1] == '~' and sort_field) or ~sort_field
@@ -2628,7 +2635,7 @@ class SQLFORM(FORM):
if isinstance(item,Table) and item._tablename in check:
tablename = item._tablename
linked_fieldnames = check[tablename]
td = item
td = item
elif isinstance(item,str) and item in check:
tablename = item
linked_fieldnames = check[item]
+68 -25
View File
@@ -1154,9 +1154,6 @@ class Auth(object):
del session.auth
# ## what happens after login?
self.next = current.request.vars._next
if isinstance(self.next, (list, tuple)):
self.next = self.next[0]
url_index = URL(controller, 'index')
url_login = URL(controller, function, args='login')
# ## what happens after registration?
@@ -1235,6 +1232,12 @@ class Auth(object):
else:
self.signature = None
def get_vars_next(self):
    """Return the sanitized ``_next`` request variable.

    When the variable was submitted more than once it arrives as a
    list/tuple; only the first value is honoured.
    """
    requested = current.request.vars._next
    if isinstance(requested, (list, tuple)):
        requested = requested[0]
    return requested
def _get_user_id(self):
"accessor for auth.user_id"
return self.user and self.user.id or None
@@ -1756,7 +1759,8 @@ class Auth(object):
description=str(description % vars),
origin=origin, user_id=user_id)
def get_or_create_user(self, keys, update_fields=['email'], login=True):
def get_or_create_user(self, keys, update_fields=['email'],
login=True, get=True):
"""
Used for alternate login methods:
If the user exists already then password is updated.
@@ -1786,6 +1790,9 @@ class Auth(object):
and ('registration_id' not in keys or user.registration_id != str(keys['registration_id'])):
user = None # THINK MORE ABOUT THIS? DO WE TRUST OPENID PROVIDER?
if user:
if not get:
# added for register_bare to avoid overwriting users
return None
update_keys = dict(registration_id=keys['registration_id'])
for key in update_fields:
if key in keys:
@@ -1874,10 +1881,7 @@ class Auth(object):
self.user = user
self.update_groups()
def login_bare(self, username, password):
"""
logins user as specified by usernname (or email) and password
"""
def _get_login_settings(self):
table_user = self.table_user()
if self.settings.login_userfield:
userfield = self.settings.login_userfield
@@ -1886,20 +1890,58 @@ class Auth(object):
else:
userfield = 'email'
passfield = self.settings.password_field
user = self.db(table_user[userfield] == username).select().first()
if user and user.get(passfield, False):
password = table_user[passfield].validate(password)[0]
if not user.registration_key and password == user[passfield]:
return Storage({"table_user": table_user,
"userfield": userfield,
"passfield": passfield})
def login_bare(self, username, password):
    """
    logins user as specified by username (or email) and password
    """
    cfg = self._get_login_settings()
    query = cfg.table_user[cfg.userfield] == username
    user = self.db(query).select().first()
    if user and user.get(cfg.passfield, False):
        # validate() transforms the raw password the same way it was
        # stored (e.g. hashing) before comparison
        candidate = cfg.table_user[cfg.passfield].validate(password)[0]
        if not user.registration_key and candidate == user[cfg.passfield]:
            self.login_user(user)
            return user
    else:
        # user not in database try other login methods
        for login_method in self.settings.login_methods:
            if login_method != self and login_method(username, password):
                self.user = username
                return username
    return False
def register_bare(self, **fields):
    """
    registers a user as specified by username (or email)
    and a raw password.
    """
    cfg = self._get_login_settings()
    if not fields.get(cfg.passfield):
        raise ValueError("register_bare: "
                         "password not provided or invalid")
    if not fields.get(cfg.userfield):
        raise ValueError("register_bare: "
                         "userfield not provided or invalid")
    # run the raw password through the field validator so it is stored
    # in the same (hashed) form as interactive registration would use
    validator = cfg.table_user[cfg.passfield].validate
    fields[cfg.passfield] = validator(fields[cfg.passfield])[0]
    user = self.get_or_create_user(
        fields, login=False, get=False,
        update_fields=self.settings.update_fields)
    # get_or_create_user returns None when the record already exists
    # (it ignores duplicate records when get=False)
    return user if user else False
def cas_login(
self,
next=DEFAULT,
@@ -2038,17 +2080,18 @@ class Auth(object):
except:
pass
### use session for federated login
if self.next:
session._auth_next = self.next
### use session for federated login
snext = self.get_vars_next()
if snext:
session._auth_next = snext
elif session._auth_next:
self.next = session._auth_next
snext = session._auth_next
### pass
if next is DEFAULT:
# important for security
next = self.settings.login_next
user_next = self.next
user_next = snext
if user_next:
external = user_next.split('://')
if external[0].lower() in ['http', 'https', 'ftp']:
@@ -2292,7 +2335,7 @@ class Auth(object):
redirect(self.settings.logged_url,
client_side=self.settings.client_side)
if next is DEFAULT:
next = self.next or self.settings.register_next
next = self.get_vars_next() or self.settings.register_next
if onvalidation is DEFAULT:
onvalidation = self.settings.register_onvalidation
if onaccept is DEFAULT:
@@ -2487,7 +2530,7 @@ class Auth(object):
response.flash = self.messages.function_disabled
return ''
if next is DEFAULT:
next = self.next or self.settings.retrieve_username_next
next = self.get_vars_next() or self.settings.retrieve_username_next
if onvalidation is DEFAULT:
onvalidation = self.settings.retrieve_username_onvalidation
if onaccept is DEFAULT:
@@ -2569,7 +2612,7 @@ class Auth(object):
response.flash = self.messages.function_disabled
return ''
if next is DEFAULT:
next = self.next or self.settings.retrieve_password_next
next = self.get_vars_next() or self.settings.retrieve_password_next
if onvalidation is DEFAULT:
onvalidation = self.settings.retrieve_password_onvalidation
if onaccept is DEFAULT:
@@ -2645,7 +2688,7 @@ class Auth(object):
session = current.session
if next is DEFAULT:
next = self.next or self.settings.reset_password_next
next = self.get_vars_next() or self.settings.reset_password_next
try:
key = request.vars.key or getarg(-1)
t0 = int(key.split('-')[0])
@@ -2706,7 +2749,7 @@ class Auth(object):
(self.settings.retrieve_password_captcha != False and self.settings.captcha)
if next is DEFAULT:
next = self.next or self.settings.request_reset_password_next
next = self.get_vars_next() or self.settings.request_reset_password_next
if not self.settings.mailer:
response.flash = self.messages.function_disabled
return ''
@@ -2809,7 +2852,7 @@ class Auth(object):
request = current.request
session = current.session
if next is DEFAULT:
next = self.next or self.settings.change_password_next
next = self.get_vars_next() or self.settings.change_password_next
if onvalidation is DEFAULT:
onvalidation = self.settings.change_password_onvalidation
if onaccept is DEFAULT:
@@ -2878,7 +2921,7 @@ class Auth(object):
request = current.request
session = current.session
if next is DEFAULT:
next = self.next or self.settings.profile_next
next = self.get_vars_next() or self.settings.profile_next
if onvalidation is DEFAULT:
onvalidation = self.settings.profile_onvalidation
if onaccept is DEFAULT:
+23 -16
View File
@@ -63,11 +63,13 @@ def run_system_tests(options):
if options.with_coverage:
try:
import coverage
coverage_config = os.environ.get("COVERAGE_PROCESS_START",
os.path.join('gluon', 'tests', 'coverage.ini')
)
call_args = ['coverage', 'run', '--rcfile=%s' % coverage_config,
'-m', 'unittest', '-v', 'gluon.tests']
coverage_config = os.environ.get(
"COVERAGE_PROCESS_START",
os.path.join('gluon', 'tests', 'coverage.ini'))
call_args = ['coverage', 'run', '--rcfile=%s' %
coverage_config,
'-m', 'unittest', '-v', 'gluon.tests']
except:
sys.stderr.write('Coverage was not installed, skipping\n')
sys.stderr.write("Python 2.7\n")
@@ -157,7 +159,8 @@ def presentation(root):
# Prevent garbage collection of img
pnl.image = img
def add_label(text='Change Me', font_size=12, foreground='#195866', height=1):
def add_label(text='Change Me', font_size=12,
foreground='#195866', height=1):
return Tkinter.Label(
master=canvas,
width=250,
@@ -331,11 +334,13 @@ class web2pyDialog(object):
apps = []
available_apps = [arq for arq in os.listdir('applications/')]
available_apps = [arq for arq in available_apps
if os.path.exists('applications/%s/models/scheduler.py' % arq)]
if os.path.exists(
'applications/%s/models/scheduler.py' % arq)]
if start:
#the widget takes care of starting the scheduler
if self.options.scheduler and self.options.with_scheduler:
apps = [app.strip() for app in self.options.scheduler.split(',')
apps = [app.strip() for app
in self.options.scheduler.split(',')
if app in available_apps]
for app in apps:
self.try_start_scheduler(app)
@@ -419,13 +424,14 @@ class web2pyDialog(object):
""" Connect pages """
#reset the menu
available_apps = [arq for arq in os.listdir('applications/')
if os.path.exists('applications/%s/__init__.py' % arq)]
if os.path.exists(
'applications/%s/__init__.py' % arq)]
self.pagesmenu.delete(0, len(available_apps))
for arq in available_apps:
url = self.url + arq
self.pagesmenu.add_command(label=url,
command=lambda u=url: start_browser(u))
self.pagesmenu.add_command(
label=url, command=lambda u=url: start_browser(u))
def quit(self, justHide=False):
""" Finish the program execution """
if justHide:
@@ -478,7 +484,8 @@ class web2pyDialog(object):
return self.error('invalid port number')
# Check for non default value for ssl inputs
if (len(self.options.ssl_certificate) > 0) or (len(self.options.ssl_private_key) > 0):
if (len(self.options.ssl_certificate) > 0 or
len(self.options.ssl_private_key) > 0):
proto = 'https'
else:
proto = 'http'
@@ -521,8 +528,8 @@ class web2pyDialog(object):
self.button_stop.configure(state='normal')
if not options.taskbar:
thread.start_new_thread(start_browser,
(get_url(ip, proto=proto, port=port), True))
thread.start_new_thread(
start_browser, (get_url(ip, proto=proto, port=port), True))
self.password.configure(state='readonly')
[ip.configure(state='disabled') for ip in self.ips.values()]
@@ -1110,7 +1117,7 @@ def start(cron=True):
if not options.args is None:
sys.argv[:] = options.args
run(options.shell, plain=options.plain, bpython=options.bpython,
import_models=options.import_models, startfile=options.run)
import_models=options.import_models, startfile=options.run, cronjob=options.cronjob)
return
# ## if -C start cron run (extcron) and exit
+102
View File
@@ -0,0 +1,102 @@
import os
import sys
import glob
import shutil
import re
regex_link = re.compile("""(href|src)\s*=\s*("|')(.+?)("|')""")
def getname(filename):
    """Derive a legal action/view name from an HTML file path.

    Takes the basename (portable across path separators — the original
    split on '/' only, which fails on Windows where glob yields '\\'
    separated paths), drops the final extension, then strips every
    non-alphanumeric character so the result can be used as a Python
    identifier and as a view filename.
    """
    base = os.path.splitext(os.path.basename(filename))[0]
    return re.sub(r'\W', '', base)
def make_controller(html_files):
    """Generate web2py controller source with one action per HTML file.

    Each action simply returns locals() so web2py renders the
    like-named view produced by make_views().
    """
    template = 'def %s():\n return locals()\n\n'
    return ''.join(template % getname(path) for path in html_files)
def fix_links(html, prefix):
    """Rewrite href/src attributes so the page works inside the app.

    Local .html targets become URL() calls to the generated controller;
    every other non-absolute target is served from static/<prefix>/.
    Links containing '://' are left untouched.
    """
    def rewrite(match):
        attr = match.group(1)
        target = match.group(3)
        if '://' not in target:
            if target.lower().endswith('.html') and '/' not in target:
                target = "{{=URL('%s','%s')}}" % (prefix, getname(target))
            elif target.startswith('./'):
                target = "{{=URL('static','%s/%s')}}" % (prefix, target[2:])
            elif target.startswith('/'):
                target = "{{=URL('static','%s/%s')}}" % (prefix, target[1:])
            else:
                target = "{{=URL('static','%s/%s')}}" % (prefix, target)
        return '%s="%s"' % (attr, target)
    return regex_link.sub(rewrite, html)
def make_views(html_files, prefix):
    """Build view files for the imported HTML pages.

    Each page becomes views/<prefix>/<name>.html. The longest common
    leading (start) and trailing (stop) character runs shared by ALL
    pages are factored out into views/<prefix>/layout.html as
    header + '{{include}}' + footer, and every page view is rewritten
    to {{extend}} that layout.

    Returns a dict mapping relative view path -> view source.
    """
    views = {}
    layout_name = os.path.join(prefix, 'layout.html')
    extend = "{{extend '%s'}}" % layout_name
    for filename in html_files:
        html = open(filename).read()
        name = getname(filename)
        views[os.path.join(prefix, name + '.html')] = fix_links(html, prefix)
    # Scan forward (start) and backward (stop) until the pages diverge.
    start = stop = None
    k = 0
    while start is None or stop is None:
        try:
            if start is None:
                if len(set(v[k] for v in views.values())) > 1:
                    start = k
            if stop is None:
                # BUG FIX: the original indexed v[len(v)-k], which raises
                # IndexError at k == 0 and collapsed start = stop = 0, so
                # the shared layout was never actually extracted.
                if len(set(v[len(v) - k - 1] for v in views.values())) > 1:
                    stop = k
        except IndexError:
            # Ran past the end of the shortest page: no further overlap.
            if start is None:
                start = k
            if stop is None:
                stop = k
        k += 1
    header = footer = ''
    for name in views:
        html = views[name]
        n = len(html)
        header, views[name], footer = \
            html[:start], extend + html[start:n - stop], html[n - stop:]
    views[layout_name] = header + '{{include}}' + footer
    return views
def recursive_overwrite(src, dest, ignore=None):
    """Recursively copy src onto dest, overwriting existing files.

    Behaves like shutil.copytree but tolerates a pre-existing
    destination tree. `ignore` follows the shutil.copytree convention:
    called as ignore(dir, entries) and returns names to skip.
    """
    if not os.path.isdir(src):
        shutil.copyfile(src, dest)
        return
    if not os.path.isdir(dest):
        os.makedirs(dest)
    entries = os.listdir(src)
    skip = ignore(src, entries) if ignore is not None else set()
    for entry in entries:
        if entry in skip:
            continue
        recursive_overwrite(os.path.join(src, entry),
                            os.path.join(dest, entry),
                            ignore)
def convert(source, destination, prefix='imported'):
    """Import a folder of static HTML pages into a web2py application.

    Copies everything under `source` into the app's static/<prefix>/
    folder, writes controllers/<prefix>.py with one action per HTML
    page, and writes the extracted views (including the shared layout)
    under views/<prefix>/.
    """
    html_files = glob.glob(os.path.join(source, '*.html'))
    static_folder = os.path.join(destination, 'static', prefix)
    recursive_overwrite(source, static_folder)
    controller = make_controller(html_files)
    views = make_views(html_files, prefix)
    controller_filename = os.path.join(destination, 'controllers',
                                       prefix + '.py')
    # Use context managers so handles are closed deterministically
    # (the original leaked open(...).write(...) file handles).
    with open(controller_filename, 'w') as handle:
        handle.write(controller)
    for name in views:
        fullname = os.path.join(destination, 'views', name)
        folder = os.path.dirname(fullname)
        if not os.path.exists(folder):
            os.makedirs(folder)
        with open(fullname, 'w') as handle:
            handle.write(views[name])
if __name__ == '__main__':
    # Script entry point: the original called convert() unconditionally,
    # which crashed on import and raised IndexError without arguments.
    if len(sys.argv) != 3:
        sys.exit('usage: %s <source_html_folder> <destination_app_folder>'
                 % sys.argv[0])
    convert(sys.argv[1], sys.argv[2])