@@ -59,3 +59,4 @@ HOWTO-web2py-devel
|
||||
*.sublime-workspace
|
||||
.idea/*
|
||||
site-packages/
|
||||
logs/
|
||||
|
||||
@@ -18,6 +18,10 @@ before_script:
|
||||
- if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install --download-cache $HOME/.pip-cache unittest2; fi
|
||||
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then pip install --download-cache $HOME/.pip-cache coverage; fi;
|
||||
- if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then pip install --download-cache $HOME/.pip-cache codecov; fi
|
||||
- mysql -e 'create database pydal;'
|
||||
- psql -c 'create database pydal;' -U postgres
|
||||
- psql -c 'create extension postgis;' -U postgres -d pydal;
|
||||
- psql -c 'SHOW SERVER_VERSION' -U postgres
|
||||
|
||||
|
||||
script: export COVERAGE_PROCESS_START=gluon/tests/coverage.ini; ./web2py.py --run_system_tests --with_coverage
|
||||
@@ -28,3 +32,6 @@ after_success:
|
||||
|
||||
notifications:
|
||||
email: true
|
||||
|
||||
addons:
|
||||
postgresql: "9.4"
|
||||
|
||||
@@ -1,3 +1,65 @@
|
||||
## trunk
|
||||
- new JWT implementation (experimental)
|
||||
- new gluon.contrib.redis_scheduler
|
||||
- BREAKING: changes to gluon.contrib.redis_cache
|
||||
BEFORE:
|
||||
from gluon.contrib.redis_cache import RedisCache
|
||||
cache.redis = RedisCache('localhost:6379',db=None, debug=True)
|
||||
NOW:
|
||||
from gluon.contrib.redis_utils import RConn
|
||||
from gluon.contrib.redis_cache import RedisCache
|
||||
rconn = RConn()
|
||||
# or RConn(host='localhost', port=6379,
|
||||
# db=0, password=None, socket_timeout=None,
|
||||
# socket_connect_timeout=None, .....)
|
||||
# exactly as a redis.StrictRedis instance
|
||||
cache.redis = RedisCache(redis_conn=rconn, debug=True)
|
||||
- BREAKING: changes to gluon.contrib.redis_session
|
||||
BEFORE:
|
||||
from gluon.contrib.redis_session import RedisSession
|
||||
sessiondb = RedisSession('localhost:6379',db=0, session_expiry=False)
|
||||
session.connect(request, response, db = sessiondb)
|
||||
NOW:
|
||||
from gluon.contrib.redis_utils import RConn
|
||||
from gluon.contrib.redis_session import RedisSession
|
||||
rconn = RConn()
|
||||
sessiondb = RedisSession(redis_conn=rconn, session_expiry=False)
|
||||
session.connect(request, response, db = sessiondb)
|
||||
|
||||
|
||||
## 2.13.1-2
|
||||
|
||||
- fixed a security issue in request_reset_password
|
||||
- added fabfile.py
|
||||
- fixed oauth2 renew token, thanks dokime7
|
||||
- fixed add_membership, del_membership, add_membership IntegrityError (when auth.enable_record_versioning)
|
||||
- allow passing unicode to template render
|
||||
- allow IS_NOT_IN_DB to work with custom primarykey, thanks timmyborg
|
||||
- allow HttpOnly cookies
|
||||
- french pluralizaiton rules, thanks Mathieu Clabaut
|
||||
- fixed bug in redirect to cas service, thanks Fernando González
|
||||
- allow deploying to pythonanywhere from the web2py admin that you're running locally, thanks Leonel
|
||||
- better tests
|
||||
- many more bug fixes
|
||||
|
||||
## 2.12.1-3
|
||||
|
||||
- security fix: Validate for open redirect everywhere, not just in login()
|
||||
- allow to pack invidual apps and selected files as packed exe files
|
||||
- allow bulk user registration with default bulk_register_enabled=False
|
||||
- allow unsorted multiword query in grid search
|
||||
- better MongoDB support with newer pyDAL
|
||||
- enable <app>/appadmin/manage/auth by default for user admin
|
||||
- allow mail.settings.server='logging:filename' to log emails to a file
|
||||
- better caching logic
|
||||
- fixed order of confirm-password field
|
||||
- TLS support in ldap
|
||||
- prettydate can do UTC
|
||||
- jquery 1.11.3
|
||||
- bootstrap 3.3.5
|
||||
- moved to codecov and enabled appveyor
|
||||
- many bug fixes
|
||||
|
||||
## 2.11.1
|
||||
|
||||
- Many small but significative improvements and bug fixes
|
||||
|
||||
@@ -11,7 +11,7 @@ clean:
|
||||
find ./ -name '*.rej' -exec rm -f {} \;
|
||||
find ./ -name '#*' -exec rm -f {} \;
|
||||
find ./ -name 'Thumbs.db' -exec rm -f {} \;
|
||||
find ./gluon/ -name '.*' -exec rm -f {} \;
|
||||
# find ./gluon/ -name '.*' -exec rm -f {} \;
|
||||
find ./gluon/ -name '*class' -exec rm -f {} \;
|
||||
find ./applications/admin/ -name '.*' -exec rm -f {} \;
|
||||
find ./applications/examples/ -name '.*' -exec rm -f {} \;
|
||||
@@ -32,7 +32,7 @@ update:
|
||||
echo "remember that pymysql was tweaked"
|
||||
src:
|
||||
### Use semantic versioning
|
||||
echo 'Version 2.11.2-stable+timestamp.'`date +%Y.%m.%d.%H.%M.%S` > VERSION
|
||||
echo 'Version 2.13.4-stable+timestamp.'`date +%Y.%m.%d.%H.%M.%S` > VERSION
|
||||
### rm -f all junk files
|
||||
make clean
|
||||
### clean up baisc apps
|
||||
|
||||
@@ -1 +1 @@
|
||||
Version 2.11.2-stable+timestamp.2015.05.30.11.29.46
|
||||
Version 2.13.3-stable+timestamp.2015.12.24.08.08.22
|
||||
|
||||
@@ -445,30 +445,31 @@ def ccache():
|
||||
gae_stats['oldest'] = GetInHMS(time.time() - gae_stats['oldest_item_age'])
|
||||
total.update(gae_stats)
|
||||
else:
|
||||
# get ram stats directly from the cache object
|
||||
ram_stats = cache.ram.stats[request.application]
|
||||
ram['hits'] = ram_stats['hit_total'] - ram_stats['misses']
|
||||
ram['misses'] = ram_stats['misses']
|
||||
try:
|
||||
ram['ratio'] = ram['hits'] * 100 / ram_stats['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
|
||||
for key, value in cache.ram.storage.iteritems():
|
||||
if isinstance(value, dict):
|
||||
ram['hits'] = value['hit_total'] - value['misses']
|
||||
ram['misses'] = value['misses']
|
||||
try:
|
||||
ram['ratio'] = ram['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
ram['entries'] += 1
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
ram['entries'] += 1
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
|
||||
for key in cache.disk.storage:
|
||||
value = cache.disk.storage[key]
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
if isinstance(value[1], dict):
|
||||
disk['hits'] = value[1]['hit_total'] - value[1]['misses']
|
||||
disk['misses'] = value[1]['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
disk['ratio'] = disk['hits'] * 100 / value[1]['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
@@ -485,7 +486,7 @@ def ccache():
|
||||
ram_keys.remove('oldest')
|
||||
for key in ram_keys:
|
||||
total[key] = ram[key] + disk[key]
|
||||
|
||||
|
||||
try:
|
||||
total['ratio'] = total['hits'] * 100 / (total['hits'] +
|
||||
total['misses'])
|
||||
@@ -575,7 +576,7 @@ def bg_graph_model():
|
||||
meta_graphmodel = dict(group=request.application, color='#ECECEC')
|
||||
|
||||
group = meta_graphmodel['group'].replace(' ', '')
|
||||
if not subgraphs.has_key(group):
|
||||
if group not in subgraphs:
|
||||
subgraphs[group] = dict(meta=meta_graphmodel, tables=[])
|
||||
subgraphs[group]['tables'].append(tablename)
|
||||
|
||||
|
||||
@@ -484,9 +484,15 @@ def cleanup():
|
||||
|
||||
def compile_app():
|
||||
app = get_app()
|
||||
c = app_compile(app, request)
|
||||
c = app_compile(app, request,
|
||||
skip_failed_views = (request.args(1) == 'skip_failed_views'))
|
||||
if not c:
|
||||
session.flash = T('application compiled')
|
||||
elif isinstance(c, list):
|
||||
session.flash = DIV(*[T('application compiled'), BR(), BR(),
|
||||
T('WARNING: The following views could not be compiled:'), BR()] +
|
||||
[CAT(BR(), view) for view in c] +
|
||||
[BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
|
||||
else:
|
||||
session.flash = DIV(T('Cannot compile: there are errors in your app:'),
|
||||
CODE(c))
|
||||
|
||||
@@ -0,0 +1,105 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import base64
|
||||
import os
|
||||
import re
|
||||
import gzip
|
||||
import tarfile
|
||||
try:
|
||||
from cStringIO import StringIO
|
||||
except ImportError:
|
||||
from StringIO import StringIO
|
||||
from xmlrpclib import ProtocolError
|
||||
from gluon.contrib.simplejsonrpc import ServerProxy
|
||||
|
||||
|
||||
def deploy():
|
||||
response.title = T('Deploy to pythonanywhere')
|
||||
return {}
|
||||
|
||||
|
||||
def create_account():
|
||||
""" Create a PythonAnywhere account """
|
||||
if not request.vars:
|
||||
raise HTTP(400)
|
||||
|
||||
if request.vars.username and request.vars.web2py_admin_password:
|
||||
# Check if web2py is already there otherwise we get an error 500 too.
|
||||
client = ServerProxy('https://%(username)s:%(web2py_admin_password)s@%(username)s.pythonanywhere.com/admin/webservices/call/jsonrpc' % request.vars)
|
||||
try:
|
||||
if client.login() is True:
|
||||
return response.json({'status': 'ok'})
|
||||
except ProtocolError as error:
|
||||
pass
|
||||
|
||||
import urllib, urllib2
|
||||
url = 'https://www.pythonanywhere.com/api/web2py/create_account'
|
||||
data = urllib.urlencode(request.vars)
|
||||
req = urllib2.Request(url, data)
|
||||
|
||||
try:
|
||||
reply = urllib2.urlopen(req)
|
||||
except urllib2.HTTPError as error:
|
||||
if error.code == 400:
|
||||
reply = error
|
||||
elif error.code == 500:
|
||||
return response.json({'status':'error', 'errors':{'username': ['An App other than web2py is installed in the domain %(username)s.pythonanywhere.com' % request.vars]}})
|
||||
else:
|
||||
raise
|
||||
response.headers['Content-Type'] = 'application/json'
|
||||
return reply.read()
|
||||
|
||||
|
||||
def list_apps():
|
||||
""" Get a list of apps both remote and local """
|
||||
if not request.vars.username or not request.vars.password:
|
||||
raise HTTP(400)
|
||||
client = ServerProxy('https://%(username)s:%(password)s@%(username)s.pythonanywhere.com/admin/webservices/call/jsonrpc' % request.vars)
|
||||
regex = re.compile('^\w+$')
|
||||
local = [f for f in os.listdir(apath(r=request)) if regex.match(f)]
|
||||
try:
|
||||
pythonanywhere = client.list_apps()
|
||||
except ProtocolError as error:
|
||||
raise HTTP(error.errcode)
|
||||
return response.json({'local': local, 'pythonanywhere': pythonanywhere})
|
||||
|
||||
|
||||
def bulk_install():
|
||||
""" Install a list of apps """
|
||||
|
||||
def b64pack(app):
|
||||
"""
|
||||
Given an app's name, return the base64 representation of its packed version.
|
||||
"""
|
||||
folder = apath(app, r=request)
|
||||
tmpfile = StringIO()
|
||||
tar = tarfile.TarFile(fileobj=tmpfile, mode='w')
|
||||
try:
|
||||
filenames = listdir(folder, '^[\w\.\-]+$', add_dirs=True,
|
||||
exclude_content_from=['cache', 'sessions', 'errors'])
|
||||
for fname in filenames:
|
||||
tar.add(os.path.join(folder, fname), fname, False)
|
||||
finally:
|
||||
tar.close()
|
||||
tmpfile.seek(0)
|
||||
gzfile = StringIO()
|
||||
w2pfp = gzip.GzipFile(fileobj=gzfile, mode='wb')
|
||||
w2pfp.write(tmpfile.read())
|
||||
w2pfp.close()
|
||||
gzfile.seek(0)
|
||||
return base64.b64encode(gzfile.read())
|
||||
|
||||
request.vars.apps = request.vars['apps[]']
|
||||
if not request.vars.apps or not request.vars.username or not request.vars.password:
|
||||
raise HTTP(400)
|
||||
if not isinstance(request.vars.apps, list):
|
||||
request.vars.apps = [request.vars.apps] # Only one app selected
|
||||
|
||||
client = ServerProxy('https://%(username)s:%(password)s@%(username)s.pythonanywhere.com/admin/webservices/call/jsonrpc' % request.vars)
|
||||
|
||||
for app in request.vars.apps:
|
||||
try:
|
||||
client.install(app, app+'.w2p', b64pack(app))
|
||||
except ProtocolError as error:
|
||||
raise HTTP(error.errcode)
|
||||
|
||||
return response.json({'status': 'ok'})
|
||||
+408
-377
@@ -1,377 +1,408 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
{
|
||||
'!langcode!': 'pt',
|
||||
'!langname!': 'Português',
|
||||
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "campo1=\'novo_valor\'". Não é permitido atualizar ou apagar resultados de um JOIN',
|
||||
'%s %%{row} deleted': '%s registros apagados',
|
||||
'%s %%{row} updated': '%s registros atualizados',
|
||||
'%Y-%m-%d': '%d/%m/%Y',
|
||||
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
|
||||
'(requires internet access)': '(requer acesso à internet)',
|
||||
'(requires internet access, experimental)': '(requer acesso à internet, experimental)',
|
||||
'(something like "it-it")': '(algo como "it-it")',
|
||||
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(file **gluon/contrib/plural_rules/%s.py** is not found)',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
|
||||
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
|
||||
'A new version of web2py is available': 'Está disponível uma nova versão do web2py',
|
||||
'A new version of web2py is available: %s': 'Está disponível uma nova versão do web2py: %s',
|
||||
'About': 'sobre',
|
||||
'About application': 'Sobre a aplicação',
|
||||
'additional code for your application': 'código adicional para sua aplicação',
|
||||
'Additional code for your application': 'Código adicional para a sua aplicação',
|
||||
'admin disabled because no admin password': ' admin desabilitado por falta de senha definida',
|
||||
'admin disabled because not supported on google app engine': 'admin dehabilitado, não é soportado no GAE',
|
||||
'admin disabled because unable to access password file': 'admin desabilitado, não foi possível ler o arquivo de senha',
|
||||
'Admin is disabled because insecure channel': 'Admin desabilitado pois o canal não é seguro',
|
||||
'Admin is disabled because unsecure channel': 'Admin desabilitado pois o canal não é seguro',
|
||||
'Admin language': 'Linguagem do Admin',
|
||||
'administrative interface': 'interface administrativa',
|
||||
'Administrator Password:': 'Senha de administrador:',
|
||||
'and rename it (required):': 'e renomeie (requerido):',
|
||||
'and rename it:': ' e renomeie:',
|
||||
'appadmin': 'appadmin',
|
||||
'appadmin is disabled because insecure channel': 'admin desabilitado, canal inseguro',
|
||||
'application "%s" uninstalled': 'aplicação "%s" desinstalada',
|
||||
'application compiled': 'aplicação compilada',
|
||||
'application is compiled and cannot be designed': 'A aplicação está compilada e não pode ser modificada',
|
||||
'Application name:': 'Nome da aplicação:',
|
||||
'are not used': 'não usadas',
|
||||
'are not used yet': 'ainda não usadas',
|
||||
'Are you sure you want to delete file "%s"?': 'Tem certeza que deseja apagar o arquivo "%s"?',
|
||||
'Are you sure you want to delete plugin "%s"?': 'Tem certeza que deseja apagar o plugin "%s"?',
|
||||
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
|
||||
'Are you sure you want to uninstall application "%s"': 'Tem certeza que deseja apagar a aplicação "%s"?',
|
||||
'Are you sure you want to uninstall application "%s"?': 'Tem certeza que deseja apagar a aplicação "%s"?',
|
||||
'Are you sure you want to upgrade web2py now?': 'Tem certeza que deseja atualizar o web2py agora?',
|
||||
'arguments': 'argumentos',
|
||||
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENÇÃO o login requer uma conexão segura (HTTPS) ou executar de localhost.',
|
||||
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENÇÃO OS TESTES NÃO THREAD SAFE, NÃO EFETUE MÚLTIPLOS TESTES AO MESMO TEMPO.',
|
||||
'ATTENTION: you cannot edit the running application!': 'ATENÇÃO: Não pode modificar a aplicação em execução!',
|
||||
'Autocomplete Python Code': 'Autocompletar Código Python',
|
||||
'Available databases and tables': 'Bancos de dados e tabelas disponíveis',
|
||||
'back': 'voltar',
|
||||
'browse': 'buscar',
|
||||
'cache': 'cache',
|
||||
'cache, errors and sessions cleaned': 'cache, erros e sessões eliminadas',
|
||||
'can be a git repo': 'can be a git repo',
|
||||
'Cannot be empty': 'Não pode ser vazio',
|
||||
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Não é possível compilar: Existem erros em sua aplicação. Depure, corrija os errros e tente novamente',
|
||||
'Cannot compile: there are errors in your app:': 'Não é possível compilar: Existem erros em sua aplicação',
|
||||
'cannot create file': 'Não é possível criar o arquivo',
|
||||
'cannot upload file "%(filename)s"': 'não é possível fazer upload do arquivo "%(filename)s"',
|
||||
'Change admin password': 'mudar senha de administrador',
|
||||
'change editor settings': 'mudar definições do editor',
|
||||
'Change Password': 'Trocar Senha',
|
||||
'check all': 'marcar todos',
|
||||
'Check for upgrades': 'checar por atualizações',
|
||||
'Check to delete': 'Marque para apagar',
|
||||
'Checking for upgrades...': 'Buscando atualizações...',
|
||||
'Clean': 'limpar',
|
||||
'click here for online examples': 'clique para ver exemplos online',
|
||||
'click here for the administrative interface': 'Clique aqui para acessar a interface administrativa',
|
||||
'Click row to expand traceback': 'Clique em uma coluna para expandir o log do erro',
|
||||
'click to check for upgrades': 'clique aqui para checar por atualizações',
|
||||
'click to open': 'clique para abrir',
|
||||
'Client IP': 'IP do cliente',
|
||||
'code': 'código',
|
||||
'collapse/expand all': 'colapsar/expandir tudo',
|
||||
'commit (mercurial)': 'commit (mercurial)',
|
||||
'Compile': 'compilar',
|
||||
'compiled application removed': 'aplicação compilada removida',
|
||||
'Controllers': 'Controladores',
|
||||
'controllers': 'controladores',
|
||||
'Count': 'Contagem',
|
||||
'Create': 'criar',
|
||||
'create file with filename:': 'criar um arquivo com o nome:',
|
||||
'Create new application using the Wizard': 'Criar nova aplicação utilizando o assistente',
|
||||
'create new application:': 'nome da nova aplicação:',
|
||||
'Create new simple application': 'Crie uma nova aplicação',
|
||||
'Create/Upload': 'Create/Upload',
|
||||
'created by': 'criado por',
|
||||
'crontab': 'crontab',
|
||||
'Current request': 'Requisição atual',
|
||||
'Current response': 'Resposta atual',
|
||||
'Current session': 'Sessão atual',
|
||||
'currently running': 'Executando',
|
||||
'currently saved or': 'Atualmente salvo ou',
|
||||
'customize me!': 'Modifique-me',
|
||||
'data uploaded': 'Dados enviados',
|
||||
'database': 'banco de dados',
|
||||
'database %s select': 'Seleção no banco de dados %s',
|
||||
'database administration': 'administração de banco de dados',
|
||||
'Date and Time': 'Data e Hora',
|
||||
'db': 'db',
|
||||
'Debug': 'Debug',
|
||||
'defines tables': 'define as tabelas',
|
||||
'Delete': 'Apague',
|
||||
'delete': 'apagar',
|
||||
'delete all checked': 'apagar marcados',
|
||||
'delete plugin': 'apagar plugin',
|
||||
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
|
||||
'Delete:': 'Apague:',
|
||||
'Deploy': 'publicar',
|
||||
'Deploy on Google App Engine': 'Publicar no Google App Engine',
|
||||
'Deploy to OpenShift': 'Deploy to OpenShift',
|
||||
'Description': 'Descrição',
|
||||
'design': 'modificar',
|
||||
'DESIGN': 'Projeto',
|
||||
'Design for': 'Projeto de',
|
||||
'Detailed traceback description': 'Detailed traceback description',
|
||||
'direction: ltr': 'direção: ltr',
|
||||
'Disable': 'Disable',
|
||||
'docs': 'docs',
|
||||
'done!': 'feito!',
|
||||
'download layouts': 'download layouts',
|
||||
'Download layouts from repository': 'Download layouts from repository',
|
||||
'download plugins': 'download plugins',
|
||||
'Download plugins from repository': 'Download plugins from repository',
|
||||
'E-mail': 'E-mail',
|
||||
'EDIT': 'EDITAR',
|
||||
'Edit': 'editar',
|
||||
'Edit application': 'Editar aplicação',
|
||||
'edit controller': 'editar controlador',
|
||||
'Edit current record': 'Editar o registro atual',
|
||||
'Edit Profile': 'Editar Perfil',
|
||||
'edit views:': 'editar visões:',
|
||||
'Editing %s': 'A Editar %s',
|
||||
'Editing file': 'Editando arquivo',
|
||||
'Editing file "%s"': 'Editando arquivo "%s"',
|
||||
'Editing Language file': 'Editando arquivo de linguagem',
|
||||
'Enterprise Web Framework': 'Framework web empresarial',
|
||||
'Error': 'Erro',
|
||||
'Error logs for "%(app)s"': 'Logs de erro para "%(app)s"',
|
||||
'Error snapshot': 'Error snapshot',
|
||||
'Error ticket': 'Error ticket',
|
||||
'Errors': 'erros',
|
||||
'Exception instance attributes': 'Atributos da instancia de excessão',
|
||||
'Exit Fullscreen': 'Sair de Ecrã Inteiro',
|
||||
'Expand Abbreviation (html files only)': 'Expandir Abreviação (só para ficheiros html)',
|
||||
'export as csv file': 'exportar como arquivo CSV',
|
||||
'exposes': 'expõe',
|
||||
'extends': 'estende',
|
||||
'failed to reload module': 'Falha ao recarregar o módulo',
|
||||
'failed to reload module because:': 'falha ao recarregar o módulo por:',
|
||||
'File': 'Arquivo',
|
||||
'file "%(filename)s" created': 'arquivo "%(filename)s" criado',
|
||||
'file "%(filename)s" deleted': 'arquivo "%(filename)s" apagado',
|
||||
'file "%(filename)s" uploaded': 'arquivo "%(filename)s" enviado',
|
||||
'file "%(filename)s" was not deleted': 'arquivo "%(filename)s" não foi apagado',
|
||||
'file "%s" of %s restored': 'arquivo "%s" de %s restaurado',
|
||||
'file changed on disk': 'arquivo modificado no disco',
|
||||
'file does not exist': 'arquivo não existe',
|
||||
'file saved on %(time)s': 'arquivo salvo em %(time)s',
|
||||
'file saved on %s': 'arquivo salvo em %s',
|
||||
'filter': 'filtro',
|
||||
'Find Next': 'Localizar Seguinte',
|
||||
'Find Previous': 'Localizar Anterior',
|
||||
'First name': 'Nome',
|
||||
'Frames': 'Frames',
|
||||
'Functions with no doctests will result in [passed] tests.': 'Funções sem doctests resultarão em testes [aceitos].',
|
||||
'graph model': 'graph model',
|
||||
'Group ID': 'ID do Grupo',
|
||||
'Hello World': 'Olá Mundo',
|
||||
'Help': 'ajuda',
|
||||
'Hide/Show Translated strings': '',
|
||||
'htmledit': 'htmledit',
|
||||
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'Se o relatório acima contém um número de ticket, isso indica uma falha no controlador em execução, antes de tantar executar os doctests. Isto acontece geralmente por erro de endentação ou erro fora do código da função.\r\nO titulo em verde indica que os testes (se definidos) passaram. Neste caso os testes não são mostrados.',
|
||||
'Import/Export': 'Importar/Exportar',
|
||||
'includes': 'inclui',
|
||||
'insert new': 'inserir novo',
|
||||
'insert new %s': 'inserir novo %s',
|
||||
'inspect attributes': 'inspecionar atributos',
|
||||
'Install': 'instalar',
|
||||
'Installed applications': 'Aplicações instaladas',
|
||||
'internal error': 'erro interno',
|
||||
'Internal State': 'Estado Interno',
|
||||
'Invalid action': 'Ação inválida',
|
||||
'Invalid email': 'E-mail inválido',
|
||||
'invalid password': 'senha inválida',
|
||||
'Invalid Query': 'Consulta inválida',
|
||||
'invalid request': 'solicitação inválida',
|
||||
'invalid ticket': 'ticket inválido',
|
||||
'Keyboard shortcuts': 'Atalhos de teclado',
|
||||
'language file "%(filename)s" created/updated': 'arquivo de linguagem "%(filename)s" criado/atualizado',
|
||||
'Language files (static strings) updated': 'Arquivos de linguagem (textos estáticos) atualizados',
|
||||
'languages': 'linguagens',
|
||||
'Languages': 'Linguagens',
|
||||
'languages updated': 'linguagens atualizadas',
|
||||
'Last name': 'Sobrenome',
|
||||
'Last saved on:': 'Salvo em:',
|
||||
'License for': 'Licença para',
|
||||
'loading...': 'carregando...',
|
||||
'locals': 'locals',
|
||||
'Login': 'Entrar',
|
||||
'login': 'inicio de sessão',
|
||||
'Login to the Administrative Interface': 'Entrar na interface adminitrativa',
|
||||
'Logout': 'finalizar sessão',
|
||||
'Lost Password': 'Senha perdida',
|
||||
'Manage': 'Manage',
|
||||
'manage': 'gerenciar',
|
||||
'merge': 'juntar',
|
||||
'Models': 'Modelos',
|
||||
'models': 'modelos',
|
||||
'Modules': 'Módulos',
|
||||
'modules': 'módulos',
|
||||
'Name': 'Nome',
|
||||
'new application "%s" created': 'nova aplicação "%s" criada',
|
||||
'New application wizard': 'Assistente para novas aplicações ',
|
||||
'new plugin installed': 'novo plugin instalado',
|
||||
'New Record': 'Novo registro',
|
||||
'new record inserted': 'novo registro inserido',
|
||||
'New simple application': 'Nova aplicação básica',
|
||||
'next 100 rows': 'próximos 100 registros',
|
||||
'NO': 'NÃO',
|
||||
'No databases in this application': 'Não existem bancos de dados nesta aplicação',
|
||||
'no match': 'não encontrado',
|
||||
'no package selected': 'nenhum pacote selecionado',
|
||||
'online designer': 'online designer',
|
||||
'or alternatively': 'or alternatively',
|
||||
'Or Get from URL:': 'Ou Obtenha do URL:',
|
||||
'or import from csv file': 'ou importar de um arquivo CSV',
|
||||
'or provide app url:': 'ou forneça a url de uma aplicação:',
|
||||
'or provide application url:': 'ou forneça a url de uma aplicação:',
|
||||
'Origin': 'Origem',
|
||||
'Original/Translation': 'Original/Tradução',
|
||||
'Overwrite installed app': 'sobrescrever aplicação instalada',
|
||||
'Pack all': 'criar pacote',
|
||||
'Pack compiled': 'criar pacote compilado',
|
||||
'Pack custom': 'Pack custom',
|
||||
'pack plugin': 'empacotar plugin',
|
||||
'PAM authenticated user, cannot change password here': 'usuario autenticado por PAM, não pode alterar a senha por aqui',
|
||||
'Password': 'Senha',
|
||||
'password changed': 'senha alterada',
|
||||
'Peeking at file': 'Visualizando arquivo',
|
||||
'plugin "%(plugin)s" deleted': 'plugin "%(plugin)s" eliminado',
|
||||
'Plugin "%s" in application': 'Plugin "%s" na aplicação',
|
||||
'plugins': 'plugins',
|
||||
'Plugins': 'Plugins',
|
||||
'Plural-Forms:': 'Plural-Forms:',
|
||||
'Powered by': 'Este site utiliza',
|
||||
'previous 100 rows': '100 registros anteriores',
|
||||
'Private files': 'Private files',
|
||||
'private files': 'private files',
|
||||
'Query:': 'Consulta:',
|
||||
'Rapid Search': 'Rapid Search',
|
||||
'record': 'registro',
|
||||
'record does not exist': 'o registro não existe',
|
||||
'record id': 'id do registro',
|
||||
'Record ID': 'ID do Registro',
|
||||
'Register': 'Registrar-se',
|
||||
'Registration key': 'Chave de registro',
|
||||
'Reload routes': 'Reload routes',
|
||||
'Remove compiled': 'eliminar compilados',
|
||||
'Replace': 'Substituir',
|
||||
'Replace All': 'Substituir Tudo',
|
||||
'request': 'request',
|
||||
'Resolve Conflict file': 'Arquivo de resolução de conflito',
|
||||
'response': 'response',
|
||||
'restore': 'restaurar',
|
||||
'revert': 'reverter',
|
||||
'Role': 'Papel',
|
||||
'Rows in table': 'Registros na tabela',
|
||||
'Rows selected': 'Registros selecionados',
|
||||
'rules are not defined': 'rules are not defined',
|
||||
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
|
||||
'Running on %s': 'A correr em %s',
|
||||
'Save': 'Save',
|
||||
'save': 'salvar',
|
||||
'Save file:': 'Gravar ficheiro:',
|
||||
'Save file: %s': 'Gravar ficheiro: %s',
|
||||
'Save via Ajax': 'Gravar via Ajax',
|
||||
'Saved file hash:': 'Hash do arquivo salvo:',
|
||||
'selected': 'selecionado(s)',
|
||||
'session': 'session',
|
||||
'session expired': 'sessão expirada',
|
||||
'shell': 'Terminal',
|
||||
'Site': 'site',
|
||||
'some files could not be removed': 'alguns arquicos não puderam ser removidos',
|
||||
'Start searching': 'Start searching',
|
||||
'Start wizard': 'iniciar assistente',
|
||||
'state': 'estado',
|
||||
'Static': 'Static',
|
||||
'static': 'estáticos',
|
||||
'Static files': 'Arquivos estáticos',
|
||||
'Submit': 'Submit',
|
||||
'submit': 'enviar',
|
||||
'Sure you want to delete this object?': 'Tem certeza que deseja apaagr este objeto?',
|
||||
'table': 'tabela',
|
||||
'Table name': 'Nome da tabela',
|
||||
'test': 'testar',
|
||||
'Testing application': 'Testando a aplicação',
|
||||
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'A "consulta" é uma condição como "db.tabela.campo1==\'valor\'". Algo como "db.tabela1.campo1==db.tabela2.campo2" resulta em um JOIN SQL.',
|
||||
'the application logic, each URL path is mapped in one exposed function in the controller': 'A lógica da aplicação, cada URL é mapeada para uma função exposta pelo controlador',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
|
||||
'the data representation, define database tables and sets': 'A representação dos dadps, define tabelas e estruturas de dados',
|
||||
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
|
||||
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
|
||||
'the presentations layer, views are also known as templates': 'A camada de apresentação, As visões também são chamadas de templates',
|
||||
'There are no controllers': 'Não existem controllers',
|
||||
'There are no models': 'Não existem modelos',
|
||||
'There are no modules': 'Não existem módulos',
|
||||
'There are no plugins': 'There are no plugins',
|
||||
'There are no private files': '',
|
||||
'There are no static files': 'Não existem arquicos estáticos',
|
||||
'There are no translators, only default language is supported': 'Não há traduções, somente a linguagem padrão é suportada',
|
||||
'There are no views': 'Não existem visões',
|
||||
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
|
||||
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
|
||||
'these files are served without processing, your images go here': 'Estes arquivos são servidos sem processamento, suas imagens ficam aqui',
|
||||
'This is the %(filename)s template': 'Este é o template %(filename)s',
|
||||
'Ticket': 'Ticket',
|
||||
'Ticket ID': 'Ticket ID',
|
||||
'Timestamp': 'Data Atual',
|
||||
'TM': 'MR',
|
||||
'to previous version.': 'para a versão anterior.',
|
||||
'To create a plugin, name a file/folder plugin_[name]': 'Para criar um plugin, nomeio um arquivo/pasta como plugin_[nome]',
|
||||
'toggle breakpoint': 'toggle breakpoint',
|
||||
'Toggle comment': 'Toggle comment',
|
||||
'Toggle Fullscreen': 'Toggle Fullscreen',
|
||||
'Traceback': 'Traceback',
|
||||
'translation strings for the application': 'textos traduzidos para a aplicação',
|
||||
'Translation strings for the application': 'Translation strings for the application',
|
||||
'try': 'tente',
|
||||
'try something like': 'tente algo como',
|
||||
'Try the mobile interface': 'Try the mobile interface',
|
||||
'Unable to check for upgrades': 'Não é possível checar as atualizações',
|
||||
'unable to create application "%s"': 'não é possível criar a aplicação "%s"',
|
||||
'unable to delete file "%(filename)s"': 'não é possível apagar o arquivo "%(filename)s"',
|
||||
'unable to delete file plugin "%(plugin)s"': 'não é possível apagar o plugin "%(plugin)s"',
|
||||
'Unable to download': 'Não é possível efetuar o download',
|
||||
'Unable to download app': 'Não é possível baixar a aplicação',
|
||||
'Unable to download app because:': 'Não é possível baixar a aplicação porque:',
|
||||
'Unable to download because': 'Não é possível baixar porque',
|
||||
'unable to parse csv file': 'não é possível analisar o arquivo CSV',
|
||||
'unable to uninstall "%s"': 'não é possível desinstalar "%s"',
|
||||
'unable to upgrade because "%s"': 'não é possível atualizar porque "%s"',
|
||||
'uncheck all': 'desmarcar todos',
|
||||
'Uninstall': 'desinstalar',
|
||||
'update': 'atualizar',
|
||||
'update all languages': 'atualizar todas as linguagens',
|
||||
'Update:': 'Atualizar:',
|
||||
'upgrade web2py now': 'atualize o web2py agora',
|
||||
'upload': 'upload',
|
||||
'Upload': 'Upload',
|
||||
'Upload & install packed application': 'Faça upload e instale uma aplicação empacotada',
|
||||
'Upload a package:': 'Faça upload de um pacote:',
|
||||
'Upload and install packed application': 'Upload and install packed application',
|
||||
'upload application:': 'Fazer upload de uma aplicação:',
|
||||
'Upload existing application': 'Faça upload de uma aplicação existente',
|
||||
'upload file:': 'Enviar arquivo:',
|
||||
'upload plugin file:': 'Enviar arquivo de plugin:',
|
||||
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, y ~(...) para NOT, para criar consultas mais complexas.',
|
||||
'Use an url:': 'Use uma url:',
|
||||
'User ID': 'ID do Usuario',
|
||||
'variables': 'variáveis',
|
||||
'Version': 'Versão',
|
||||
'versioning': 'versionamento',
|
||||
'Versioning': 'Versioning',
|
||||
'view': 'visão',
|
||||
'Views': 'Visões',
|
||||
'views': 'visões',
|
||||
'Web Framework': 'Web Framework',
|
||||
'web2py is up to date': 'web2py está atualizado',
|
||||
'web2py Recent Tweets': 'Tweets Recentes de @web2py',
|
||||
'web2py upgraded; please restart it': 'web2py atualizado; favor reiniciar',
|
||||
'Welcome to web2py': 'Bem-vindo ao web2py',
|
||||
'YES': 'SIM',
|
||||
}
|
||||
# -*- coding: utf-8 -*-
|
||||
{
|
||||
'!langcode!': 'pt',
|
||||
'!langname!': 'Português',
|
||||
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "campo1=\'novo_valor\'". Não é permitido atualizar ou apagar resultados de um JOIN',
|
||||
'%s %%{row} deleted': '%s registros apagados',
|
||||
'%s %%{row} updated': '%s registros atualizados',
|
||||
'%Y-%m-%d': '%d/%m/%Y',
|
||||
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
|
||||
'(requires internet access)': '(requer acesso à internet)',
|
||||
'(requires internet access, experimental)': '(requer acesso à internet, experimental)',
|
||||
'(something like "it-it")': '(algo como "it-it")',
|
||||
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(file **gluon/contrib/plural_rules/%s.py** is not found)',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
|
||||
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
|
||||
'A new version of web2py is available': 'Está disponível uma nova versão do web2py',
|
||||
'A new version of web2py is available: %s': 'Está disponível uma nova versão do web2py: %s',
|
||||
'About': 'sobre',
|
||||
'About application': 'Sobre a aplicação',
|
||||
'Accept Terms': 'Accept Terms',
|
||||
'additional code for your application': 'código adicional para sua aplicação',
|
||||
'Additional code for your application': 'Código adicional para a sua aplicação',
|
||||
'admin disabled because no admin password': ' admin desabilitado por falta de senha definida',
|
||||
'admin disabled because not supported on google app engine': 'admin desabilitado, não é suportado no GAE',
|
||||
'admin disabled because unable to access password file': 'admin desabilitado, não foi possível ler o arquivo de senha',
|
||||
'Admin is disabled because insecure channel': 'Admin desabilitado pois o canal não é seguro',
|
||||
'Admin is disabled because unsecure channel': 'Admin desabilitado pois o canal não é seguro',
|
||||
'Admin language': 'Linguagem do Admin',
|
||||
'administrative interface': 'interface administrativa',
|
||||
'Administrator Password:': 'Senha de administrador:',
|
||||
'and rename it (required):': 'e renomeie (requerido):',
|
||||
'and rename it:': ' e renomeie:',
|
||||
'appadmin': 'appadmin',
|
||||
'appadmin is disabled because insecure channel': 'admin desabilitado, canal inseguro',
|
||||
'application "%s" uninstalled': 'aplicação "%s" desinstalada',
|
||||
'application compiled': 'aplicação compilada',
|
||||
'application is compiled and cannot be designed': 'A aplicação está compilada e não pode ser modificada',
|
||||
'Application name:': 'Nome da aplicação:',
|
||||
'are not used': 'não usadas',
|
||||
'are not used yet': 'ainda não usadas',
|
||||
'Are you sure you want to delete file "%s"?': 'Tem certeza que deseja apagar o arquivo "%s"?',
|
||||
'Are you sure you want to delete plugin "%s"?': 'Tem certeza que deseja apagar o plugin "%s"?',
|
||||
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
|
||||
'Are you sure you want to uninstall application "%s"': 'Tem certeza que deseja apagar a aplicação "%s"?',
|
||||
'Are you sure you want to uninstall application "%s"?': 'Tem certeza que deseja apagar a aplicação "%s"?',
|
||||
'Are you sure you want to upgrade web2py now?': 'Tem certeza que deseja atualizar o web2py agora?',
|
||||
'arguments': 'argumentos',
|
||||
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENÇÃO o login requer uma conexão segura (HTTPS) ou executar de localhost.',
|
||||
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENÇÃO OS TESTES NÃO SÃO THREAD SAFE, NÃO EFETUE MÚLTIPLOS TESTES AO MESMO TEMPO.',
|
||||
'ATTENTION: you cannot edit the running application!': 'ATENÇÃO: Não pode modificar a aplicação em execução!',
|
||||
'Autocomplete Python Code': 'Autocompletar Código Python',
|
||||
'Available databases and tables': 'Bancos de dados e tabelas disponíveis',
|
||||
'back': 'voltar',
|
||||
'Begin': 'Begin',
|
||||
'browse': 'buscar',
|
||||
'cache': 'cache',
|
||||
'cache, errors and sessions cleaned': 'cache, erros e sessões eliminadas',
|
||||
'can be a git repo': 'can be a git repo',
|
||||
'Cannot be empty': 'Não pode ser vazio',
|
||||
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Não é possível compilar: Existem erros em sua aplicação. Depure, corrija os erros e tente novamente',
|
||||
'Cannot compile: there are errors in your app:': 'Não é possível compilar: Existem erros em sua aplicação',
|
||||
'cannot create file': 'Não é possível criar o arquivo',
|
||||
'cannot upload file "%(filename)s"': 'não é possível fazer upload do arquivo "%(filename)s"',
|
||||
'Change admin password': 'mudar senha de administrador',
|
||||
'change editor settings': 'mudar definições do editor',
|
||||
'Change Password': 'Trocar Senha',
|
||||
'check all': 'marcar todos',
|
||||
'Check for upgrades': 'checar por atualizações',
|
||||
'Check to delete': 'Marque para apagar',
|
||||
'Checking for upgrades...': 'Buscando atualizações...',
|
||||
'Clean': 'limpar',
|
||||
'click here for online examples': 'clique para ver exemplos online',
|
||||
'click here for the administrative interface': 'Clique aqui para acessar a interface administrativa',
|
||||
'Click row to expand traceback': 'Clique em uma coluna para expandir o log do erro',
|
||||
'click to check for upgrades': 'clique aqui para checar por atualizações',
|
||||
'click to open': 'clique para abrir',
|
||||
'Client IP': 'IP do cliente',
|
||||
'code': 'código',
|
||||
'collapse/expand all': 'colapsar/expandir tudo',
|
||||
'commit (mercurial)': 'commit (mercurial)',
|
||||
'Compile': 'compilar',
|
||||
'compiled application removed': 'aplicação compilada removida',
|
||||
'Controllers': 'Controladores',
|
||||
'controllers': 'controladores',
|
||||
'Count': 'Contagem',
|
||||
'Create': 'criar',
|
||||
'create file with filename:': 'criar um arquivo com o nome:',
|
||||
'Create new application using the Wizard': 'Criar nova aplicação utilizando o assistente',
|
||||
'create new application:': 'nome da nova aplicação:',
|
||||
'Create new simple application': 'Crie uma nova aplicação',
|
||||
'Create/Upload': 'Create/Upload',
|
||||
'created by': 'criado por',
|
||||
'crontab': 'crontab',
|
||||
'Current request': 'Requisição atual',
|
||||
'Current response': 'Resposta atual',
|
||||
'Current session': 'Sessão atual',
|
||||
'currently running': 'Executando',
|
||||
'currently saved or': 'Atualmente salvo ou',
|
||||
'customize me!': 'Modifique-me',
|
||||
'data uploaded': 'Dados enviados',
|
||||
'database': 'banco de dados',
|
||||
'database %s select': 'Seleção no banco de dados %s',
|
||||
'database administration': 'administração de banco de dados',
|
||||
'Date and Time': 'Data e Hora',
|
||||
'db': 'db',
|
||||
'Debug': 'Debug',
|
||||
'defines tables': 'define as tabelas',
|
||||
'Delete': 'Apague',
|
||||
'delete': 'apagar',
|
||||
'delete all checked': 'apagar marcados',
|
||||
'delete plugin': 'apagar plugin',
|
||||
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
|
||||
'Delete:': 'Apague:',
|
||||
'Deploy': 'publicar',
|
||||
'Deploy on Google App Engine': 'Publicar no Google App Engine',
|
||||
'Deploy to OpenShift': 'Deploy to OpenShift',
|
||||
'Deploy to pythonanywhere': 'Deploy to pythonanywhere',
|
||||
'Deploy to PythonAnywhere': 'Deploy to PythonAnywhere',
|
||||
'Deployment Interface': 'Deployment Interface',
|
||||
'Description': 'Descrição',
|
||||
'design': 'modificar',
|
||||
'DESIGN': 'Projeto',
|
||||
'Design for': 'Projeto de',
|
||||
'Detailed traceback description': 'Detailed traceback description',
|
||||
'details': 'details',
|
||||
'direction: ltr': 'direção: ltr',
|
||||
'Disable': 'Disable',
|
||||
'docs': 'docs',
|
||||
'done!': 'feito!',
|
||||
'download layouts': 'download layouts',
|
||||
'Download layouts from repository': 'Download layouts from repository',
|
||||
'download plugins': 'download plugins',
|
||||
'Download plugins from repository': 'Download plugins from repository',
|
||||
'E-mail': 'E-mail',
|
||||
'EDIT': 'EDITAR',
|
||||
'Edit': 'editar',
|
||||
'Edit application': 'Editar aplicação',
|
||||
'edit controller': 'editar controlador',
|
||||
'Edit current record': 'Editar o registro atual',
|
||||
'Edit Profile': 'Editar Perfil',
|
||||
'edit views:': 'editar visões:',
|
||||
'Editing %s': 'A Editar %s',
|
||||
'Editing file': 'Editando arquivo',
|
||||
'Editing file "%s"': 'Editando arquivo "%s"',
|
||||
'Editing Language file': 'Editando arquivo de linguagem',
|
||||
'Email Address': 'Email Address',
|
||||
'Enterprise Web Framework': 'Framework web empresarial',
|
||||
'Error': 'Erro',
|
||||
'Error logs for "%(app)s"': 'Logs de erro para "%(app)s"',
|
||||
'Error snapshot': 'Error snapshot',
|
||||
'Error ticket': 'Error ticket',
|
||||
'Errors': 'erros',
|
||||
'Exception instance attributes': 'Atributos da instância de exceção',
|
||||
'Exit Fullscreen': 'Sair de Ecrã Inteiro',
|
||||
'Expand Abbreviation (html files only)': 'Expandir Abreviação (só para ficheiros html)',
|
||||
'export as csv file': 'exportar como arquivo CSV',
|
||||
'exposes': 'expõe',
|
||||
'exposes:': 'exposes:',
|
||||
'extends': 'estende',
|
||||
'failed to reload module': 'Falha ao recarregar o módulo',
|
||||
'failed to reload module because:': 'falha ao recarregar o módulo por:',
|
||||
'File': 'Arquivo',
|
||||
'file "%(filename)s" created': 'arquivo "%(filename)s" criado',
|
||||
'file "%(filename)s" deleted': 'arquivo "%(filename)s" apagado',
|
||||
'file "%(filename)s" uploaded': 'arquivo "%(filename)s" enviado',
|
||||
'file "%(filename)s" was not deleted': 'arquivo "%(filename)s" não foi apagado',
|
||||
'file "%s" of %s restored': 'arquivo "%s" de %s restaurado',
|
||||
'file changed on disk': 'arquivo modificado no disco',
|
||||
'file does not exist': 'arquivo não existe',
|
||||
'file saved on %(time)s': 'arquivo salvo em %(time)s',
|
||||
'file saved on %s': 'arquivo salvo em %s',
|
||||
'filter': 'filtro',
|
||||
'Find Next': 'Localizar Seguinte',
|
||||
'Find Previous': 'Localizar Anterior',
|
||||
'First name': 'Nome',
|
||||
'Form has errors': 'Form has errors',
|
||||
'Frames': 'Frames',
|
||||
'Functions with no doctests will result in [passed] tests.': 'Funções sem doctests resultarão em testes [aceitos].',
|
||||
'graph model': 'graph model',
|
||||
'Group ID': 'ID do Grupo',
|
||||
'Hello World': 'Olá Mundo',
|
||||
'Help': 'ajuda',
|
||||
'Hide/Show Translated strings': '',
|
||||
'htmledit': 'htmledit',
|
||||
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'Se o relatório acima contém um número de ticket, isso indica uma falha no controlador em execução, antes de tentar executar os doctests. Isto acontece geralmente por erro de indentação ou erro fora do código da função.\r\nO titulo em verde indica que os testes (se definidos) passaram. Neste caso os testes não são mostrados.',
|
||||
'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.': 'if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.',
|
||||
'Import/Export': 'Importar/Exportar',
|
||||
'includes': 'inclui',
|
||||
'insert new': 'inserir novo',
|
||||
'insert new %s': 'inserir novo %s',
|
||||
'inspect attributes': 'inspecionar atributos',
|
||||
'Install': 'instalar',
|
||||
'Installed applications': 'Aplicações instaladas',
|
||||
'internal error': 'erro interno',
|
||||
'Internal State': 'Estado Interno',
|
||||
'Invalid action': 'Ação inválida',
|
||||
'Invalid email': 'E-mail inválido',
|
||||
'invalid password': 'senha inválida',
|
||||
'Invalid Query': 'Consulta inválida',
|
||||
'invalid request': 'solicitação inválida',
|
||||
'invalid ticket': 'ticket inválido',
|
||||
'Keyboard shortcuts': 'Atalhos de teclado',
|
||||
'language file "%(filename)s" created/updated': 'arquivo de linguagem "%(filename)s" criado/atualizado',
|
||||
'Language files (static strings) updated': 'Arquivos de linguagem (textos estáticos) atualizados',
|
||||
'languages': 'linguagens',
|
||||
'Languages': 'Linguagens',
|
||||
'languages updated': 'linguagens atualizadas',
|
||||
'Last name': 'Sobrenome',
|
||||
'Last saved on:': 'Salvo em:',
|
||||
'License for': 'Licença para',
|
||||
'lists by ticket': 'lists by ticket',
|
||||
'Loading...': 'Loading...',
|
||||
'loading...': 'carregando...',
|
||||
'Local Apps': 'Local Apps',
|
||||
'locals': 'locals',
|
||||
'Login': 'Entrar',
|
||||
'login': 'inicio de sessão',
|
||||
'Login successful': 'Login successful',
|
||||
'Login to the Administrative Interface': 'Entrar na interface administrativa',
|
||||
'Login/Register': 'Login/Register',
|
||||
'Logout': 'finalizar sessão',
|
||||
'Lost Password': 'Senha perdida',
|
||||
'manage': 'gerenciar',
|
||||
'Manage': 'Manage',
|
||||
'merge': 'juntar',
|
||||
'models': 'modelos',
|
||||
'Models': 'Modelos',
|
||||
'Modules': 'Módulos',
|
||||
'modules': 'módulos',
|
||||
'Name': 'Nome',
|
||||
'new application "%s" created': 'nova aplicação "%s" criada',
|
||||
'New Application Wizard': 'New Application Wizard',
|
||||
'New application wizard': 'Assistente para novas aplicações ',
|
||||
'new plugin installed': 'novo plugin instalado',
|
||||
'New Record': 'Novo registro',
|
||||
'new record inserted': 'novo registro inserido',
|
||||
'New simple application': 'Nova aplicação básica',
|
||||
'next 100 rows': 'próximos 100 registros',
|
||||
'NO': 'NÃO',
|
||||
'No databases in this application': 'Não existem bancos de dados nesta aplicação',
|
||||
'no match': 'não encontrado',
|
||||
'no package selected': 'nenhum pacote selecionado',
|
||||
'No ticket_storage.txt found under /private folder': 'No ticket_storage.txt found under /private folder',
|
||||
'online designer': 'online designer',
|
||||
'or alternatively': 'or alternatively',
|
||||
'Or Get from URL:': 'Ou Obtenha do URL:',
|
||||
'or import from csv file': 'ou importar de um arquivo CSV',
|
||||
'or provide app url:': 'ou forneça a url de uma aplicação:',
|
||||
'or provide application url:': 'ou forneça a url de uma aplicação:',
|
||||
'Origin': 'Origem',
|
||||
'Original/Translation': 'Original/Tradução',
|
||||
'Overwrite installed app': 'sobrescrever aplicação instalada',
|
||||
'Pack all': 'criar pacote',
|
||||
'Pack compiled': 'criar pacote compilado',
|
||||
'Pack custom': 'Pack custom',
|
||||
'pack plugin': 'empacotar plugin',
|
||||
'PAM authenticated user, cannot change password here': 'usuario autenticado por PAM, não pode alterar a senha por aqui',
|
||||
'Password': 'Senha',
|
||||
'password changed': 'senha alterada',
|
||||
'Peeking at file': 'Visualizando arquivo',
|
||||
'Please wait, giving pythonanywhere a moment...': 'Please wait, giving pythonanywhere a moment...',
|
||||
'plugin "%(plugin)s" deleted': 'plugin "%(plugin)s" eliminado',
|
||||
'Plugin "%s" in application': 'Plugin "%s" na aplicação',
|
||||
'plugins': 'plugins',
|
||||
'Plugins': 'Plugins',
|
||||
'Plural-Forms:': 'Plural-Forms:',
|
||||
'Powered by': 'Este site utiliza',
|
||||
'previous 100 rows': '100 registros anteriores',
|
||||
'Private files': 'Private files',
|
||||
'private files': 'private files',
|
||||
'PythonAnywhere Apps': 'PythonAnywhere Apps',
|
||||
'PythonAnywhere Password': 'PythonAnywhere Password',
|
||||
'Query:': 'Consulta:',
|
||||
'Rapid Search': 'Rapid Search',
|
||||
'Read': 'Read',
|
||||
'record': 'registro',
|
||||
'record does not exist': 'o registro não existe',
|
||||
'record id': 'id do registro',
|
||||
'Record ID': 'ID do Registro',
|
||||
'Register': 'Registrar-se',
|
||||
'Registration key': 'Chave de registro',
|
||||
'Reload routes': 'Reload routes',
|
||||
'Remove compiled': 'eliminar compilados',
|
||||
'Replace': 'Substituir',
|
||||
'Replace All': 'Substituir Tudo',
|
||||
'request': 'request',
|
||||
'requires python-git, but not installed': 'requires python-git, but not installed',
|
||||
'Resolve Conflict file': 'Arquivo de resolução de conflito',
|
||||
'response': 'response',
|
||||
'restore': 'restaurar',
|
||||
'revert': 'reverter',
|
||||
'Role': 'Papel',
|
||||
'Rows in table': 'Registros na tabela',
|
||||
'Rows selected': 'Registros selecionados',
|
||||
'rules are not defined': 'rules are not defined',
|
||||
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
|
||||
'Running on %s': 'A correr em %s',
|
||||
'Save': 'Save',
|
||||
'save': 'salvar',
|
||||
'Save file:': 'Gravar ficheiro:',
|
||||
'Save file: %s': 'Gravar ficheiro: %s',
|
||||
'Save via Ajax': 'Gravar via Ajax',
|
||||
'Saved file hash:': 'Hash do arquivo salvo:',
|
||||
'selected': 'selecionado(s)',
|
||||
'session': 'session',
|
||||
'session expired': 'sessão expirada',
|
||||
'shell': 'Terminal',
|
||||
'Site': 'site',
|
||||
'some files could not be removed': 'alguns arquivos não puderam ser removidos',
|
||||
'Something went wrong please wait a few minutes before retrying': 'Something went wrong please wait a few minutes before retrying',
|
||||
'source : filesystem': 'source : filesystem',
|
||||
'Start a new app': 'Start a new app',
|
||||
'Start searching': 'Start searching',
|
||||
'Start wizard': 'iniciar assistente',
|
||||
'state': 'estado',
|
||||
'Static': 'Static',
|
||||
'static': 'estáticos',
|
||||
'Static files': 'Arquivos estáticos',
|
||||
'Submit': 'Submit',
|
||||
'submit': 'enviar',
|
||||
'Sure you want to delete this object?': 'Tem certeza que deseja apagar este objeto?',
|
||||
'switch to : db': 'switch to : db',
|
||||
'table': 'tabela',
|
||||
'Table name': 'Nome da tabela',
|
||||
'test': 'testar',
|
||||
'Testing application': 'Testando a aplicação',
|
||||
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'A "consulta" é uma condição como "db.tabela.campo1==\'valor\'". Algo como "db.tabela1.campo1==db.tabela2.campo2" resulta em um JOIN SQL.',
|
||||
'the application logic, each URL path is mapped in one exposed function in the controller': 'A lógica da aplicação, cada URL é mapeada para uma função exposta pelo controlador',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
|
||||
'the data representation, define database tables and sets': 'A representação dos dados, define tabelas e estruturas de dados',
|
||||
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
|
||||
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
|
||||
'the presentations layer, views are also known as templates': 'A camada de apresentação, As visões também são chamadas de templates',
|
||||
'There are no controllers': 'Não existem controllers',
|
||||
'There are no models': 'Não existem modelos',
|
||||
'There are no modules': 'Não existem módulos',
|
||||
'There are no plugins': 'There are no plugins',
|
||||
'There are no private files': '',
|
||||
'There are no static files': 'Não existem arquivos estáticos',
|
||||
'There are no translators, only default language is supported': 'Não há traduções, somente a linguagem padrão é suportada',
|
||||
'There are no views': 'Não existem visões',
|
||||
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
|
||||
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
|
||||
'these files are served without processing, your images go here': 'Estes arquivos são servidos sem processamento, suas imagens ficam aqui',
|
||||
'This is the %(filename)s template': 'Este é o template %(filename)s',
|
||||
'Ticket': 'Ticket',
|
||||
'Ticket ID': 'Ticket ID',
|
||||
'Timestamp': 'Data Atual',
|
||||
'TM': 'MR',
|
||||
'to previous version.': 'para a versão anterior.',
|
||||
'To create a plugin, name a file/folder plugin_[name]': 'Para criar um plugin, nomeie um arquivo/pasta como plugin_[nome]',
|
||||
'toggle breakpoint': 'toggle breakpoint',
|
||||
'Toggle comment': 'Toggle comment',
|
||||
'Toggle Fullscreen': 'Toggle Fullscreen',
|
||||
'Traceback': 'Traceback',
|
||||
'translation strings for the application': 'textos traduzidos para a aplicação',
|
||||
'Translation strings for the application': 'Translation strings for the application',
|
||||
'try': 'tente',
|
||||
'try something like': 'tente algo como',
|
||||
'Try the mobile interface': 'Try the mobile interface',
|
||||
'Unable to check for upgrades': 'Não é possível checar as atualizações',
|
||||
'unable to create application "%s"': 'não é possível criar a aplicação "%s"',
|
||||
'unable to delete file "%(filename)s"': 'não é possível apagar o arquivo "%(filename)s"',
|
||||
'unable to delete file plugin "%(plugin)s"': 'não é possível apagar o plugin "%(plugin)s"',
|
||||
'Unable to download': 'Não é possível efetuar o download',
|
||||
'Unable to download app': 'Não é possível baixar a aplicação',
|
||||
'Unable to download app because:': 'Não é possível baixar a aplicação porque:',
|
||||
'Unable to download because': 'Não é possível baixar porque',
|
||||
'unable to parse csv file': 'não é possível analisar o arquivo CSV',
|
||||
'unable to uninstall "%s"': 'não é possível desinstalar "%s"',
|
||||
'unable to upgrade because "%s"': 'não é possível atualizar porque "%s"',
|
||||
'uncheck all': 'desmarcar todos',
|
||||
'Uninstall': 'desinstalar',
|
||||
'update': 'atualizar',
|
||||
'update all languages': 'atualizar todas as linguagens',
|
||||
'Update:': 'Atualizar:',
|
||||
'upgrade now to %s': 'upgrade now to %s',
|
||||
'upgrade web2py now': 'atualize o web2py agora',
|
||||
'upload': 'upload',
|
||||
'Upload': 'Upload',
|
||||
'Upload & install packed application': 'Faça upload e instale uma aplicação empacotada',
|
||||
'Upload a package:': 'Faça upload de um pacote:',
|
||||
'Upload and install packed application': 'Upload and install packed application',
|
||||
'upload application:': 'Fazer upload de uma aplicação:',
|
||||
'Upload existing application': 'Faça upload de uma aplicação existente',
|
||||
'upload file:': 'Enviar arquivo:',
|
||||
'upload plugin file:': 'Enviar arquivo de plugin:',
|
||||
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, y ~(...) para NOT, para criar consultas mais complexas.',
|
||||
'Use an url:': 'Use uma url:',
|
||||
'User ID': 'ID do Usuario',
|
||||
'Username': 'Username',
|
||||
'variables': 'variáveis',
|
||||
'Version': 'Versão',
|
||||
'versioning': 'versionamento',
|
||||
'Versioning': 'Versioning',
|
||||
'view': 'visão',
|
||||
'Views': 'Visões',
|
||||
'views': 'visões',
|
||||
'Warning!': 'Warning!',
|
||||
'Web Framework': 'Web Framework',
|
||||
'web2py Admin Password': 'web2py Admin Password',
|
||||
'web2py is up to date': 'web2py está atualizado',
|
||||
'web2py Recent Tweets': 'Tweets Recentes de @web2py',
|
||||
'web2py upgraded; please restart it': 'web2py atualizado; favor reiniciar',
|
||||
'Welcome to web2py': 'Bem-vindo ao web2py',
|
||||
'YES': 'SIM',
|
||||
'You only need these if you have already registered': 'You only need these if you have already registered',
|
||||
}
|
||||
|
||||
@@ -1,579 +0,0 @@
|
||||
/*=============================================================
|
||||
GENERAL
|
||||
==============================================================*/
|
||||
html,body{height:auto;background:transparent;}
|
||||
/*=============================================================
|
||||
CONTROLS
|
||||
==============================================================*/
|
||||
label,
|
||||
input,
|
||||
button,
|
||||
select,
|
||||
textarea,
|
||||
button.btn
|
||||
{
|
||||
font-size:13px;
|
||||
font-weight:normal;
|
||||
line-height:18px;
|
||||
}
|
||||
textarea,
|
||||
select
|
||||
{
|
||||
margin-bottom:9px;
|
||||
}
|
||||
select,
|
||||
/*textarea,*/
|
||||
input[type="text"],
|
||||
input[type="password"],
|
||||
input[type="datetime"],
|
||||
input[type="datetime-local"],
|
||||
input[type="date"],
|
||||
input[type="month"],
|
||||
input[type="time"],
|
||||
input[type="week"],
|
||||
input[type="number"],
|
||||
input[type="email"],
|
||||
input[type="url"],
|
||||
input[type="search"],
|
||||
input[type="tel"],
|
||||
input[type="color"],
|
||||
.uneditable-input,
|
||||
a.btn-lnk
|
||||
{
|
||||
height:18px;
|
||||
padding:4px;
|
||||
font-size:13px;
|
||||
line-height:18px;
|
||||
}
|
||||
.design h3,
|
||||
.plugin h3
|
||||
{
|
||||
background-position:0 2px;
|
||||
}
|
||||
|
||||
select,
|
||||
input[type="file"]
|
||||
{
|
||||
height:28px;
|
||||
line-height:28px;
|
||||
}
|
||||
input[type="submit"],
|
||||
input[type="button"]
|
||||
{
|
||||
font-size:13px;
|
||||
height:28px;
|
||||
line-height:18px;
|
||||
padding:4px 10px;
|
||||
}
|
||||
input[type="radio"],
|
||||
input[type="checkbox"]
|
||||
{
|
||||
margin-top:2px;
|
||||
}
|
||||
.button.btn
|
||||
{
|
||||
line-height:1.25em;
|
||||
font-size:inherit;
|
||||
border:none;
|
||||
text-shadow:none;
|
||||
margin-bottom:0px;
|
||||
-webkit-border-radius:0px;
|
||||
-moz-border-radius:0px;
|
||||
border-radius:0px;
|
||||
-webkit-box-shadow:none;
|
||||
-moz-box-shadow:none;
|
||||
box-shadow:none; /* fixed: stray ')' made this declaration invalid CSS */
|
||||
}
|
||||
.button.btn:hover
|
||||
{
|
||||
background-color:transparent;
|
||||
-webkit-transition: background-position 0s linear;
|
||||
-moz-transition: background-position 0s linear;
|
||||
-o-transition: background-position 0s linear;
|
||||
transition: background-position 0s linear;
|
||||
}
|
||||
form label
|
||||
{
|
||||
font-weight:bold;
|
||||
}
|
||||
.help
|
||||
{
|
||||
border-color:transparent;
|
||||
}
|
||||
/* tree menu */
|
||||
.folder
|
||||
{
|
||||
border:none;
|
||||
}
|
||||
.folder>i
|
||||
{
|
||||
display:none;
|
||||
}
|
||||
.celled
|
||||
{
|
||||
padding-top: 2px;
|
||||
}
|
||||
.celled-one
|
||||
{
|
||||
padding-top: 1px;
|
||||
}
|
||||
|
||||
.test h3
|
||||
{
|
||||
border:0;
|
||||
padding-left:18px;
|
||||
}
|
||||
/*=============================================================
|
||||
FLASH MESSAGEBOX
|
||||
==============================================================*/
|
||||
.flash
|
||||
{
|
||||
position:fixed;
|
||||
width:50%;
|
||||
top:49px;
|
||||
left:25%;
|
||||
right:25%;
|
||||
cursor:default;
|
||||
text-align:center;
|
||||
padding:8px 35px 8px 14px;
|
||||
z-index:5620;
|
||||
}
|
||||
.flash>.close
|
||||
{
|
||||
color:inherit;
|
||||
opacity:0.7;
|
||||
}
|
||||
.flash>.close:hover
|
||||
{
|
||||
opacity:0.9;
|
||||
}
|
||||
/*=============================================================
|
||||
NAVBAR
|
||||
==============================================================*/
|
||||
.navbar-fixed-top .navbar-inner,
|
||||
.navbar-static-top .navbar-inner
|
||||
{
|
||||
/* in place of shadow image */
|
||||
-webkit-box-shadow:0px 10px 20px rgba(195,195,195,1.0);
|
||||
-moz-box-shadow: 0px 10px 20px rgba(195,195,195,1.0);
|
||||
box-shadow: 0px 10px 20px rgba(195,195,195,1.0);
|
||||
//zoom:1; /* IE6-9 */
|
||||
filter:progid:DXImageTransform.Microsoft.DropShadow(OffX=0, OffY=10, Color=#000000); /* IE6-9 */
|
||||
padding:0;
|
||||
}
|
||||
.navbar-inverse .navbar-inner
|
||||
{
|
||||
min-height:33px; /* required - override */
|
||||
height:33px;
|
||||
filter:progid:DXImageTransform.Microsoft.gradient(enabled=false); /* IE6-9 */
|
||||
background:#292929 url(../images/header_bg.png) repeat-x;
|
||||
border:none;
|
||||
}
|
||||
#header
|
||||
{
|
||||
background:transparent;
|
||||
}
|
||||
#header.navbar
|
||||
{
|
||||
overflow:visible;
|
||||
}
|
||||
.navbar-inverse .nav > li > a
|
||||
{
|
||||
padding:0;
|
||||
line-height:1.25;
|
||||
text-shadow:none;
|
||||
}
|
||||
.navbar .btn-navbar
|
||||
{
|
||||
padding:4px;
|
||||
margin:5px 5px 0 5px;
|
||||
}
|
||||
#menu{margin-right:-7px;}
|
||||
/*=============================================================
|
||||
FOOTER
|
||||
==============================================================*/
|
||||
#footer
|
||||
{
|
||||
padding-bottom:0;
|
||||
}
|
||||
/*=============================================================
|
||||
MAIN
|
||||
==============================================================*/
|
||||
#main
|
||||
{
|
||||
position:static;
|
||||
padding-top:0;
|
||||
padding-bottom:0;
|
||||
}
|
||||
/*=============================================================
|
||||
SIDEBAR
|
||||
==============================================================*/
|
||||
.sidebar_inner
|
||||
{
|
||||
background:transparent;
|
||||
padding:0;
|
||||
min-width:auto;
|
||||
}
|
||||
.sidebar .box {
|
||||
border-top:1px solid #EEE;
|
||||
}
|
||||
/*=============================================================
|
||||
WIZARD
|
||||
==============================================================*/
|
||||
.step div.help li
|
||||
{
|
||||
line-height:inherit;
|
||||
}
|
||||
.ms-container .ms-selectable li.ms-elem-selectable,
|
||||
.ms-container .ms-selection li.ms-elem-selected
|
||||
{
|
||||
font-size:13px;
|
||||
}
|
||||
.input-append a.btn
|
||||
{
|
||||
padding:4px;
|
||||
height:18px;
|
||||
font-size:13px;
|
||||
line-height:18px;
|
||||
}
|
||||
/*=============================================================
|
||||
ERRORS TABLE
|
||||
==============================================================*/
|
||||
.errors .table th
|
||||
{
|
||||
filter:progid:DXImageTransform.Microsoft.gradient(enabled=false); /* IE6-9 */
|
||||
}
|
||||
|
||||
.tablebar span.help
|
||||
{
|
||||
font-weight:normal;
|
||||
line-height:1.25em;
|
||||
text-shadow:none;
|
||||
width:auto;
|
||||
}
|
||||
/*=============================================================
|
||||
TOOLTIP
|
||||
==============================================================*/
|
||||
.tooltip.in
|
||||
{
|
||||
opacity:1;
|
||||
filter:alpha(opacity=100);
|
||||
}
|
||||
.tooltip-inner
|
||||
{
|
||||
opacity:1;
|
||||
text-align:left;
|
||||
background:#9fb364;
|
||||
color:#eef1d9;
|
||||
border:1px solid #eef1d9;
|
||||
font-style:italic;
|
||||
padding:0.3em;
|
||||
-moz-border-radius:0.5em;
|
||||
border-radius:0.5em;
|
||||
font-size:13px;
|
||||
text-transform:none;
|
||||
}
|
||||
.tooltip.right .tooltip-arrow,
|
||||
.tooltip.left .tooltip-arrow
|
||||
{
|
||||
border-color:transparent;
|
||||
}
|
||||
|
||||
/*=============================================================
|
||||
THE GRID
|
||||
==============================================================*/
|
||||
.w2p_grid_bottom_bar .w2p_export_menu
|
||||
{
|
||||
line-height:18px;
|
||||
margin-left:0;
|
||||
}
|
||||
.w2p_export_menu .dropdown-toggle
|
||||
{
|
||||
cursor:pointer;
|
||||
margin:0;
|
||||
padding:0;
|
||||
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(white), to(#E6E6E6));
|
||||
background-image: -webkit-linear-gradient(top, white, #E6E6E6);
|
||||
background-image: -o-linear-gradient(top, white, #E6E6E6);
|
||||
background-image: linear-gradient(to bottom, white, #E6E6E6);
|
||||
background-image: -moz-linear-gradient(top, white, #E6E6E6);
|
||||
}
|
||||
.w2p_export_menu ul
|
||||
{
|
||||
margin-top:2px;
|
||||
display:none;
|
||||
}
|
||||
.w2p_export_menu li
|
||||
{
|
||||
display:list-item;
|
||||
margin:0;
|
||||
}
|
||||
div.web2py_grid
|
||||
{
|
||||
font-size:13px;
|
||||
line-height:18px;
|
||||
}
|
||||
.web2py_grid a.btn
|
||||
{
|
||||
font-size:13px;
|
||||
line-height:18px;
|
||||
padding:4px 10px;
|
||||
margin-left:0;
|
||||
margin-right:4px;
|
||||
|
||||
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#ffffff), to(#e6e6e6));
|
||||
background-image: -webkit-linear-gradient(top, #ffffff, #e6e6e6);
|
||||
background-image: -o-linear-gradient(top, #ffffff, #e6e6e6);
|
||||
background-image: linear-gradient(to bottom, #ffffff, #e6e6e6);
|
||||
background-image: -moz-linear-gradient(top, #ffffff, #e6e6e6);
|
||||
}
|
||||
.web2py_grid .input-append .btn
|
||||
{
|
||||
padding:4px 10px;
|
||||
margin-right:0;
|
||||
font-family:inherit;
|
||||
color:#333;
|
||||
text-shadow:0 1px 1px rgba(255, 255, 255, 0.75);
|
||||
border:1px solid #c5c5c5;
|
||||
}
|
||||
.web2py_grid select:focus
|
||||
{
|
||||
border-color:rgba(232,149,60,0.8);
|
||||
outline:0;
|
||||
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075),0 0 8px rgba(232, 149, 60, 0.6);
|
||||
-moz-box-shadow: inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(232,149,60,0.6);
|
||||
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075),0 0 8px rgba(232, 149, 60, 0.6);
|
||||
}
|
||||
.web2py_console input[type="button"],
|
||||
.web2py_grid .row_buttons a.btn
|
||||
{
|
||||
color:#333;
|
||||
line-height:18px;
|
||||
padding:4px 10px;
|
||||
text-shadow:rgba(255, 255, 255, 0.74902) 0px 1px 1px;
|
||||
border-color:rgba(0, 0, 0, 0.15) rgba(0, 0, 0, 0.15) rgba(0, 0, 0, 0.25);
|
||||
-webkit-border-radius: 4px;
|
||||
-moz-border-radius: 4px;
|
||||
border-radius: 4px;
|
||||
}
|
||||
.web2py_console input[type="button"]:hover,
|
||||
.web2py_grid .row_buttons a.btn:hover
|
||||
{
|
||||
color:#333;
|
||||
border-color:rgba(0, 0, 0, 0.15) rgba(0, 0, 0, 0.15) rgba(0, 0, 0, 0.25);
|
||||
background:#E6E6E6;
|
||||
background-position: 0 -15px !important;
|
||||
-webkit-transition: background-position .1s linear;
|
||||
-moz-transition: background-position .1s linear;
|
||||
-o-transition: background-position .1s linear;
|
||||
transition: background-position .1s linear;
|
||||
}
|
||||
.web2py_table
|
||||
{
|
||||
border:none;
|
||||
}
|
||||
.web2py_table table
|
||||
{
|
||||
/*table-layout:fixed;*/
|
||||
margin-bottom:4px;
|
||||
}
|
||||
.web2py_table table td
|
||||
{
|
||||
/*word-wrap:break-word;*/ /*uncomment when "table-layout:fixed" is applied */
|
||||
}
|
||||
|
||||
.web2py_grid thead th
|
||||
{
|
||||
background-color:transparent;
|
||||
padding:4px 5px;
|
||||
line-height:18px;
|
||||
vertical-align:bottom;
|
||||
border-right:0;
|
||||
border-bottom:0;
|
||||
word-wrap:break-word;
|
||||
}
|
||||
.web2py_grid .btn-group > .dropdown-menu
|
||||
{
|
||||
font-size:13px;
|
||||
}
|
||||
.web2py_grid .dropdown-menu li > a:hover,
|
||||
.web2py_grid .dropdown-menu li > a:focus
|
||||
{
|
||||
filter:progid:DXImageTransform.Microsoft.gradient(enabled=false); /* IE6-9 */
|
||||
background-image:none;
|
||||
background-color:#E8953C;
|
||||
}
|
||||
.pagination
|
||||
{
|
||||
margin:0;
|
||||
height:30px;
|
||||
}
|
||||
.pagination ul > li > a
|
||||
{
|
||||
line-height:28px;
|
||||
}
|
||||
|
||||
#w2p_grid_addbtn:focus,
|
||||
#w2p_search-form :focus,
|
||||
.btn:focus
|
||||
{
|
||||
outline:none;
|
||||
}
|
||||
.web2py_console input[type="button"]:focus,
|
||||
.web2py_grid .row_buttons a.btn:focus
|
||||
{
|
||||
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.15) inset, 0 1px 2px rgba(0, 0, 0, 0.05);
|
||||
}
|
||||
div.web2py_counter.span6
|
||||
{
|
||||
min-height:20px;
|
||||
}
|
||||
.web2py_paginator
|
||||
{
|
||||
border:0;
|
||||
margin:0;
|
||||
padding:0;
|
||||
background-color:transparent;
|
||||
}
|
||||
.web2py_paginator ul li a
|
||||
{
|
||||
margin-right:0;
|
||||
padding:0 14px;
|
||||
border:1px solid #DDD;
|
||||
border-left-width:0;
|
||||
color:#E8953C;
|
||||
}
|
||||
.web2py_paginator ul li a:hover
|
||||
{
|
||||
background: whiteSmoke;
|
||||
border: 1px solid #DDD;
|
||||
border-left-width:0;
|
||||
color:#e2821b;
|
||||
}
|
||||
.web2py_paginator ul li:first-child a,
|
||||
.web2py_paginator ul li:first-child a:hover
|
||||
{
|
||||
border-left-width:1px;
|
||||
}
|
||||
.web2py_paginator .current
|
||||
{
|
||||
font-weight:normal;
|
||||
}
|
||||
.web2py_paginator ul li.current a:hover
|
||||
{
|
||||
color:#999;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.editor-bar-column a[name="save"]
|
||||
{
|
||||
background-color: whiteSmoke;
|
||||
background-image: -webkit-gradient(linear,0 0,0 100%,from(white),to(#E6E6E6));
|
||||
background-image: -webkit-linear-gradient(top,white,#E6E6E6);
|
||||
background-image: -o-linear-gradient(top,white,#E6E6E6);
|
||||
background-image: linear-gradient(to bottom,white,#E6E6E6);
|
||||
background-image: -moz-linear-gradient(top,white,#E6E6E6);
|
||||
background-repeat: repeat-x;
|
||||
padding:2px 6px;
|
||||
font-size:11px;
|
||||
line-height:17px;
|
||||
margin:0;
|
||||
}
|
||||
.editor-bar-column a[name="save"]:hover
|
||||
{
|
||||
background-color: #E6E6E6;
|
||||
background-position: 0 -15px;
|
||||
-webkit-transition: background-position .1s linear;
|
||||
-moz-transition: background-position .1s linear;
|
||||
-o-transition: background-position .1s linear;
|
||||
transition: background-position .1s linear;
|
||||
}
|
||||
.keybindings
|
||||
{
|
||||
padding:0 18px 10px;
|
||||
}
|
||||
.keybindings li
|
||||
{
|
||||
margin-bottom:0;
|
||||
}
|
||||
|
||||
/*----- translate page ---*/
|
||||
|
||||
.languageform input
|
||||
{
|
||||
margin-bottom:0;
|
||||
}
|
||||
.languageform div
|
||||
{
|
||||
margin-bottom:9px;
|
||||
}
|
||||
.languageform input.untranslated
|
||||
{
|
||||
background-color:#FC0;
|
||||
}
|
||||
|
||||
.step #wizard_nav .first-box
|
||||
{
|
||||
padding-top:0;
|
||||
}
|
||||
|
||||
/*=============================================================
|
||||
MEDIA QUERIES
|
||||
==============================================================*/
|
||||
@media (max-width: 979px)
|
||||
{
|
||||
/*-----------------------------------
|
||||
Navbar
|
||||
-------------------------------------*/
|
||||
#header .navbar-inner
|
||||
{
|
||||
padding:0;
|
||||
}
|
||||
/*collapsed menu*/
|
||||
.navbar .nav-collapse .nav
|
||||
{
|
||||
background:#222;
|
||||
padding:8px 2px 8px 8px;
|
||||
-webkit-border-bottom-right-radius:8px;
|
||||
-webkit-border-bottom-left-radius:8px;
|
||||
-moz-border-radius-bottomright:8px;
|
||||
-moz-border-radius-bottomleft:8px;
|
||||
border-bottom-right-radius:8px;
|
||||
border-bottom-left-radius:8px;
|
||||
}
|
||||
#menu
|
||||
{
|
||||
margin-right:0;
|
||||
}
|
||||
#menu li
|
||||
{
|
||||
float:none;
|
||||
}
|
||||
#menu a.button,
|
||||
#menu a.button span
|
||||
{
|
||||
background-image:url(../images/menu_responsive.png);
|
||||
}
|
||||
#menu a.button
|
||||
{
|
||||
padding:0 1em 0 0;
|
||||
}
|
||||
}
|
||||
@media(max-width:632px)
|
||||
{
|
||||
/*-----------------------------------
|
||||
footer
|
||||
-------------------------------------*/
|
||||
#footer
|
||||
{
|
||||
height:auto;
|
||||
}
|
||||
|
||||
#footer select
|
||||
{
|
||||
margin-top:8px;
|
||||
}
|
||||
}
|
||||
+1
-1
@@ -476,7 +476,7 @@ h4.editableapp { background: #fff url(../images/folder.png) no-repeat; }
|
||||
|
||||
h4.currentapp { background: #fff url(../images/folder_locked.png) no-repeat; }
|
||||
|
||||
.flash { position:fixed; width:50%; top:49px; left:25%; right:25%; cursor:default; text-align:center; z-index:5620; }
|
||||
.w2p_flash { position:fixed; width:50%; top:49px; left:25%; right:25%; cursor:default; text-align:center; z-index:5620; }
|
||||
span#closeflash {position:absolute; top:1px; right:-1px; font-size:150%; border:1px solid black; border-color: transparent transparent #fbeed5 #fbeed5; border-radius: 0 0 0 4px; width:22px; }
|
||||
span#closeflash:hover {font-weight:bold; cursor:pointer; }
|
||||
|
||||
|
||||
@@ -1,322 +0,0 @@
|
||||
/** these MUST stay **/
|
||||
a {text-decoration:none; white-space:nowrap}
|
||||
a:hover {text-decoration:underline}
|
||||
a.button {text-decoration:none}
|
||||
h1,h2,h3,h4,h5,h6 {margin:0.5em 0 0.25em 0; display:block;
|
||||
font-family:Helvetica}
|
||||
h1 {font-size:4.00em}
|
||||
h2 {font-size:3.00em}
|
||||
h3 {font-size:2.00em}
|
||||
h4 {font-size:1.50em}
|
||||
h5 {font-size:1.25em}
|
||||
h6 {font-size:1.12em}
|
||||
th,label {font-weight:bold; white-space:nowrap;}
|
||||
td,th {text-align:left; padding:2px 5px 2px 5px}
|
||||
th {vertical-align:middle; border-right:1px solid white}
|
||||
td {vertical-align:top}
|
||||
form table tr td label {text-align:left}
|
||||
p,table,ol,ul {padding:0; margin: 0.75em 0}
|
||||
p {text-align:justify}
|
||||
ol, ul {list-style-position:outside; margin-left:2em}
|
||||
li {margin-bottom:0.5em}
|
||||
span,input,select,textarea,button,label,a {display:inline}
|
||||
img {border:0}
|
||||
blockquote,blockquote p,p blockquote {
|
||||
font-style:italic; margin:0.5em 30px 0.5em 30px; font-size:0.9em}
|
||||
i,em {font-style:italic}
|
||||
strong {font-weight:bold}
|
||||
small {font-size:0.8em}
|
||||
code {font-family:Courier}
|
||||
textarea {width:100%}
|
||||
video {width:400px}
|
||||
audio {width:200px}
|
||||
[type="text"], [type="password"], select {
|
||||
margin-right: 5px; width: 300px;
|
||||
}
|
||||
.hidden {display:none;visibility:visible}
|
||||
.right {float:right; text-align:right}
|
||||
.left {float:left; text-align:left}
|
||||
.center {width:100%; text-align:center; vertical-align:middle}
|
||||
/** end **/
|
||||
|
||||
/* Sticky footer begin */
|
||||
|
||||
.main {
|
||||
padding:20px 0 50px 0;
|
||||
}
|
||||
|
||||
.footer,.push {
|
||||
height:6em;
|
||||
padding:1em 0;
|
||||
clear:both;
|
||||
}
|
||||
|
||||
.footer-content {position:relative; bottom:-4em; width:100%}
|
||||
|
||||
.auth_navbar {
|
||||
white-space:nowrap;
|
||||
}
|
||||
|
||||
/* Sticky footer end */
|
||||
|
||||
.footer {
|
||||
border-top:1px #DEDEDE solid;
|
||||
}
|
||||
.header {
|
||||
/* background:<fill here for header image>; */
|
||||
}
|
||||
|
||||
|
||||
fieldset {padding:16px; border-top:1px #DEDEDE solid}
|
||||
fieldset legend {text-transform:uppercase; font-weight:bold; padding:4px 16px 4px 16px; background:#f1f1f1}
|
||||
|
||||
/* fix ie problem with menu */
|
||||
|
||||
td.w2p_fw {padding-bottom:1px}
|
||||
td.w2p_fl,td.w2p_fw,td.w2p_fc {vertical-align:top}
|
||||
td.w2p_fl {text-align:left}
|
||||
td.w2p_fl, td.w2p_fw {padding-right:7px}
|
||||
td.w2p_fl,td.w2p_fc {padding-top:4px}
|
||||
div.w2p_export_menu {margin:5px 0}
|
||||
div.w2p_export_menu a, div.w2p_wiki_tags a, div.w2p_cloud a {margin-left:5px; padding:2px 5px; background-color:#f1f1f1; border-radius:5px; -moz-border-radius:5px; -webkit-border-radius:5px;}
|
||||
|
||||
/* tr#submit_record__row {border-top:1px solid #E5E5E5} */
|
||||
#submit_record__row td {padding-top:.5em}
|
||||
|
||||
/* Fix */
|
||||
#auth_user_remember__row label {display:inline}
|
||||
#web2py_user_form td {vertical-align:top}
|
||||
|
||||
/*********** web2py specific ***********/
|
||||
div.flash {
|
||||
font-weight:bold;
|
||||
display:none;
|
||||
position:fixed;
|
||||
padding:10px;
|
||||
top:48px;
|
||||
right:250px;
|
||||
min-width:280px;
|
||||
opacity:0.95;
|
||||
margin:0px 0px 10px 10px;
|
||||
vertical-align:middle;
|
||||
cursor:pointer;
|
||||
color:#fff;
|
||||
background-color:#000;
|
||||
border:2px solid #fff;
|
||||
border-radius:8px;
|
||||
-o-border-radius: 8px;
|
||||
-moz-border-radius:8px;
|
||||
-webkit-border-radius:8px;
|
||||
background-image: -webkit-linear-gradient(top,#222,#000);
|
||||
background-image: -o-linear-gradient(top,#222,#000);
|
||||
background-image: -moz-linear-gradient(90deg, #222, #000);
|
||||
background-image: linear-gradient(top,#222,#000);
|
||||
background-repeat: repeat-x;
|
||||
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
z-index:2000;
|
||||
}
|
||||
|
||||
div.flash #closeflash{color:inherit; float:right; margin-left:15px;}
|
||||
.ie-lte7 div.flash #closeflash
|
||||
{color:expression(this.parentNode.currentStyle['color']);float:none;position:absolute;right:4px;}
|
||||
|
||||
div.flash:hover { opacity:0.25; }
|
||||
|
||||
div.error_wrapper {display:block}
|
||||
div.error {
|
||||
width: 298px;
|
||||
background:red;
|
||||
border: 2px solid #d00;
|
||||
color:white;
|
||||
padding:5px;
|
||||
display:inline-block;
|
||||
background-image: -webkit-linear-gradient(left,#f00,#fdd);
|
||||
background-image: -o-linear-gradient(left,#f00,#fdd);
|
||||
background-image: -moz-linear-gradient(0deg, #f00, #fdd);
|
||||
background-image: linear-gradient(left,#f00,#fdd);
|
||||
background-repeat: repeat-y;
|
||||
}
|
||||
|
||||
.topbar {
|
||||
padding:10px 0;
|
||||
width:100%;
|
||||
color:#959595;
|
||||
vertical-align:middle;
|
||||
padding:auto;
|
||||
background-image:-khtml-gradient(linear,left top,left bottom,from(#333333),to(#222222));
|
||||
background-image:-moz-linear-gradient(top,#333333,#222222);
|
||||
background-image:-ms-linear-gradient(top,#333333,#222222);
|
||||
background-image:-webkit-gradient(linear,left top,left bottom,color-stop(0%,#333333),color-stop(100%,#222222));
|
||||
background-image:-webkit-linear-gradient(top,#333333,#222222);
|
||||
background-image:-o-linear-gradient(top,#333333,#222222);
|
||||
background-image:linear-gradient(top,#333333,#222222);
|
||||
filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#333333',endColorstr='#222222',GradientType=0);
|
||||
-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.25),inset 0 -1px 0 rgba(0,0,0,0.1);
|
||||
-moz-box-shadow:0 1px 3px rgba(0,0,0,0.25),inset 0 -1px 0 rgba(0,0,0,0.1);
|
||||
box-shadow:0 1px 3px rgba(0,0,0,0.25),inset 0 -1px 0 rgba(0,0,0,0.1);
|
||||
}
|
||||
|
||||
.topbar a {
|
||||
color:#e1e1e1;
|
||||
}
|
||||
|
||||
#navbar {float:right; padding:5px; /* same as superfish */}
|
||||
|
||||
.statusbar {
|
||||
background-color:#F5F5F5;
|
||||
margin-top:1em;
|
||||
margin-bottom:1em;
|
||||
padding:.5em 1em;
|
||||
border:1px solid #ddd;
|
||||
border-radius:5px;
|
||||
-moz-border-radius:5px;
|
||||
-webkit-border-radius:5px;
|
||||
}
|
||||
|
||||
.breadcrumbs {float:left}
|
||||
|
||||
.copyright {float:left}
|
||||
#poweredBy {float:right}
|
||||
|
||||
/* #MEDIA QUERIES SECTION */
|
||||
|
||||
/*
|
||||
*Grid
|
||||
*
|
||||
* The default style for SQLFORM.grid even using jquery-iu or another ui framework
|
||||
* will look better with the declarations below
|
||||
* if needed to remove base.css consider keeping these following lines in some css file.
|
||||
*/
|
||||
/* .web2py_table {border:1px solid #ccc} */
|
||||
.web2py_paginator {}
|
||||
.web2py_grid {width:100%}
|
||||
.web2py_grid table {width:100%}
|
||||
.web2py_grid tbody td {padding:2px 5px 2px 5px; vertical-align: middle;}
|
||||
.web2py_grid .web2py_form td {vertical-align: top;}
|
||||
|
||||
.web2py_grid thead th,.web2py_grid tfoot td {
|
||||
background-color:#EAEAEA;
|
||||
padding:10px 5px 10px 5px;
|
||||
}
|
||||
|
||||
.web2py_grid tr.odd {background-color:#F9F9F9}
|
||||
.web2py_grid tr:hover {background-color:#F5F5F5}
|
||||
|
||||
/*
|
||||
.web2py_breadcrumbs a {
|
||||
line-height:20px; margin-right:5px; display:inline-block;
|
||||
padding:3px 5px 3px 5px;
|
||||
font-family:'lucida grande',tahoma,verdana,arial,sans-serif;
|
||||
color:#3C3C3D;
|
||||
text-shadow:1px 1px 0 #FFFFFF;
|
||||
white-space:nowrap; overflow:visible; cursor:pointer;
|
||||
background:#ECECEC;
|
||||
border:1px solid #CACACA;
|
||||
-webkit-border-radius:2px; -moz-border-radius:2px;
|
||||
-webkit-background-clip:padding-box; border-radius:2px;
|
||||
outline:none; position:relative; zoom:1; *display:inline;
|
||||
}
|
||||
*/
|
||||
|
||||
.web2py_console form {
|
||||
width: 100%;
|
||||
display: inline;
|
||||
vertical-align: middle;
|
||||
margin: 0 0 0 5px;
|
||||
}
|
||||
|
||||
.web2py_console form select {
|
||||
margin:0;
|
||||
}
|
||||
|
||||
.web2py_search_actions {
|
||||
float:left;
|
||||
text-align:left;
|
||||
}
|
||||
|
||||
.web2py_grid .row_buttons {
|
||||
min-height:25px;
|
||||
vertical-align:middle;
|
||||
}
|
||||
.web2py_grid .row_buttons a {
|
||||
margin:3px;
|
||||
}
|
||||
|
||||
.web2py_search_actions {
|
||||
width:100%;
|
||||
}
|
||||
|
||||
.web2py_grid .row_buttons a,
|
||||
.web2py_paginator ul li a,
|
||||
.web2py_search_actions a,
|
||||
.web2py_console input[type=submit],
|
||||
.web2py_console input[type=button],
|
||||
.web2py_console button {
|
||||
line-height:20px;
|
||||
margin-right:2px; display:inline-block;
|
||||
padding:3px 5px 3px 5px;
|
||||
}
|
||||
|
||||
.web2py_counter {
|
||||
margin-top:5px;
|
||||
margin-right:2px;
|
||||
width:35%;
|
||||
float:right;
|
||||
text-align:right;
|
||||
}
|
||||
|
||||
/*Fix firefox problem*/
|
||||
.web2py_table {clear:both; display:block}
|
||||
|
||||
.web2py_paginator {
|
||||
padding:5px;
|
||||
text-align:right;
|
||||
background-color:#f2f2f2;
|
||||
|
||||
}
|
||||
.web2py_paginator ul {
|
||||
list-style-type:none;
|
||||
margin:0px;
|
||||
padding:0px;
|
||||
}
|
||||
|
||||
.web2py_paginator ul li {
|
||||
display:inline;
|
||||
}
|
||||
|
||||
.web2py_paginator .current {
|
||||
font-weight:bold;
|
||||
}
|
||||
|
||||
.web2py_breadcrumbs ul {
|
||||
list-style:none;
|
||||
margin-bottom:18px;
|
||||
}
|
||||
|
||||
li.w2p_grid_breadcrumb_elem {
|
||||
display:inline-block;
|
||||
}
|
||||
|
||||
.web2py_console form { vertical-align: middle; }
|
||||
.web2py_console input, .web2py_console select,
|
||||
.web2py_console a { margin: 2px; }
|
||||
|
||||
.web2py_htmltable {
|
||||
width: 100%;
|
||||
overflow-x: auto;
|
||||
-ms-overflow-x:scroll;
|
||||
}
|
||||
|
||||
#wiki_page_body {
|
||||
width: 600px;
|
||||
height: auto;
|
||||
min-height: 400px;
|
||||
}
|
||||
|
||||
/* fix some IE problems */
|
||||
|
||||
.ie-lte7 .topbar .container {z-index:2}
|
||||
.ie-lte8 div.flash{ filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#222222', endColorstr='#000000', GradientType=0 ); }
|
||||
.ie-lte8 div.flash:hover {filter:alpha(opacity=25);}
|
||||
.ie9 #w2p_query_panel {padding-bottom:2px}
|
||||
@@ -1,264 +0,0 @@
|
||||
/*=============================================================
|
||||
CUSTOM RULES
|
||||
==============================================================*/
|
||||
|
||||
body{height:auto;} /* to avoid vertical scroll bar */
|
||||
|
||||
a{}
|
||||
a:visited{}
|
||||
a:hover{}
|
||||
a:focus{}
|
||||
a:active{}
|
||||
|
||||
h1{}
|
||||
h2{}
|
||||
h3{}
|
||||
h4{}
|
||||
h5{}
|
||||
h6{}
|
||||
|
||||
div.flash.flash-center{left:25%;right:25%;}
|
||||
div.flash.flash-top,div.flash.flash-top:hover{
|
||||
position:relative;
|
||||
display:block;
|
||||
margin:0;
|
||||
padding:1em;
|
||||
top:0;
|
||||
left:0;
|
||||
width:100%;
|
||||
text-align:center;
|
||||
text-shadow:0 1px 0 rgba(255, 255, 255, 0.5);
|
||||
color:#865100;
|
||||
background:#feea9a;
|
||||
border:1px solid;
|
||||
border-top:0px;
|
||||
border-left:0px;
|
||||
border-right:0px;
|
||||
border-radius:0;
|
||||
opacity:1;
|
||||
}
|
||||
#header{margin-top:60px;}
|
||||
.mastheader h1 {
|
||||
margin-bottom:9px;
|
||||
font-size:81px;
|
||||
font-weight:bold;
|
||||
letter-spacing:-1px;
|
||||
line-height:1;
|
||||
font-size:54px;
|
||||
}
|
||||
.mastheader small {
|
||||
font-size:20px;
|
||||
font-weight:300;
|
||||
}
|
||||
/* auth navbar - primitive style */
|
||||
.auth_navbar,.auth_navbar a{color:inherit;}
|
||||
.navbar-inner {-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}
|
||||
.ie-lte7 .auth_navbar,.auth_navbar a{color:expression(this.parentNode.currentStyle['color']); /* ie7 doesn't support inherit */}
|
||||
.auth_navbar a{white-space:nowrap;} /* to avoid the nav split on more lines */
|
||||
.auth_navbar a:hover{color:white;text-decoration:none;}
|
||||
ul#navbar>.auth_navbar{
|
||||
display:inline-block;
|
||||
padding:5px;
|
||||
}
|
||||
/* form errors message box customization */
|
||||
div.error_wrapper{margin-bottom:9px;}
|
||||
div.error_wrapper .error{
|
||||
border-radius: 4px;
|
||||
-o-border-radius: 4px;
|
||||
-moz-border-radius: 4px;
|
||||
-webkit-border-radius: 4px;
|
||||
}
|
||||
/* below rules are only for formstyle = bootstrap
|
||||
trying to make errors look like bootstrap ones */
|
||||
div.controls .error_wrapper{
|
||||
display:inline-block;
|
||||
margin-bottom:0;
|
||||
vertical-align:middle;
|
||||
}
|
||||
div.controls .error{
|
||||
min-width:5px;
|
||||
background:inherit;
|
||||
color:#B94A48;
|
||||
border:none;
|
||||
padding:0;
|
||||
margin:0;
|
||||
/*display:inline;*/ /* uncommenting this, the animation effect is lost */
|
||||
}
|
||||
div.controls .help-inline{color:#3A87AD;}
|
||||
div.controls .error_wrapper +.help-inline {margin-left:-99999px;}
|
||||
div.controls select +.error_wrapper {margin-left:5px;}
|
||||
.ie-lte7 div.error{color:#fff;}
|
||||
|
||||
/* beautify brand */
|
||||
.navbar {margin-bottom:0}
|
||||
.navbar-inverse .brand{color:#c6cecc;}
|
||||
.navbar-inverse .brand b{display:inline-block;margin-top:-1px;}
|
||||
.navbar-inverse .brand b>span{font-size:22px;color:white}
|
||||
.navbar-inverse .brand:hover b>span{color:white}
|
||||
/* beautify web2py link in navbar */
|
||||
span.highlighted{color:#d8d800;}
|
||||
.open span.highlighted{color:#ffff00;}
|
||||
|
||||
/*=============================================================
|
||||
OVERRIDING WEB2PY.CSS RULES
|
||||
==============================================================*/
|
||||
|
||||
/* reset to default */
|
||||
a{white-space:normal;}
|
||||
li{margin-bottom:0;}
|
||||
textarea,button{display:block;}
|
||||
/*reset ul padding */
|
||||
ul#navbar{padding:0;}
|
||||
/* label aligned to related input */
|
||||
td.w2p_fl,td.w2p_fc {padding:0;}
|
||||
#web2py_user_form td{vertical-align:middle;}
|
||||
|
||||
/*=============================================================
|
||||
OVERRIDING BOOTSTRAP.CSS RULES
|
||||
==============================================================*/
|
||||
|
||||
/* because web2py handles this via js */
|
||||
textarea { width:90%}
|
||||
.hidden{visibility:visible;}
|
||||
/* right folder for bootstrap black images/icons */
|
||||
[class^="icon-"],[class*=" icon-"]{
|
||||
background-image:url("../images/glyphicons-halflings.png")
|
||||
}
|
||||
/* right folder for bootstrap white images/icons */
|
||||
.icon-white,
|
||||
.nav-tabs > .active > a > [class^="icon-"],
|
||||
.nav-tabs > .active > a > [class*=" icon-"],
|
||||
.nav-pills > .active > a > [class^="icon-"],
|
||||
.nav-pills > .active > a > [class*=" icon-"],
|
||||
.nav-list > .active > a > [class^="icon-"],
|
||||
.nav-list > .active > a > [class*=" icon-"],
|
||||
.navbar-inverse .nav > .active > a > [class^="icon-"],
|
||||
.navbar-inverse .nav > .active > a > [class*=" icon-"],
|
||||
.dropdown-menu > li > a:hover > [class^="icon-"],
|
||||
.dropdown-menu > li > a:hover > [class*=" icon-"],
|
||||
.dropdown-menu > .active > a > [class^="icon-"],
|
||||
.dropdown-menu > .active > a > [class*=" icon-"] {
|
||||
background-image:url("../images/glyphicons-halflings-white.png");
|
||||
}
|
||||
/* bootstrap has a label as input's wrapper while web2py has a div */
|
||||
div>input[type="radio"],div>input[type="checkbox"]{margin:0;}
|
||||
/* bootstrap has button instead of input */
|
||||
input[type="button"], input[type="submit"]{margin-right:8px;}
|
||||
|
||||
/* web2py radio widget adjustment */
|
||||
.generic-widget input[type='radio'] {margin:-1px 0 0 0; vertical-align: middle;}
|
||||
.generic-widget input[type='radio'] + label {display:inline-block; margin:0 0 0 6px; vertical-align: middle;}
|
||||
|
||||
/*=============================================================
|
||||
RULES FOR SOLVING CONFLICTS BETWEEN WEB2PY.CSS AND BOOTSTRAP.CSS
|
||||
==============================================================*/
|
||||
|
||||
/*when formstyle=table3cols*/
|
||||
tr#auth_user_remember__row>td.w2p_fw>div{padding-bottom:8px;}
|
||||
td.w2p_fw div>label{vertical-align:middle;}
|
||||
td.w2p_fc {padding-bottom:5px;}
|
||||
/*when formstyle=divs*/
|
||||
div#auth_user_remember__row{margin-top:4px;}
|
||||
div#auth_user_remember__row>.w2p_fl{display:none;}
|
||||
div#auth_user_remember__row>.w2p_fw{min-height:39px;}
|
||||
div.w2p_fw,div.w2p_fc{
|
||||
display:inline-block;
|
||||
vertical-align:middle;
|
||||
margin-bottom:0;
|
||||
}
|
||||
div.w2p_fc{
|
||||
padding-left:5px;
|
||||
margin-top:-8px;
|
||||
}
|
||||
/*when formstyle=ul*/
|
||||
form>ul{
|
||||
list-style:none;
|
||||
margin:0;
|
||||
}
|
||||
li#auth_user_remember__row{margin-top:4px;}
|
||||
li#auth_user_remember__row>.w2p_fl{display:none;}
|
||||
li#auth_user_remember__row>.w2p_fw{min-height:39px;}
|
||||
/*when formstyle=bootstrap*/
|
||||
#auth_user_remember__row label.checkbox{display:block;}
|
||||
span.inline-help{display:inline-block;}
|
||||
input[type="text"].input-xlarge,input[type="password"].input-xlarge{width:270px;}
|
||||
/*when recaptcha is used*/
|
||||
#recaptcha{min-height:30px;display:inline-block;margin-bottom:0;line-height:30px;vertical-align:middle;}
|
||||
td>#recaptcha{margin-bottom:6px;}
|
||||
div>#recaptcha{margin-bottom:9px;}
|
||||
div.control-group.error{
|
||||
width:auto;
|
||||
background:transparent;
|
||||
border:0;
|
||||
color:inherit;
|
||||
padding:0;
|
||||
background-repeat:repeat;
|
||||
}
|
||||
|
||||
/*=============================================================
|
||||
OTHER RULES
|
||||
==============================================================*/
|
||||
|
||||
/* Massimo Di Pierro fixed alignment in forms with list:string */
|
||||
form table tr{margin-bottom:9px;}
|
||||
td.w2p_fw ul{margin-left:0px;}
|
||||
|
||||
/* web2py_console in grid and smartgrid */
|
||||
.hidden{visibility:visible;}
|
||||
.web2py_console input{
|
||||
display: inline-block;
|
||||
margin-bottom: 0;
|
||||
vertical-align: middle;
|
||||
}
|
||||
.web2py_console input[type="submit"],
|
||||
.web2py_console input[type="button"],
|
||||
.web2py_console button{
|
||||
padding-top:4px;
|
||||
padding-bottom:4px;
|
||||
margin:3px 0 0 2px;
|
||||
}
|
||||
.web2py_console a,
|
||||
.web2py_console select,
|
||||
.web2py_console input
|
||||
{
|
||||
margin:3px 0 0 2px;
|
||||
}
|
||||
.web2py_grid form table{width:auto;}
|
||||
/* auth_user_remember checkbox extrapadding in IE fix */
|
||||
.ie-lte9 input#auth_user_remember.checkbox {padding-left:0;}
|
||||
|
||||
div.controls .error {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
/*=============================================================
|
||||
MEDIA QUERIES
|
||||
==============================================================*/
|
||||
|
||||
@media only screen and (max-width:979px){
|
||||
body{padding-top:0px;}
|
||||
#navbar{/*top:5px;*/}
|
||||
div.flash{right:5px;}
|
||||
.dropdown-menu ul{visibility:visible;}
|
||||
}
|
||||
|
||||
@media only screen and (max-width:479px){
|
||||
body{
|
||||
padding-left:10px;
|
||||
padding-right:10px;
|
||||
}
|
||||
.navbar-fixed-top,.navbar-fixed-bottom {
|
||||
margin-left:-10px;
|
||||
margin-right:-10px;
|
||||
}
|
||||
input[type="text"],input[type="password"],select{
|
||||
width:95%;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 767px) {
|
||||
.navbar {
|
||||
margin-right: -20px;
|
||||
margin-left: -20px;
|
||||
}
|
||||
}
|
||||
@@ -1,122 +0,0 @@
|
||||
/*=============================================================
|
||||
BOOTSTRAP DROPDOWN MENU
|
||||
==============================================================*/
|
||||
|
||||
.dropdown-menu ul{
|
||||
left:100%;
|
||||
position:absolute;
|
||||
top:0;
|
||||
visibility:hidden;
|
||||
margin-top:-1px;
|
||||
}
|
||||
.dropdown-menu li:hover ul{visibility:visible;}
|
||||
.navbar .dropdown-menu ul:before{
|
||||
border-bottom:7px solid transparent;
|
||||
border-left:none;
|
||||
border-right:7px solid rgba(0, 0, 0, 0.2);
|
||||
border-top:7px solid transparent;
|
||||
left:-7px;
|
||||
top:5px;
|
||||
}
|
||||
.nav > li.dropdown > a:after {
|
||||
border-left: 4px solid transparent;
|
||||
border-right: 4px solid transparent;
|
||||
border-top: 4px solid #000000;
|
||||
content: "";
|
||||
display: inline-block;
|
||||
height: 0;
|
||||
opacity: 0.7;
|
||||
vertical-align: top;
|
||||
width: 0;
|
||||
|
||||
margin-left: 2px;
|
||||
margin-top: 8px;
|
||||
|
||||
border-bottom-color: #FFFFFF;
|
||||
border-top-color: #FFFFFF;
|
||||
}
|
||||
.dropdown-menu span{display:inline-block;}
|
||||
ul.dropdown-menu li.dropdown > a:after {
|
||||
border-left: 4px solid #000;
|
||||
border-right: 4px solid transparent;
|
||||
border-bottom: 4px solid transparent;
|
||||
border-top: 4px solid transparent;
|
||||
content: "";
|
||||
display: inline-block;
|
||||
height: 0;
|
||||
opacity: 0.7;
|
||||
vertical-align: top;
|
||||
width: 0;
|
||||
|
||||
margin-left: 8px;
|
||||
margin-top: 6px;
|
||||
}
|
||||
|
||||
ul.nav li.dropdown:hover ul.dropdown-menu {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.open >.dropdown-menu ul{display:block;} /* fix menu issue when BS2.0.4 is applied */
|
||||
|
||||
/*=============================================================
|
||||
BOOTSTRAP SUBMIT BUTTON
|
||||
==============================================================*/
|
||||
|
||||
input[type='submit']:not(.btn) {
|
||||
display: inline-block;
|
||||
padding: 4px 14px;
|
||||
margin-bottom: 0;
|
||||
font-size: 14px;
|
||||
line-height: 20px;
|
||||
color: #333;
|
||||
text-align: center;
|
||||
text-shadow: 0 1px 1px rgba(255, 255, 255, 0.75);
|
||||
vertical-align: middle;
|
||||
cursor: pointer;
|
||||
background-color: whiteSmoke;
|
||||
background-image: -webkit-gradient(linear,0 0,0 100%,from(white),to(#E6E6E6));
|
||||
background-image: -webkit-linear-gradient(top,white,#E6E6E6);
|
||||
background-image: -o-linear-gradient(top,white,#E6E6E6);
|
||||
background-image: linear-gradient(to bottom,white,#E6E6E6);
|
||||
background-image: -moz-linear-gradient(top,white,#E6E6E6);
|
||||
background-repeat: repeat-x;
|
||||
border: 1px solid #BBB;
|
||||
border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25);
|
||||
border-bottom-color: #A2A2A2;
|
||||
-webkit-border-radius: 4px;
|
||||
-moz-border-radius: 4px;
|
||||
border-radius: 4px;
|
||||
filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ffffffff',endColorstr='#ffe6e6e6',GradientType=0);
|
||||
filter: progid:dximagetransform.microsoft.gradient(enabled=false);
|
||||
-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2),0 1px 2px rgba(0, 0, 0, 0.05);
|
||||
-moz-box-shadow: inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);
|
||||
box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.2),0 1px 2px rgba(0, 0, 0, 0.05);
|
||||
}
|
||||
|
||||
input[type='submit']:not(.btn):hover {
|
||||
color: #333;
|
||||
text-decoration: none;
|
||||
background-color: #E6E6E6;
|
||||
background-position: 0 -15px;
|
||||
-webkit-transition: background-position .1s linear;
|
||||
-moz-transition: background-position .1s linear;
|
||||
-o-transition: background-position .1s linear;
|
||||
transition: background-position .1s linear;
|
||||
}
|
||||
|
||||
input[type='submit']:not(.btn).active, input[type='submit']:not(.btn):active {
|
||||
background-color: #E6E6E6;
|
||||
background-color: #D9D9D9 9;
|
||||
background-image: none;
|
||||
outline: 0;
|
||||
-webkit-box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15),0 1px 2px rgba(0, 0, 0, 0.05);
|
||||
-moz-box-shadow: inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);
|
||||
box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.15),0 1px 2px rgba(0, 0, 0, 0.05);
|
||||
}
|
||||
|
||||
/*=============================================================
|
||||
OTHER
|
||||
==============================================================*/
|
||||
|
||||
.ie-lte8 .navbar-fixed-top {position:static;}
|
||||
|
||||
@@ -77,10 +77,10 @@ function doClickSave() {
|
||||
t.attr('disabled', '');
|
||||
var flash = xhr.getResponseHeader('web2py-component-flash');
|
||||
if(flash) {
|
||||
$('.flash').html(decodeURIComponent(flash))
|
||||
$('.w2p_flash').html(decodeURIComponent(flash))
|
||||
.append('<a href="#" class="close">×</a>')
|
||||
.slideDown();
|
||||
} else $('.flash').hide();
|
||||
} else $('.w2p_flash').hide();
|
||||
try {
|
||||
if(json.error) {
|
||||
window.location.href = json.redirect;
|
||||
@@ -158,10 +158,10 @@ function doToggleBreakpoint(filename, url, sel) {
|
||||
// show flash message (if any)
|
||||
var flash = xhr.getResponseHeader('web2py-component-flash');
|
||||
if(flash) {
|
||||
$('.flash').html(decodeURIComponent(flash))
|
||||
$('.w2p_flash').html(decodeURIComponent(flash))
|
||||
.append('<a href="#" class="close">×</a>')
|
||||
.slideDown();
|
||||
} else $('.flash').hide();
|
||||
} else $('.w2p_flash').hide();
|
||||
try {
|
||||
if(json.error) {
|
||||
window.location.href = json.redirect;
|
||||
|
||||
+5
-4
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
@@ -137,9 +137,9 @@
|
||||
<h4>{{=T("Overview")}}</h4>
|
||||
<p>{{=T.M("Number of entries: **%s**", total['entries'])}}</p>
|
||||
{{if total['entries'] > 0:}}
|
||||
<p>{{=T.M("Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses})",
|
||||
dict(ratio=total['ratio'], hits=total['hits'], misses=total['misses']))}}
|
||||
</p>
|
||||
<p>{{=T.M("Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})",
|
||||
dict( ratio=total['ratio'], hits=total['hits'], misses=total['misses']))}}
|
||||
</p>
|
||||
<p>
|
||||
{{=T("Size of cache:")}}
|
||||
{{if object_stats:}}
|
||||
@@ -155,8 +155,8 @@
|
||||
{{=T.M("Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=total['oldest'][0], min=total['oldest'][1], sec=total['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('Cache Keys'), _onclick='jQuery("#all_keys").toggle();')}}
|
||||
<div class="hidden" id="all_keys">
|
||||
{{=BUTTON(T('Cache Keys'), _onclick='jQuery("#all_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="all_keys">
|
||||
{{=total['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -183,8 +183,8 @@
|
||||
{{=T.M("RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=ram['oldest'][0], min=ram['oldest'][1], sec=ram['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('RAM Cache Keys'), _onclick='jQuery("#ram_keys").toggle();')}}
|
||||
<div class="hidden" id="ram_keys">
|
||||
{{=BUTTON(T('RAM Cache Keys'), _onclick='jQuery("#ram_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="ram_keys">
|
||||
{{=ram['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -212,8 +212,8 @@
|
||||
{{=T.M("DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=disk['oldest'][0], min=disk['oldest'][1], sec=disk['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('Disk Cache Keys'), _onclick='jQuery("#disk_keys").toggle();')}}
|
||||
<div class="hidden" id="disk_keys">
|
||||
{{=BUTTON(T('Disk Cache Keys'), _onclick='jQuery("#disk_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="disk_keys">
|
||||
{{=disk['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -249,8 +249,8 @@
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['png'])}}">png</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['svg'])}}">svg</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['pdf'])}}">pdf</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['ps'])}}">ps</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['dot'])}}">dot</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['ps'])}}">ps</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['dot'])}}">dot</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<br />
|
||||
|
||||
@@ -207,7 +207,7 @@ for c in controllers: controller_functions+=[c[:-3]+'/%s.html'%x for x in functi
|
||||
{{=peekfile('views',c, dict(id=id))}}
|
||||
</span>
|
||||
<span class="extras celled celled-one">
|
||||
{{if extend.has_key(c):}}{{=T("extends")}} <b>{{=extend[c]}}</b> {{pass}}
|
||||
{{if c in extend:}}{{=T("extends")}} <b>{{=extend[c]}}</b> {{pass}}
|
||||
{{if include[c]:}}{{=T("includes")}} {{pass}}{{=XML(', '.join([B(f).xml() for f in include[c]]))}}
|
||||
</span>
|
||||
</li>
|
||||
@@ -477,11 +477,11 @@ function filter_files() {
|
||||
message=data['message'];
|
||||
for(var i=0; i<files.length; i++)
|
||||
jQuery('li#_'+files[i].replace(/\//g,'__').replace('.','__')).slideDown();
|
||||
jQuery('.flash').html(message).slideDown();
|
||||
jQuery('.w2p_flash').html(message).slideDown();
|
||||
});
|
||||
} else {
|
||||
jQuery('.component_contents li, .formfield, .comptools').slideDown();
|
||||
jQuery('.flash').html('').hide();
|
||||
jQuery('.w2p_flash').html('').hide();
|
||||
}
|
||||
}
|
||||
jQuery(document).ready(function(){
|
||||
|
||||
@@ -144,7 +144,7 @@ for c in controllers: controller_functions+=[c[:-3]+'/%s.html'%x for x in functi
|
||||
{{=peekfile('views',c)}}
|
||||
</span>
|
||||
<span class="extras celled">
|
||||
{{if extend.has_key(c):}}{{=T("extends")}} <b>{{=extend[c]}}</b> {{pass}}
|
||||
{{if c in extend:}}{{=T("extends")}} <b>{{=extend[c]}}</b> {{pass}}
|
||||
{{if include[c]:}}{{=T("includes")}} {{pass}}{{=XML(', '.join([B(f).xml() for f in include[c]]))}}
|
||||
</span>
|
||||
</li>
|
||||
|
||||
@@ -27,7 +27,9 @@
|
||||
{{buttons.append((URL('pack',args=a), T("Pack all")))}}
|
||||
{{buttons.append((URL('pack_custom',args=a), T("Pack custom")))}}
|
||||
{{if not os.path.exists('applications/%s/compiled' % a):}}
|
||||
{{buttons.append((URL('compile_app',args=a), T("Compile")))}}
|
||||
{{buttons.append((URL('compile_app',args=[a, 'skip_failed_views']),
|
||||
T("Compile (skip failed views)")))}}
|
||||
{{buttons.append((URL('compile_app',args=a), T("Compile (all or nothing)")))}}
|
||||
{{else:}}
|
||||
{{buttons.append((URL('pack',args=(a, 'compiled')), T("Pack compiled")))}}
|
||||
{{if glob.glob('applications/%s/controllers/*.py' % a):}}
|
||||
@@ -138,6 +140,7 @@
|
||||
<p class="row-buttons">
|
||||
{{=button(URL('gae','deploy'), T('Deploy on Google App Engine'))}}
|
||||
{{=button(URL('openshift','deploy'),T('Deploy to OpenShift'))}}
|
||||
{{=button(URL('pythonanywhere','deploy'), T('Deploy to PythonAnywhere'))}}
|
||||
</p>
|
||||
</div> <!-- /DEPLOY ON GAE -->
|
||||
<!-- APP WIZARD -->
|
||||
|
||||
@@ -47,7 +47,7 @@
|
||||
<div id="{{=globals().get('main_id', 'main')}}" class="container-fluid">
|
||||
<div id="main_inner" class="row-fluid">
|
||||
<div class="span12">
|
||||
<div class="flash alert">{{=response.flash or ''}}</div>
|
||||
<div class="w2p_flash alert">{{=response.flash or ''}}</div>
|
||||
{{include}}
|
||||
</div><!-- /main span12 -->
|
||||
</div><!-- /main row-fluid -->
|
||||
@@ -77,6 +77,7 @@
|
||||
<script type="text/javascript">
|
||||
jQuery(document).ready(function(){
|
||||
jQuery("[rel=tooltip]").tooltip();
|
||||
jQuery(":input").attr("autocomplete","off");
|
||||
});
|
||||
</script>
|
||||
<script>
|
||||
|
||||
@@ -0,0 +1,176 @@
|
||||
{{extend 'layout.html'}}
|
||||
<h2><span style="color:#139FD7">python</span>anywhere {{=T('Deployment Interface')}}</h2>
|
||||
|
||||
|
||||
<div id="register_form">
|
||||
<h3>{{=T('Login/Register')}}</h3>
|
||||
<form class="form-horizontal" id="palogin">
|
||||
|
||||
<div class="control-group" id="username__row">
|
||||
<label class="control-label" for="username">{{=T('Username')}}</label>
|
||||
<div class="controls">
|
||||
<input type="text" name="username" id="username"><span class="help-inline">*</span>
|
||||
<span class="help-block"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="control-group" id="email_address__row">
|
||||
<label class="control-label" for="email_address">{{=T('Email Address')}}</label>
|
||||
<div class="controls">
|
||||
<input type="text" name="email_address" id="email_address">
|
||||
<span class="help-block"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="control-group" id="pythonanywhere_password__row">
|
||||
<label class="control-label" for="pythonanywhere_password">{{=T('PythonAnywhere Password')}}</label>
|
||||
<div class="controls">
|
||||
<input type="password" name="pythonanywhere_password" id="pythonanywhere_password">
|
||||
<span class="help-block"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="control-group" id="web2py_admin_password__row">
|
||||
<label class="control-label" for="web2py_admin_password">{{=T('web2py Admin Password')}}</label>
|
||||
<div class="controls">
|
||||
<input type="password" name="web2py_admin_password" id="web2py_admin_password"><span class="help-inline">*</span>
|
||||
<span class="help-block"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="control-group" id="accepts_terms__row">
|
||||
<div class="controls">
|
||||
<label class="checkbox">
|
||||
<input type="checkbox" name="accepts_terms" id="accepts_terms"><a target="_blank" href="https://www.pythonanywhere.com/terms/">{{=T('Accept Terms')}}</a>
|
||||
</label>
|
||||
<span class="help-block"></span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="control-group">
|
||||
<div class="controls">
|
||||
<button type="submit" class="btn btn-primary" id="submit_palogin">{{=T('Submit')}}</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</form>
|
||||
<p>* {{=T('You only need these if you have already registered')}}</p>
|
||||
</div>
|
||||
|
||||
<div class="row-fluid" id="app_manager" style="display:none;">
|
||||
<div class="span6">
|
||||
<h3>{{=T('Local Apps')}}</h3>
|
||||
<form id="apppicker">
|
||||
<select name="apps" class="form-control" id="local" multiple>
|
||||
<option>{{=T('Loading...')}}</option>
|
||||
</select>
|
||||
<input type="submit" value="Deploy" id="deploy_button" class="btn btn-primary">
|
||||
</form>
|
||||
|
||||
<div class="alert alert-info">
|
||||
<strong>{{=T('Warning!')}}</strong> {{=T('if your application uses a database other than sqlite you will then have to configure its DAL in pythonanywhere.')}}
|
||||
</div>
|
||||
</div>
|
||||
<div class="span6">
|
||||
<h3>{{=T('PythonAnywhere Apps')}}</h3>
|
||||
<ul id="pythonanywhere">
|
||||
<li>{{=T('Loading...')}}</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
$('#palogin').off('submit');
|
||||
$('#palogin').submit(function(event) {
|
||||
var data = $('#palogin').serialize();
|
||||
$.web2py.disableElement($('#submit_palogin'));
|
||||
$.web2py.disableFormElements($('#palogin'));
|
||||
$.ajax({
|
||||
url: '{{=URL("pythonanywhere", "create_account")}}',
|
||||
type: 'POST',
|
||||
data: data,
|
||||
dataType: 'json',
|
||||
}).done(function(data, textStatus, jqXHR) {
|
||||
$('#palogin .error').removeClass('error');
|
||||
$('#palogin .help-block').text('');
|
||||
if(data.status == 'error') {
|
||||
for(var error in data.errors) {
|
||||
$('#' + error + '__row').addClass('error');
|
||||
$('#' + error + '__row .help-block').text(data.errors[error][0]);
|
||||
}
|
||||
$.web2py.enableElement($('#submit_palogin'));
|
||||
$.web2py.enableFormElements($('#palogin'));
|
||||
$.web2py.flash("{{=T('Form has errors')}}");
|
||||
} else {
|
||||
$.web2py.flash("{{=T('Login successful')}}");
|
||||
$('#register_form').hide();
|
||||
$('#app_manager').show();
|
||||
refresh_apps();
|
||||
}
|
||||
}).fail(function(){
|
||||
$.web2py.flash("{{=T('Something went wrong please wait a few minutes before retrying')}}");
|
||||
$.web2py.enableElement($('#submit_palogin'));
|
||||
$.web2py.enableFormElements($('#palogin'));
|
||||
});
|
||||
event.preventDefault();
|
||||
});
|
||||
|
||||
$('#apppicker').off('submit');
|
||||
$('#apppicker').submit(function(event) {
|
||||
var data = $('#apppicker').serialize();
|
||||
$.web2py.disableElement($('#deploy_button'));
|
||||
$.ajax({
|
||||
url: '{{=URL("pythonanywhere", "bulk_install")}}',
|
||||
type: 'POST',
|
||||
data: {username: $('#username').val(), password: $('#web2py_admin_password').val(), apps: $('#local').val()},
|
||||
dataType: 'json',
|
||||
}).done(function(data, textStatus, jqXHR) {
|
||||
refresh_apps();
|
||||
$.web2py.enableElement($('#deploy_button'));
|
||||
}).fail(function(){
|
||||
$.web2py.flash("{{=T('Something went wrong please wait a few minutes before retrying')}}");
|
||||
$.web2py.enableElement($('#deploy_button'));
|
||||
});
|
||||
event.preventDefault();
|
||||
});
|
||||
});
|
||||
|
||||
function refresh_apps() {
|
||||
// Refresh List of Apps
|
||||
$('#deploy_button').prop('disabled', true);
|
||||
$.ajax({
|
||||
url: '{{=URL("pythonanywhere", "list_apps")}}',
|
||||
type: 'GET',
|
||||
data: {username: $('#username').val(), password: $('#web2py_admin_password').val()},
|
||||
dataType: 'json',
|
||||
}).done(function(data, textStatus, jqXHR) {
|
||||
var i = 0;
|
||||
$('#local').html('')
|
||||
for(i = 0; i < data.local.length; i++) {
|
||||
$('#local').append($('<option>', {
|
||||
value: data.local[i],
|
||||
text: data.local[i]
|
||||
}));
|
||||
}
|
||||
$('#local').multiSelect('refresh');
|
||||
$('#pythonanywhere').html('')
|
||||
for(i = 0; i < data.pythonanywhere.length; i++) {
|
||||
$('#pythonanywhere').append($('<li>', {
|
||||
text: data.pythonanywhere[i]
|
||||
}));
|
||||
}
|
||||
$('#deploy_button').prop('disabled', false);
|
||||
$.web2py.hide_flash();
|
||||
}).fail(function(){
|
||||
// Mostly this happens if it's a new account, just waiting a bit should be enough.
|
||||
$.get('http://' + $('#username').val() + '.pythonanywhere.com'); // Kickstart the instance
|
||||
$.web2py.flash("{{=T('Please wait, giving pythonanywhere a moment...')}}");
|
||||
setTimeout(refresh_apps, 30000);
|
||||
});
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
@@ -445,30 +445,31 @@ def ccache():
|
||||
gae_stats['oldest'] = GetInHMS(time.time() - gae_stats['oldest_item_age'])
|
||||
total.update(gae_stats)
|
||||
else:
|
||||
# get ram stats directly from the cache object
|
||||
ram_stats = cache.ram.stats[request.application]
|
||||
ram['hits'] = ram_stats['hit_total'] - ram_stats['misses']
|
||||
ram['misses'] = ram_stats['misses']
|
||||
try:
|
||||
ram['ratio'] = ram['hits'] * 100 / ram_stats['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
|
||||
for key, value in cache.ram.storage.iteritems():
|
||||
if isinstance(value, dict):
|
||||
ram['hits'] = value['hit_total'] - value['misses']
|
||||
ram['misses'] = value['misses']
|
||||
try:
|
||||
ram['ratio'] = ram['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
ram['entries'] += 1
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
ram['entries'] += 1
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
|
||||
for key in cache.disk.storage:
|
||||
value = cache.disk.storage[key]
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
if isinstance(value[1], dict):
|
||||
disk['hits'] = value[1]['hit_total'] - value[1]['misses']
|
||||
disk['misses'] = value[1]['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
disk['ratio'] = disk['hits'] * 100 / value[1]['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
@@ -485,7 +486,7 @@ def ccache():
|
||||
ram_keys.remove('oldest')
|
||||
for key in ram_keys:
|
||||
total[key] = ram[key] + disk[key]
|
||||
|
||||
|
||||
try:
|
||||
total['ratio'] = total['hits'] * 100 / (total['hits'] +
|
||||
total['misses'])
|
||||
@@ -575,7 +576,7 @@ def bg_graph_model():
|
||||
meta_graphmodel = dict(group=request.application, color='#ECECEC')
|
||||
|
||||
group = meta_graphmodel['group'].replace(' ', '')
|
||||
if not subgraphs.has_key(group):
|
||||
if group not in subgraphs:
|
||||
subgraphs[group] = dict(meta=meta_graphmodel, tables=[])
|
||||
subgraphs[group]['tables'].append(tablename)
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
#### Learning and Demos
|
||||
- [[Intro video http://www.youtube.com/watch?v=BXzqmHx6edY]] and [[code examples https://github.com/mjhea0/web2py]]
|
||||
- [[Step by step tutorial https://milesm.pythonanywhere.com/wiki]]
|
||||
- [[web2py Reference Project http://www.web2pyref.com/]]
|
||||
- [[Killer Web Development Tutorial http://killer-web-development.com/]]
|
||||
- [[Real Python for the Web http://www.realpython.com]] (web development with web2py and more!)
|
||||
- [[Admin Demo http://www.web2py.com/demo_admin popup]] (web-based IDE)
|
||||
|
||||
@@ -320,3 +320,10 @@ li.w2p_grid_breadcrumb_elem {
|
||||
.ie-lte8 div.flash{ filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#222222', endColorstr='#000000', GradientType=0 ); }
|
||||
.ie-lte8 div.flash:hover {filter:alpha(opacity=25);}
|
||||
.ie9 #w2p_query_panel {padding-bottom:2px}
|
||||
.control-label.readonly{
|
||||
padding-top:0px !important;
|
||||
padding-right:0px !important;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
+5
-4
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
@@ -137,9 +137,9 @@
|
||||
<h4>{{=T("Overview")}}</h4>
|
||||
<p>{{=T.M("Number of entries: **%s**", total['entries'])}}</p>
|
||||
{{if total['entries'] > 0:}}
|
||||
<p>{{=T.M("Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses})",
|
||||
dict(ratio=total['ratio'], hits=total['hits'], misses=total['misses']))}}
|
||||
</p>
|
||||
<p>{{=T.M("Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})",
|
||||
dict( ratio=total['ratio'], hits=total['hits'], misses=total['misses']))}}
|
||||
</p>
|
||||
<p>
|
||||
{{=T("Size of cache:")}}
|
||||
{{if object_stats:}}
|
||||
@@ -155,8 +155,8 @@
|
||||
{{=T.M("Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=total['oldest'][0], min=total['oldest'][1], sec=total['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('Cache Keys'), _onclick='jQuery("#all_keys").toggle();')}}
|
||||
<div class="hidden" id="all_keys">
|
||||
{{=BUTTON(T('Cache Keys'), _onclick='jQuery("#all_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="all_keys">
|
||||
{{=total['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -183,8 +183,8 @@
|
||||
{{=T.M("RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=ram['oldest'][0], min=ram['oldest'][1], sec=ram['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('RAM Cache Keys'), _onclick='jQuery("#ram_keys").toggle();')}}
|
||||
<div class="hidden" id="ram_keys">
|
||||
{{=BUTTON(T('RAM Cache Keys'), _onclick='jQuery("#ram_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="ram_keys">
|
||||
{{=ram['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -212,8 +212,8 @@
|
||||
{{=T.M("DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=disk['oldest'][0], min=disk['oldest'][1], sec=disk['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('Disk Cache Keys'), _onclick='jQuery("#disk_keys").toggle();')}}
|
||||
<div class="hidden" id="disk_keys">
|
||||
{{=BUTTON(T('Disk Cache Keys'), _onclick='jQuery("#disk_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="disk_keys">
|
||||
{{=disk['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -249,8 +249,8 @@
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['png'])}}">png</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['svg'])}}">svg</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['pdf'])}}">pdf</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['ps'])}}">ps</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['dot'])}}">dot</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['ps'])}}">ps</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['dot'])}}">dot</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<br />
|
||||
|
||||
@@ -445,30 +445,31 @@ def ccache():
|
||||
gae_stats['oldest'] = GetInHMS(time.time() - gae_stats['oldest_item_age'])
|
||||
total.update(gae_stats)
|
||||
else:
|
||||
# get ram stats directly from the cache object
|
||||
ram_stats = cache.ram.stats[request.application]
|
||||
ram['hits'] = ram_stats['hit_total'] - ram_stats['misses']
|
||||
ram['misses'] = ram_stats['misses']
|
||||
try:
|
||||
ram['ratio'] = ram['hits'] * 100 / ram_stats['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
|
||||
for key, value in cache.ram.storage.iteritems():
|
||||
if isinstance(value, dict):
|
||||
ram['hits'] = value['hit_total'] - value['misses']
|
||||
ram['misses'] = value['misses']
|
||||
try:
|
||||
ram['ratio'] = ram['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
ram['entries'] += 1
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
ram['entries'] += 1
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
|
||||
for key in cache.disk.storage:
|
||||
value = cache.disk.storage[key]
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
if isinstance(value[1], dict):
|
||||
disk['hits'] = value[1]['hit_total'] - value[1]['misses']
|
||||
disk['misses'] = value[1]['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
disk['ratio'] = disk['hits'] * 100 / value[1]['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
@@ -485,7 +486,7 @@ def ccache():
|
||||
ram_keys.remove('oldest')
|
||||
for key in ram_keys:
|
||||
total[key] = ram[key] + disk[key]
|
||||
|
||||
|
||||
try:
|
||||
total['ratio'] = total['hits'] * 100 / (total['hits'] +
|
||||
total['misses'])
|
||||
@@ -575,7 +576,7 @@ def bg_graph_model():
|
||||
meta_graphmodel = dict(group=request.application, color='#ECECEC')
|
||||
|
||||
group = meta_graphmodel['group'].replace(' ', '')
|
||||
if not subgraphs.has_key(group):
|
||||
if group not in subgraphs:
|
||||
subgraphs[group] = dict(meta=meta_graphmodel, tables=[])
|
||||
subgraphs[group]['tables'].append(tablename)
|
||||
|
||||
|
||||
@@ -29,12 +29,12 @@ def user():
|
||||
http://..../[app]/default/user/profile
|
||||
http://..../[app]/default/user/retrieve_password
|
||||
http://..../[app]/default/user/change_password
|
||||
http://..../[app]/default/user/manage_users (requires membership in
|
||||
http://..../[app]/default/user/bulk_register
|
||||
use @auth.requires_login()
|
||||
@auth.requires_membership('group name')
|
||||
@auth.requires_permission('read','table name',record_id)
|
||||
to decorate functions that need access control
|
||||
also notice there is http://..../[app]/appadmin/manage/auth to allow administrator to manage users
|
||||
"""
|
||||
return dict(form=auth())
|
||||
|
||||
|
||||
@@ -58,7 +58,7 @@ service = Service()
|
||||
plugins = PluginManager()
|
||||
|
||||
## create all tables needed by auth if not custom tables
|
||||
auth.define_tables(username=False, signature=False, enable_tokens=False)
|
||||
auth.define_tables(username=False, signature=False)
|
||||
|
||||
## configure email
|
||||
mail = auth.settings.mailer
|
||||
|
||||
File diff suppressed because one or more lines are too long
+11
-3
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
|
||||
div.flash {
|
||||
div.w2p_flash {
|
||||
background-image: none;
|
||||
border-radius: 4px;
|
||||
-o-border-radius: 4px;
|
||||
@@ -15,13 +15,13 @@ div.flash {
|
||||
margin: 0 0 20px;
|
||||
padding: 15px 35px 15px 15px;
|
||||
}
|
||||
div.flash.alert:hover {
|
||||
div.w2p_flash.alert:hover {
|
||||
opacity: 1;
|
||||
}
|
||||
.ie-lte8 div.flash {
|
||||
.ie-lte8 div.w2p_flash {
|
||||
filter: progid: DXImageTransform.Microsoft.gradient(startColorstr='#222222', endColorstr='#000000', GradientType=0);
|
||||
}
|
||||
.ie-lte8 div.flash:hover {
|
||||
.ie-lte8 div.w2p_flash:hover {
|
||||
filter: alpha(opacity=25);
|
||||
}
|
||||
.main-container {
|
||||
@@ -37,7 +37,7 @@ div.error {
|
||||
display: inline-block;
|
||||
padding: 5px;
|
||||
}
|
||||
div.flash.alert {
|
||||
div.w2p_flash.alert {
|
||||
display: none;
|
||||
position: fixed;
|
||||
top: 70px;
|
||||
@@ -136,7 +136,7 @@ header h1 {
|
||||
header .jumbotron {
|
||||
background-color: transparent;
|
||||
}
|
||||
.flash {
|
||||
.w2p_flash {
|
||||
opacity: 0.9!important;
|
||||
right: 100px;
|
||||
}
|
||||
@@ -314,6 +314,3 @@ td.w2p_fc,
|
||||
input[type=checkbox], input[type=radio] {
|
||||
margin: 4px 4px 0 0;
|
||||
}
|
||||
.btn {
|
||||
margin-right: 4px;
|
||||
}
|
||||
@@ -33,7 +33,7 @@ audio {width:200px}
|
||||
[type="text"], [type="password"], select {
|
||||
margin-right: 5px; width: 300px;
|
||||
}
|
||||
.hidden {display:none;visibility:visible}
|
||||
.w2p_hidden {display:none;visibility:visible}
|
||||
.right {float:right; text-align:right}
|
||||
.left {float:left; text-align:left}
|
||||
.center {width:100%; text-align:center; vertical-align:middle}
|
||||
@@ -88,7 +88,7 @@ div.w2p_export_menu a, div.w2p_wiki_tags a, div.w2p_cloud a {margin-left:5px; pa
|
||||
#web2py_user_form td {vertical-align:top}
|
||||
|
||||
/*********** web2py specific ***********/
|
||||
div.flash {
|
||||
div.w2p_flash {
|
||||
font-weight:bold;
|
||||
display:none;
|
||||
position:fixed;
|
||||
@@ -117,11 +117,11 @@ div.flash {
|
||||
z-index:2000;
|
||||
}
|
||||
|
||||
div.flash #closeflash{color:inherit; float:right; margin-left:15px;}
|
||||
div.w2p_flash #closeflash{color:inherit; float:right; margin-left:15px;}
|
||||
.ie-lte7 div.flash #closeflash
|
||||
{color:expression(this.parentNode.currentStyle['color']);float:none;position:absolute;right:4px;}
|
||||
|
||||
div.flash:hover { opacity:0.25; }
|
||||
div.w2p_flash:hover { opacity:0.25; }
|
||||
|
||||
div.error_wrapper {display:block}
|
||||
div.error {
|
||||
@@ -304,8 +304,8 @@ li.w2p_grid_breadcrumb_elem {
|
||||
/* fix some IE problems */
|
||||
|
||||
.ie-lte7 .topbar .container {z-index:2}
|
||||
.ie-lte8 div.flash{ filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#222222', endColorstr='#000000', GradientType=0 ); }
|
||||
.ie-lte8 div.flash:hover {filter:alpha(opacity=25);}
|
||||
.ie-lte8 div.w2p_flash{ filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#222222', endColorstr='#000000', GradientType=0 ); }
|
||||
.ie-lte8 div.w2p_flash:hover {filter:alpha(opacity=25);}
|
||||
.ie9 #w2p_query_panel {padding-bottom:2px}
|
||||
|
||||
.web2py_console .form-control {width: 20%; display: inline;}
|
||||
|
||||
+4
-4
File diff suppressed because one or more lines are too long
+5
-4
File diff suppressed because one or more lines are too long
@@ -6,9 +6,9 @@
|
||||
* this over and over... all will be bound to the document
|
||||
*/
|
||||
/*adds btn class to buttons*/
|
||||
$('button', target).addClass('btn btn-default');
|
||||
$('button:not([class^="btn"])', target).addClass('btn btn-default');
|
||||
$("p.w2p-autocomplete-widget input").addClass('form-control');
|
||||
$('form input[type="submit"], form input[type="button"]', target).addClass('btn btn-default');
|
||||
$('form input[type="submit"]:not([class^="btn"]), form input[type="button"]:not([class^="btn"])', target).addClass('btn btn-default');
|
||||
/* javascript for PasswordWidget*/
|
||||
$('input[type=password][data-w2p_entropy]', target).each(function() {
|
||||
web2py.validate_entropy($(this));
|
||||
@@ -18,9 +18,9 @@
|
||||
function pe(ul, e) {
|
||||
var new_line = ml(ul);
|
||||
rel(ul);
|
||||
if ($(e.target).parent().is(':visible')) {
|
||||
if ($(e.target).closest('li').is(':visible')) {
|
||||
/* make sure we didn't delete the element before we insert after */
|
||||
new_line.insertAfter($(e.target).parent());
|
||||
new_line.insertAfter($(e.target).closest('li'));
|
||||
} else {
|
||||
/* the line we clicked on was deleted, just add to end of list */
|
||||
new_line.appendTo(ul);
|
||||
@@ -30,9 +30,9 @@
|
||||
}
|
||||
|
||||
function rl(ul, e) {
|
||||
if ($(ul).children().length > 1) {
|
||||
if ($(ul).find('li').length > 1) {
|
||||
/* only remove if we have more than 1 item so the list is never empty */
|
||||
$(e.target).parent().remove();
|
||||
$(e.target).closest('li').remove();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,13 +46,13 @@
|
||||
function rel(ul) {
|
||||
/* keep only as many as needed*/
|
||||
$(ul).find("li").each(function() {
|
||||
var trimmed = $.trim($(this.firstChild).val());
|
||||
var trimmed = $.trim($(this).find(":text").val());
|
||||
if (trimmed == '') $(this).remove();
|
||||
else $(this.firstChild).val(trimmed);
|
||||
else $(this).find(":text").val(trimmed);
|
||||
});
|
||||
}
|
||||
var ul = this;
|
||||
$(ul).find(":text").after('<a class="btn btn-default" href="#">+</a> <a class="btn btn-default" href="#">-</a>').keypress(function(e) {
|
||||
$(ul).find(":text").addClass('form-control').wrap("<div class='input-group'></div>").after('<div class="input-group-addon"><i class="glyphicon glyphicon-plus"></i></div><div class="input-group-addon"><i class="glyphicon glyphicon-minus"></i></div>').keypress(function(e) {
|
||||
return (e.which == 13) ? pe(ul, e) : true;
|
||||
}).next().click(function(e) {
|
||||
pe(ul, e);
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -137,9 +137,9 @@
|
||||
<h4>{{=T("Overview")}}</h4>
|
||||
<p>{{=T.M("Number of entries: **%s**", total['entries'])}}</p>
|
||||
{{if total['entries'] > 0:}}
|
||||
<p>{{=T.M("Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses})",
|
||||
dict(ratio=total['ratio'], hits=total['hits'], misses=total['misses']))}}
|
||||
</p>
|
||||
<p>{{=T.M("Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})",
|
||||
dict( ratio=total['ratio'], hits=total['hits'], misses=total['misses']))}}
|
||||
</p>
|
||||
<p>
|
||||
{{=T("Size of cache:")}}
|
||||
{{if object_stats:}}
|
||||
@@ -155,8 +155,8 @@
|
||||
{{=T.M("Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=total['oldest'][0], min=total['oldest'][1], sec=total['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('Cache Keys'), _onclick='jQuery("#all_keys").toggle();')}}
|
||||
<div class="hidden" id="all_keys">
|
||||
{{=BUTTON(T('Cache Keys'), _onclick='jQuery("#all_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="all_keys">
|
||||
{{=total['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -183,8 +183,8 @@
|
||||
{{=T.M("RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=ram['oldest'][0], min=ram['oldest'][1], sec=ram['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('RAM Cache Keys'), _onclick='jQuery("#ram_keys").toggle();')}}
|
||||
<div class="hidden" id="ram_keys">
|
||||
{{=BUTTON(T('RAM Cache Keys'), _onclick='jQuery("#ram_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="ram_keys">
|
||||
{{=ram['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -212,8 +212,8 @@
|
||||
{{=T.M("DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.",
|
||||
dict(hours=disk['oldest'][0], min=disk['oldest'][1], sec=disk['oldest'][2]))}}
|
||||
</p>
|
||||
{{=BUTTON(T('Disk Cache Keys'), _onclick='jQuery("#disk_keys").toggle();')}}
|
||||
<div class="hidden" id="disk_keys">
|
||||
{{=BUTTON(T('Disk Cache Keys'), _onclick='jQuery("#disk_keys").toggle().toggleClass( "w2p_hidden" );')}}
|
||||
<div class="w2p_hidden" id="disk_keys">
|
||||
{{=disk['keys']}}
|
||||
</div>
|
||||
<br />
|
||||
@@ -249,8 +249,8 @@
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['png'])}}">png</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['svg'])}}">svg</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['pdf'])}}">pdf</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['ps'])}}">ps</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['dot'])}}">dot</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['ps'])}}">ps</a></li>
|
||||
<li><a href="{{=URL('appadmin', 'bg_graph_model', args=['dot'])}}">dot</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<br />
|
||||
|
||||
@@ -21,7 +21,6 @@
|
||||
<meta name="google-site-verification" content="">
|
||||
<!-- include stylesheets -->
|
||||
<link rel="stylesheet" href="{{=URL('static','css/bootstrap.min.css')}}"/>
|
||||
<link rel="stylesheet" href="{{=URL('static','css/bootstrap-theme.min.css')}}"/>
|
||||
<link rel="stylesheet" href="{{=URL('static','css/web2py-bootstrap3.css')}}"/>
|
||||
<link rel="shortcut icon" href="{{=URL('static','images/favicon.ico')}}" type="image/x-icon">
|
||||
<link rel="apple-touch-icon" href="{{=URL('static','images/favicon.png')}}">
|
||||
@@ -47,9 +46,9 @@
|
||||
</head>
|
||||
<body>
|
||||
<!--[if lt IE 8]><p class="browserupgrade">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> to improve your experience.</p><![endif]-->
|
||||
<div class="flash alert alert-dismissable">{{=response.flash or ''}}</div>
|
||||
<div class="w2p_flash alert alert-dismissable">{{=response.flash or ''}}</div>
|
||||
<!-- Navbar ======================================= -->
|
||||
<nav class="navbar navbar-inverse navbar-fixed-top" role="navigation">
|
||||
<nav class="navbar navbar-default navbar-fixed-top" role="navigation">
|
||||
<div class="container-fluid">
|
||||
<div class="navbar-header">
|
||||
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
|
||||
|
||||
@@ -120,7 +120,7 @@ args=()
|
||||
class=handlers.RotatingFileHandler
|
||||
level=DEBUG
|
||||
formatter=simpleFormatter
|
||||
args=("logs/web2py.log", "a", 1000000, 5)
|
||||
args=("web2py.log", "a", 1000000, 5)
|
||||
|
||||
[handler_osxSysLogHandler]
|
||||
class=handlers.SysLogHandler
|
||||
|
||||
Vendored
+150
@@ -0,0 +1,150 @@
|
||||
from fabric.api import *
|
||||
from fabric.operations import put, get
|
||||
from fabric.contrib.files import exists
|
||||
import os
|
||||
import datetime
|
||||
import getpass
|
||||
|
||||
env.hosts = env.hosts or raw_input('hostname (example.com):').split(',')
|
||||
env.user = env.user or raw_input('username :')
|
||||
|
||||
INSTALL_SCRIPT = "setup-web2py-nginx-uwsgi-ubuntu.sh"
|
||||
now = datetime.datetime.now()
|
||||
applications = '/home/www-data/web2py/applications'
|
||||
|
||||
def create_user(username):
|
||||
"""fab -H root@host create_user:username"""
|
||||
password = getpass.getpass(name+' password for %s> ' % username)
|
||||
run('useradd -m %s' % username)
|
||||
run('usermod --password %s %s' % (crypt.crypt(password, 'salt'), username))
|
||||
run('mkdir -p ~%s/.ssh' % username)
|
||||
run('cp /etc/sudoers /tmp/sudoers.new')
|
||||
append('/tmp/sudoers.new', '%s ALL=NOPASSWD: ALL' % username, use_sudo=True)
|
||||
run('visudo -c -f /tmp/sudoers.new')
|
||||
run('EDITOR="cp /tmp/sudoers.new" visudo')
|
||||
uncomment('~%s/.bashrc' % username, '#force_color_prompt=yes')
|
||||
local('ssh-copy-id %s' % env.hosts[0])
|
||||
|
||||
def install_web2py():
|
||||
"""fab -H username@host install_web2py"""
|
||||
sudo('wget https://raw.githubusercontent.com/web2py/web2py/master/scripts/%s' % INSTALL_SCRIPT)
|
||||
sudo('chmod +x %s' % INSTALL_SCRIPT)
|
||||
sudo('./'+INSTALL_SCRIPT)
|
||||
|
||||
def start_webserver():
|
||||
sudo('service nginx start')
|
||||
sudo('start uwsgi-emperor')
|
||||
sudo('start web2py-scheduler')
|
||||
|
||||
def stop_webserver():
|
||||
sudo('stop uwsgi-emperor')
|
||||
sudo('service nginx stop')
|
||||
sudo('stop web2py-scheduler')
|
||||
|
||||
def restart_webserver():
|
||||
stop_webserver()
|
||||
start_webserver()
|
||||
|
||||
def notify(appname=None):
|
||||
"""fab -H username@host notify:appname"""
|
||||
appname = appname or os.path.split(os.getcwd())[-1]
|
||||
appfolder = applications+'/'+appname
|
||||
with cd(appfolder):
|
||||
sudo('echo "response.flash = \'System Going Down For Maintenance\'" > models/flash_goingdown.py')
|
||||
|
||||
def down(appname=None):
|
||||
"""fab -H username@host down:appname"""
|
||||
appname = appname or os.path.split(os.getcwd())[-1]
|
||||
appfolder = applications+'/'+appname
|
||||
with cd(appfolder):
|
||||
sudo('echo `date` > DISABLED')
|
||||
sudo('rm -rf sessions/* || true')
|
||||
|
||||
def up(appname=None):
|
||||
"""fab -H username@host up:appname"""
|
||||
appname = appname or os.path.split(os.getcwd())[-1]
|
||||
appfolder = applications+'/'+appname
|
||||
with cd(appfolder):
|
||||
if exists('modules/flash_goingdown.py'):
|
||||
sudo('rm modules/flash_goingdown.py')
|
||||
sudo('rm DISABLED')
|
||||
|
||||
def mkdir_or_backup(appname):
|
||||
appfolder = applications+'/'+appname
|
||||
if not exists(appfolder):
|
||||
sudo('mkdir %s' % appfolder)
|
||||
sudo('chown -R www-data:www-data %s' % appfolder)
|
||||
backup = None
|
||||
else:
|
||||
dt = now.strftime('%y-%m-%d-%h-%m')
|
||||
backup = '%s.%s.zip' % (appname, dt)
|
||||
with cd(applications):
|
||||
sudo('zip -r %s %s' % (backup, appname))
|
||||
return backup
|
||||
|
||||
def git_deploy(appname, repo):
|
||||
"""fab -H username@host git_deploy:appname,username/remoname"""
|
||||
appfolder = applications+'/'+appname
|
||||
backup = mkdir_or_backup(appname)
|
||||
|
||||
if exists(appfolder):
|
||||
with cd(appfolder):
|
||||
sudo('git pull origin master')
|
||||
sudo('chown -R www-data:www-data *')
|
||||
else:
|
||||
with cd(applications):
|
||||
sudo('git clone git@github.com/%s %s' % (repo, name))
|
||||
sudo('chown -R www-data:www-data %s' % name)
|
||||
|
||||
def retrieve(appname=None):
|
||||
"""fab -H username@host retrieve:appname"""
|
||||
appname = appname or os.path.split(os.getcwd())[-1]
|
||||
appfolder = applications+'/'+appname
|
||||
filename = '%s.zip' % appname
|
||||
with cd(appfolder):
|
||||
sudo('zip -r /tmp/%s *' % filename)
|
||||
get('/tmp/%s' % filename, filename)
|
||||
sudo('rm /tmp/%s' % filename)
|
||||
local('unzip %s' % filename)
|
||||
local('rm %s' % filename)
|
||||
|
||||
def deploy(appname=None, all=False):
|
||||
"""fab -H username@host deploy:appname,all"""
|
||||
appname = appname or os.path.split(os.getcwd())[-1]
|
||||
appfolder = applications+'/'+appname
|
||||
if os.path.exists('_update.zip'):
|
||||
os.unlink('_update.zip')
|
||||
|
||||
backup = mkdir_or_backup(appname)
|
||||
|
||||
if all=='all' or not backup:
|
||||
local('zip -r _update.zip * -x *~ -x .* -x \#* -x *.bak -x *.bak2')
|
||||
else:
|
||||
local('zip -r _update.zip */*.py views/*.html views/*/*.html static/*')
|
||||
put('_update.zip','/tmp/_update.zip')
|
||||
|
||||
with cd(appfolder):
|
||||
sudo('unzip -o /tmp/_update.zip')
|
||||
sudo('chown -R www-data:www-data *')
|
||||
sudo('echo "%s" > DATE_DEPLOYMENT' % now)
|
||||
|
||||
if backup:
|
||||
print 'TO RESTORE: fab restore:%s' % backup
|
||||
|
||||
def restore(backup):
|
||||
"""fab -H username@host restore:backupfilename"""
|
||||
appname = backup.split('/')[-1].split('.')[0]
|
||||
appfolder = applications + '/' + appname
|
||||
with cd(appfolder):
|
||||
sudo('rm -r *')
|
||||
with cd(applications):
|
||||
sudo('unzip %s' % backup)
|
||||
sudo('chown -R www-data:www-data %s' % appname)
|
||||
|
||||
def cleanup(appname):
|
||||
appname = appname or os.path.split(os.getcwd())[-1]
|
||||
appfolder = applications + '/' + appname
|
||||
with cd(appfolder):
|
||||
sudo('rm -rf sessions/* || true')
|
||||
sudo('rm -rf errors/* || true')
|
||||
sudo('rm -rf cache/* || true')
|
||||
+6
-5
@@ -59,6 +59,8 @@ def app_pack(app, request, raise_ex=False, filenames=None):
|
||||
w2p_pack(filename, apath(app, request), filenames=filenames)
|
||||
return filename
|
||||
except Exception, e:
|
||||
import traceback
|
||||
print traceback.format_exc()
|
||||
if raise_ex:
|
||||
raise
|
||||
return False
|
||||
@@ -119,9 +121,8 @@ def app_cleanup(app, request):
|
||||
|
||||
# Remove cache files
|
||||
path = apath('%s/cache/' % app, request)
|
||||
CacheOnDisk(folder=path).clear()
|
||||
|
||||
if os.path.exists(path):
|
||||
CacheOnDisk(folder=path).clear()
|
||||
for f in os.listdir(path):
|
||||
try:
|
||||
if f[:1] != '.': recursive_unlink(os.path.join(path, f))
|
||||
@@ -130,7 +131,7 @@ def app_cleanup(app, request):
|
||||
return r
|
||||
|
||||
|
||||
def app_compile(app, request):
|
||||
def app_compile(app, request, skip_failed_views=False):
|
||||
"""Compiles the application
|
||||
|
||||
Args:
|
||||
@@ -144,8 +145,8 @@ def app_compile(app, request):
|
||||
from compileapp import compile_application, remove_compiled_application
|
||||
folder = apath(app, request)
|
||||
try:
|
||||
compile_application(folder)
|
||||
return None
|
||||
failed_views = compile_application(folder, skip_failed_views)
|
||||
return failed_views
|
||||
except (Exception, RestrictedError):
|
||||
tb = traceback.format_exc(sys.exc_info)
|
||||
remove_compiled_application(folder)
|
||||
|
||||
+14
-10
@@ -58,7 +58,7 @@ def remove_oldest_entries(storage, percentage=90):
|
||||
# compute current memory usage (%)
|
||||
old_mem = psutil.virtual_memory().percent
|
||||
# if we have data in storage and utilization exceeds 90%
|
||||
while storage and old_mem > percentage:
|
||||
while storage and old_mem > percentage:
|
||||
# removed oldest entry
|
||||
storage.popitem(last=False)
|
||||
# garbage collect
|
||||
@@ -378,7 +378,7 @@ class CacheOnDisk(CacheAbstract):
|
||||
|
||||
|
||||
def safe_apply(self, key, function, default_value=None):
|
||||
"""
|
||||
"""
|
||||
Safely apply a function to the value of a key in storage and set
|
||||
the return value of the function to it.
|
||||
|
||||
@@ -606,22 +606,26 @@ class Cache(object):
|
||||
def wrapped_f():
|
||||
if current.request.env.request_method != 'GET':
|
||||
return func()
|
||||
|
||||
if quick:
|
||||
session_ = True if 'S' in quick else False
|
||||
vars_ = True if 'V' in quick else False
|
||||
lang_ = True if 'L' in quick else False
|
||||
user_agent_ = True if 'U' in quick else False
|
||||
public_ = True if 'P' in quick else False
|
||||
else:
|
||||
(session_, vars_, lang_, user_agent_, public_) = \
|
||||
(session, vars, lang, user_agent, public)
|
||||
|
||||
if time_expire:
|
||||
cache_control = 'max-age=%(time_expire)s, s-maxage=%(time_expire)s' % dict(time_expire=time_expire)
|
||||
if quick:
|
||||
session_ = True if 'S' in quick else False
|
||||
vars_ = True if 'V' in quick else False
|
||||
lang_ = True if 'L' in quick else False
|
||||
user_agent_ = True if 'U' in quick else False
|
||||
public_ = True if 'P' in quick else False
|
||||
else:
|
||||
session_, vars_, lang_, user_agent_, public_ = session, vars, lang, user_agent, public
|
||||
if not session_ and public_:
|
||||
cache_control += ', public'
|
||||
expires = (current.request.utcnow + datetime.timedelta(seconds=time_expire)).strftime('%a, %d %b %Y %H:%M:%S GMT')
|
||||
else:
|
||||
cache_control += ', private'
|
||||
expires = 'Fri, 01 Jan 1990 00:00:00 GMT'
|
||||
|
||||
if cache_model:
|
||||
#figure out the correct cache key
|
||||
cache_key = [current.request.env.path_info, current.response.view]
|
||||
|
||||
+37
-34
@@ -464,22 +464,28 @@ def read_pyc(filename):
|
||||
return marshal.loads(data[8:])
|
||||
|
||||
|
||||
def compile_views(folder):
|
||||
def compile_views(folder, skip_failed_views=False):
|
||||
"""
|
||||
Compiles all the views in the application specified by `folder`
|
||||
"""
|
||||
|
||||
path = pjoin(folder, 'views')
|
||||
failed_views = []
|
||||
for fname in listdir(path, '^[\w/\-]+(\.\w+)*$'):
|
||||
try:
|
||||
data = parse_template(fname, path)
|
||||
except Exception, e:
|
||||
raise Exception("%s in %s" % (e, fname))
|
||||
filename = 'views.%s.py' % fname.replace(os.path.sep, '.')
|
||||
filename = pjoin(folder, 'compiled', filename)
|
||||
write_file(filename, data)
|
||||
save_pyc(filename)
|
||||
os.unlink(filename)
|
||||
if skip_failed_views:
|
||||
failed_views.append(fname)
|
||||
else:
|
||||
raise Exception("%s in %s" % (e, fname))
|
||||
else:
|
||||
filename = ('views/%s.py' % fname).replace('/', '_').replace('\\', '_')
|
||||
filename = pjoin(folder, 'compiled', filename)
|
||||
write_file(filename, data)
|
||||
save_pyc(filename)
|
||||
os.unlink(filename)
|
||||
return failed_views if failed_views else None
|
||||
|
||||
|
||||
def compile_models(folder):
|
||||
@@ -652,7 +658,7 @@ def run_view_in(environment):
|
||||
"""
|
||||
request = current.request
|
||||
response = current.response
|
||||
view = response.view
|
||||
view = environment['response'].view
|
||||
folder = request.folder
|
||||
path = pjoin(folder, 'compiled')
|
||||
badv = 'invalid view (%s)' % view
|
||||
@@ -667,32 +673,28 @@ def run_view_in(environment):
|
||||
ccode = parse_template(view, pjoin(folder, 'views'),
|
||||
context=environment)
|
||||
restricted(ccode, environment, 'file stream')
|
||||
elif os.path.exists(path):
|
||||
x = view.replace('/', '.')
|
||||
files = ['views.%s.pyc' % x]
|
||||
if allow_generic:
|
||||
files.append('views.generic.%s.pyc' % request.extension)
|
||||
# for backward compatibility
|
||||
x = view.replace('/', '_')
|
||||
files.append('views_%s.pyc' % x)
|
||||
if allow_generic:
|
||||
files.append('views_generic.%s.pyc' % request.extension)
|
||||
if request.extension == 'html':
|
||||
files.append('views_%s.pyc' % x[:-5])
|
||||
if allow_generic:
|
||||
files.append('views_generic.pyc')
|
||||
# end backward compatibility code
|
||||
for f in files:
|
||||
filename = pjoin(path, f)
|
||||
if os.path.exists(filename):
|
||||
code = read_pyc(filename)
|
||||
restricted(code, environment, layer=filename)
|
||||
return
|
||||
raise HTTP(404,
|
||||
rewrite.THREAD_LOCAL.routes.error_message % badv,
|
||||
web2py_error=badv)
|
||||
else:
|
||||
filename = pjoin(folder, 'views', view)
|
||||
if os.path.exists(path): # compiled views
|
||||
x = view.replace('/', '_')
|
||||
files = ['views_%s.pyc' % x]
|
||||
is_compiled = os.path.exists(pjoin(path, files[0]))
|
||||
# Don't use a generic view if the non-compiled view exists.
|
||||
if is_compiled or (not is_compiled and not os.path.exists(filename)):
|
||||
if allow_generic:
|
||||
files.append('views_generic.%s.pyc' % request.extension)
|
||||
# for backward compatibility
|
||||
if request.extension == 'html':
|
||||
files.append('views_%s.pyc' % x[:-5])
|
||||
if allow_generic:
|
||||
files.append('views_generic.pyc')
|
||||
# end backward compatibility code
|
||||
for f in files:
|
||||
compiled = pjoin(path, f)
|
||||
if os.path.exists(compiled):
|
||||
code = read_pyc(compiled)
|
||||
restricted(code, environment, layer=compiled)
|
||||
return
|
||||
if not os.path.exists(filename) and allow_generic:
|
||||
view = 'generic.' + request.extension
|
||||
filename = pjoin(folder, 'views', view)
|
||||
@@ -726,7 +728,7 @@ def remove_compiled_application(folder):
|
||||
pass
|
||||
|
||||
|
||||
def compile_application(folder):
|
||||
def compile_application(folder, skip_failed_views=False):
|
||||
"""
|
||||
Compiles all models, views, controller for the application in `folder`.
|
||||
"""
|
||||
@@ -734,7 +736,8 @@ def compile_application(folder):
|
||||
os.mkdir(pjoin(folder, 'compiled'))
|
||||
compile_models(folder)
|
||||
compile_controllers(folder)
|
||||
compile_views(folder)
|
||||
failed_views = compile_views(folder, skip_failed_views)
|
||||
return failed_views
|
||||
|
||||
|
||||
def test():
|
||||
|
||||
@@ -35,7 +35,6 @@ from gluon.serializers import json_parser
|
||||
|
||||
locker = thread.allocate_lock()
|
||||
|
||||
|
||||
def AppConfig(*args, **vars):
|
||||
|
||||
locker.acquire()
|
||||
|
||||
@@ -93,7 +93,7 @@ def video(url):
|
||||
|
||||
|
||||
def googledoc_viewer(url):
|
||||
return '<iframe src="http://docs.google.com/viewer?url=%s&embedded=true" style="max-width:100%%"></iframe>' % urllib.quote(url)
|
||||
return '<iframe src="https://docs.google.com/viewer?url=%s&embedded=true" style="max-width:100%%"></iframe>' % urllib.quote(url)
|
||||
|
||||
|
||||
def web2py_component(url):
|
||||
|
||||
@@ -6,15 +6,17 @@
|
||||
#
|
||||
# Given the model
|
||||
#
|
||||
# db.define_table("table_name", Field("picture", "upload"), Field("thumbnail", "upload"))
|
||||
# db.define_table("table_name", Field("picture", "upload"),
|
||||
# Field("thumbnail", "upload"))
|
||||
#
|
||||
# # to resize the picture on upload
|
||||
# to resize the picture on upload
|
||||
#
|
||||
# from images import RESIZE
|
||||
#
|
||||
# db.table_name.picture.requires = RESIZE(200, 200)
|
||||
#
|
||||
# # to store original image in picture and create a thumbnail in 'thumbnail' field
|
||||
# to store original image in picture and create a thumbnail
|
||||
# in 'thumbnail' field
|
||||
#
|
||||
# from images import THUMB
|
||||
# db.table_name.thumbnail.compute = lambda row: THUMB(row.picture, 200, 200)
|
||||
@@ -24,8 +26,11 @@ from gluon import current
|
||||
|
||||
|
||||
class RESIZE(object):
|
||||
def __init__(self, nx=160, ny=80, error_message=' image resize'):
|
||||
(self.nx, self.ny, self.error_message) = (nx, ny, error_message)
|
||||
|
||||
def __init__(self, nx=160, ny=80, quality=100,
|
||||
error_message=' image resize'):
|
||||
(self.nx, self.ny, self.quality, self.error_message) = (
|
||||
nx, ny, quality, error_message)
|
||||
|
||||
def __call__(self, value):
|
||||
if isinstance(value, str) and len(value) == 0:
|
||||
@@ -36,7 +41,7 @@ class RESIZE(object):
|
||||
img = Image.open(value.file)
|
||||
img.thumbnail((self.nx, self.ny), Image.ANTIALIAS)
|
||||
s = cStringIO.StringIO()
|
||||
img.save(s, 'JPEG', quality=100)
|
||||
img.save(s, 'JPEG', quality=self.quality)
|
||||
s.seek(0)
|
||||
value.file = s
|
||||
except:
|
||||
@@ -51,7 +56,7 @@ def THUMB(image, nx=120, ny=120, gae=False, name='thumb'):
|
||||
request = current.request
|
||||
from PIL import Image
|
||||
import os
|
||||
img = Image.open(os.path.join(request.folder,'uploads',image))
|
||||
img = Image.open(os.path.join(request.folder, 'uploads', image))
|
||||
img.thumbnail((nx, ny), Image.ANTIALIAS)
|
||||
root, ext = os.path.splitext(image)
|
||||
thumb = '%s_%s%s' % (root, name, ext)
|
||||
|
||||
@@ -14,12 +14,20 @@ except Exception, e:
|
||||
raise e
|
||||
|
||||
|
||||
def ldap_auth(server='ldap', port=None,
|
||||
def ldap_auth(server='ldap',
|
||||
port=None,
|
||||
base_dn='ou=users,dc=domain,dc=com',
|
||||
mode='uid', secure=False,
|
||||
cert_path=None, cert_file=None,
|
||||
cacert_path=None, cacert_file=None, key_file=None,
|
||||
bind_dn=None, bind_pw=None, filterstr='objectClass=*',
|
||||
mode='uid',
|
||||
secure=False,
|
||||
self_signed_certificate=None, # See NOTE below
|
||||
cert_path=None,
|
||||
cert_file=None,
|
||||
cacert_path=None,
|
||||
cacert_file=None,
|
||||
key_file=None,
|
||||
bind_dn=None,
|
||||
bind_pw=None,
|
||||
filterstr='objectClass=*',
|
||||
username_attrib='uid',
|
||||
custom_scope='subtree',
|
||||
allowed_groups=None,
|
||||
@@ -33,6 +41,7 @@ def ldap_auth(server='ldap', port=None,
|
||||
group_name_attrib='cn',
|
||||
group_member_attrib='memberUid',
|
||||
group_filterstr='objectClass=*',
|
||||
tls=False,
|
||||
logging_level='error'):
|
||||
|
||||
"""
|
||||
@@ -80,6 +89,13 @@ def ldap_auth(server='ldap', port=None,
|
||||
If ldap is using GnuTLS then you need cert_file="..." instead cert_path
|
||||
because cert_path isn't implemented in GnuTLS :(
|
||||
|
||||
To enable TLS, set tls=True:
|
||||
|
||||
auth.settings.login_methods.append(ldap_auth(
|
||||
server='my.ldap.server',
|
||||
base_dn='ou=Users,dc=domain,dc=com',
|
||||
tls=True))
|
||||
|
||||
If you need to bind to the directory with an admin account in order to
|
||||
search it then specify bind_dn & bind_pw to use for this.
|
||||
- currently only implemented for Active Directory
|
||||
@@ -151,6 +167,14 @@ def ldap_auth(server='ldap', port=None,
|
||||
You can set the logging level with the "logging_level" parameter, default
|
||||
is "error" and can be set to error, warning, info, debug.
|
||||
"""
|
||||
|
||||
if self_signed_certificate:
|
||||
# NOTE : If you have a self-signed SSL Certificate pointing over "port=686" and "secure=True" alone
|
||||
# will not work, you need also to set "self_signed_certificate=True".
|
||||
# Ref1: https://onemoretech.wordpress.com/2015/06/25/connecting-to-ldap-over-self-signed-tls-with-python/
|
||||
# Ref2: http://bneijt.nl/blog/post/connecting-to-ldaps-with-self-signed-cert-using-python/
|
||||
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
|
||||
|
||||
logger = logging.getLogger('web2py.auth.ldap_auth')
|
||||
if logging_level == 'error':
|
||||
logger.setLevel(logging.ERROR)
|
||||
@@ -188,8 +212,7 @@ def ldap_auth(server='ldap', port=None,
|
||||
logger.warning('blank password not allowed')
|
||||
return False
|
||||
logger.debug('mode: [%s] manage_user: [%s] custom_scope: [%s]'
|
||||
' manage_groups: [%s]' % (str(mode), str(manage_user),
|
||||
str(custom_scope), str(manage_groups)))
|
||||
' manage_groups: [%s]' % (str(mode), str(manage_user), str(custom_scope), str(manage_groups)))
|
||||
if manage_user:
|
||||
if user_firstname_attrib.count(':') > 0:
|
||||
(user_firstname_attrib,
|
||||
@@ -238,14 +261,10 @@ def ldap_auth(server='ldap', port=None,
|
||||
# in the ldap_basedn
|
||||
requested_attrs = ['sAMAccountName']
|
||||
if manage_user:
|
||||
requested_attrs.extend([user_firstname_attrib,
|
||||
user_lastname_attrib,
|
||||
user_mail_attrib])
|
||||
requested_attrs.extend([user_firstname_attrib, user_lastname_attrib, user_mail_attrib])
|
||||
result = con.search_ext_s(
|
||||
ldap_basedn, ldap.SCOPE_SUBTREE,
|
||||
"(&(sAMAccountName=%s)(%s))" % (
|
||||
ldap.filter.escape_filter_chars(username_bare),
|
||||
filterstr),
|
||||
"(&(sAMAccountName=%s)(%s))" % (ldap.filter.escape_filter_chars(username_bare), filterstr),
|
||||
requested_attrs)[0][1]
|
||||
if not isinstance(result, dict):
|
||||
# result should be a dict in the form
|
||||
@@ -278,25 +297,21 @@ def ldap_auth(server='ldap', port=None,
|
||||
if manage_user:
|
||||
result = con.search_s(dn, ldap.SCOPE_BASE,
|
||||
"(objectClass=*)",
|
||||
[user_firstname_attrib,
|
||||
user_lastname_attrib,
|
||||
user_mail_attrib])[0][1]
|
||||
[user_firstname_attrib, user_lastname_attrib, user_mail_attrib])[0][1]
|
||||
|
||||
if ldap_mode == 'uid':
|
||||
# OpenLDAP (UID)
|
||||
if ldap_binddn and ldap_bindpw:
|
||||
con.simple_bind_s(ldap_binddn, ldap_bindpw)
|
||||
dn = "uid=" + username + "," + ldap_basedn
|
||||
dn = con.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, "(uid=%s)"%username, [''])[0][0]
|
||||
dn = con.search_s(ldap_basedn, ldap.SCOPE_SUBTREE, "(uid=%s)" % username, [''])[0][0]
|
||||
else:
|
||||
dn = "uid=" + username + "," + ldap_basedn
|
||||
con.simple_bind_s(dn, password)
|
||||
if manage_user:
|
||||
result = con.search_s(dn, ldap.SCOPE_BASE,
|
||||
"(objectClass=*)",
|
||||
[user_firstname_attrib,
|
||||
user_lastname_attrib,
|
||||
user_mail_attrib])[0][1]
|
||||
[user_firstname_attrib, user_lastname_attrib, user_mail_attrib])[0][1]
|
||||
|
||||
if ldap_mode == 'company':
|
||||
# no DNs or password needed to search directory
|
||||
@@ -311,9 +326,7 @@ def ldap_auth(server='ldap', port=None,
|
||||
# find the uid
|
||||
attrs = ['uid']
|
||||
if manage_user:
|
||||
attrs.extend([user_firstname_attrib,
|
||||
user_lastname_attrib,
|
||||
user_mail_attrib])
|
||||
attrs.extend([user_firstname_attrib, user_lastname_attrib, user_mail_attrib])
|
||||
# perform the actual search
|
||||
company_search_result = con.search_s(ldap_basedn,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
@@ -329,13 +342,11 @@ def ldap_auth(server='ldap', port=None,
|
||||
basedns = ldap_basedn
|
||||
else:
|
||||
basedns = [ldap_basedn]
|
||||
filter = '(&(uid=%s)(%s))' % (
|
||||
ldap.filter.escape_filter_chars(username), filterstr)
|
||||
filter = '(&(uid=%s)(%s))' % (ldap.filter.escape_filter_chars(username), filterstr)
|
||||
found = False
|
||||
for basedn in basedns:
|
||||
try:
|
||||
result = con.search_s(basedn, ldap.SCOPE_SUBTREE,
|
||||
filter)
|
||||
result = con.search_s(basedn, ldap.SCOPE_SUBTREE, filter)
|
||||
if result:
|
||||
user_dn = result[0][0]
|
||||
# Check the password
|
||||
@@ -344,9 +355,10 @@ def ldap_auth(server='ldap', port=None,
|
||||
break
|
||||
except ldap.LDAPError, detail:
|
||||
(exc_type, exc_value) = sys.exc_info()[:2]
|
||||
logger.warning(
|
||||
"ldap_auth: searching %s for %s resulted in %s: %s\n" %
|
||||
(basedn, filter, exc_type, exc_value)
|
||||
logger.warning("ldap_auth: searching %s for %s resulted in %s: %s\n" % (basedn,
|
||||
filter,
|
||||
exc_type,
|
||||
exc_value)
|
||||
)
|
||||
if not found:
|
||||
logger.warning('User [%s] not found!' % username)
|
||||
@@ -359,10 +371,7 @@ def ldap_auth(server='ldap', port=None,
|
||||
basedns = ldap_basedn
|
||||
else:
|
||||
basedns = [ldap_basedn]
|
||||
filter = '(&(%s=%s)(%s))' % (username_attrib,
|
||||
ldap.filter.escape_filter_chars(
|
||||
username),
|
||||
filterstr)
|
||||
filter = '(&(%s=%s)(%s))' % (username_attrib, ldap.filter.escape_filter_chars(username), filterstr)
|
||||
if custom_scope == 'subtree':
|
||||
ldap_scope = ldap.SCOPE_SUBTREE
|
||||
elif custom_scope == 'base':
|
||||
@@ -381,9 +390,10 @@ def ldap_auth(server='ldap', port=None,
|
||||
break
|
||||
except ldap.LDAPError, detail:
|
||||
(exc_type, exc_value) = sys.exc_info()[:2]
|
||||
logger.warning(
|
||||
"ldap_auth: searching %s for %s resulted in %s: %s\n" %
|
||||
(basedn, filter, exc_type, exc_value)
|
||||
logger.warning("ldap_auth: searching %s for %s resulted in %s: %s\n" % (basedn,
|
||||
filter,
|
||||
exc_type,
|
||||
exc_value)
|
||||
)
|
||||
if not found:
|
||||
logger.warning('User [%s] not found!' % username)
|
||||
@@ -393,16 +403,14 @@ def ldap_auth(server='ldap', port=None,
|
||||
logger.info('[%s] Manage user data' % str(username))
|
||||
try:
|
||||
if user_firstname_part is not None:
|
||||
store_user_firstname = result[user_firstname_attrib][
|
||||
0].split(' ', 1)[user_firstname_part]
|
||||
store_user_firstname = result[user_firstname_attrib][0].split(' ', 1)[user_firstname_part]
|
||||
else:
|
||||
store_user_firstname = result[user_firstname_attrib][0]
|
||||
except KeyError, e:
|
||||
store_user_firstname = None
|
||||
try:
|
||||
if user_lastname_part is not None:
|
||||
store_user_lastname = result[user_lastname_attrib][
|
||||
0].split(' ', 1)[user_lastname_part]
|
||||
store_user_lastname = result[user_lastname_attrib][0].split(' ', 1)[user_lastname_part]
|
||||
else:
|
||||
store_user_lastname = result[user_lastname_attrib][0]
|
||||
except KeyError, e:
|
||||
@@ -411,32 +419,27 @@ def ldap_auth(server='ldap', port=None,
|
||||
store_user_mail = result[user_mail_attrib][0]
|
||||
except KeyError, e:
|
||||
store_user_mail = None
|
||||
try:
|
||||
#
|
||||
update_or_insert_values = {'first_name': store_user_firstname,
|
||||
'last_name': store_user_lastname,
|
||||
'email': store_user_mail}
|
||||
if '@' not in username:
|
||||
# user as username
|
||||
# #################
|
||||
# ################
|
||||
fields = ['first_name', 'last_name', 'email']
|
||||
user_in_db = db(db.auth_user.username == username)
|
||||
if user_in_db.count() > 0:
|
||||
user_in_db.update(first_name=store_user_firstname,
|
||||
last_name=store_user_lastname,
|
||||
email=store_user_mail)
|
||||
else:
|
||||
db.auth_user.insert(first_name=store_user_firstname,
|
||||
last_name=store_user_lastname,
|
||||
email=store_user_mail,
|
||||
username=username)
|
||||
except:
|
||||
#
|
||||
elif '@' in username:
|
||||
# user as email
|
||||
# ##############
|
||||
# #############
|
||||
fields = ['first_name', 'last_name']
|
||||
user_in_db = db(db.auth_user.email == username)
|
||||
if user_in_db.count() > 0:
|
||||
user_in_db.update(first_name=store_user_firstname,
|
||||
last_name=store_user_lastname)
|
||||
else:
|
||||
db.auth_user.insert(first_name=store_user_firstname,
|
||||
last_name=store_user_lastname,
|
||||
email=username)
|
||||
update_or_insert_values = dict(((f, update_or_insert_values[f]) for f in fields))
|
||||
|
||||
if user_in_db.count() > 0:
|
||||
actual_values = user_in_db.select(*[db.auth_user[f] for f in fields]).first().as_dict()
|
||||
if update_or_insert_values != actual_values: # We don't update record if values are the same
|
||||
user_in_db.update(**update_or_insert_values)
|
||||
else:
|
||||
db.auth_user.insert(**update_or_insert_values)
|
||||
con.unbind()
|
||||
|
||||
if manage_groups:
|
||||
@@ -478,9 +481,7 @@ def ldap_auth(server='ldap', port=None,
|
||||
# No match
|
||||
return False
|
||||
|
||||
def do_manage_groups(username,
|
||||
password=None,
|
||||
db=db):
|
||||
def do_manage_groups(username, password=None, db=db):
|
||||
"""
|
||||
Manage user groups
|
||||
|
||||
@@ -500,23 +501,19 @@ def ldap_auth(server='ldap', port=None,
|
||||
# Get all group name where the user is in actually in local db
|
||||
# #############################################################
|
||||
try:
|
||||
db_user_id = db(db.auth_user.username == username).select(
|
||||
db.auth_user.id).first().id
|
||||
db_user_id = db(db.auth_user.username == username).select(db.auth_user.id).first().id
|
||||
except:
|
||||
try:
|
||||
db_user_id = db(db.auth_user.email == username).select(
|
||||
db.auth_user.id).first().id
|
||||
db_user_id = db(db.auth_user.email == username).select(db.auth_user.id).first().id
|
||||
except AttributeError, e:
|
||||
#
|
||||
# There is no user in local db
|
||||
# We create one
|
||||
# ##############################
|
||||
try:
|
||||
db_user_id = db.auth_user.insert(username=username,
|
||||
first_name=username)
|
||||
db_user_id = db.auth_user.insert(username=username, first_name=username)
|
||||
except AttributeError, e:
|
||||
db_user_id = db.auth_user.insert(email=username,
|
||||
first_name=username)
|
||||
db_user_id = db.auth_user.insert(email=username, first_name=username)
|
||||
if not db_user_id:
|
||||
logging.error(
|
||||
'There is no username or email for %s!' % username)
|
||||
@@ -524,27 +521,23 @@ def ldap_auth(server='ldap', port=None,
|
||||
# if old pydal version, assume this is a relational database which can do joins
|
||||
db_can_join = db.can_join() if hasattr(db, 'can_join') else True
|
||||
if db_can_join:
|
||||
db_group_search = db(
|
||||
(db.auth_membership.user_id == db_user_id) &
|
||||
(db.auth_user.id == db.auth_membership.user_id) &
|
||||
(db.auth_group.id == db.auth_membership.group_id))
|
||||
db_group_search = \
|
||||
db((db.auth_membership.user_id == db_user_id) &
|
||||
(db.auth_user.id == db.auth_membership.user_id) &
|
||||
(db.auth_group.id == db.auth_membership.group_id))
|
||||
else:
|
||||
# no joins on NoSQL databases, perform two queries
|
||||
db_group_search = db(db.auth_membership.user_id == db_user_id)
|
||||
group_ids = [x.group_id for x in db_group_search.select(
|
||||
db.auth_membership.group_id, distinct=True)]
|
||||
group_ids = [x.group_id for x in db_group_search.select(db.auth_membership.group_id, distinct=True)]
|
||||
db_group_search = db(db.auth_group.id.belongs(group_ids))
|
||||
db_groups_of_the_user = list()
|
||||
db_group_id = dict()
|
||||
|
||||
if db_group_search.count() > 0:
|
||||
for group in db_group_search.select(db.auth_group.id,
|
||||
db.auth_group.role,
|
||||
distinct=True):
|
||||
for group in db_group_search.select(db.auth_group.id, db.auth_group.role, distinct=True):
|
||||
db_group_id[group.role] = group.id
|
||||
db_groups_of_the_user.append(group.role)
|
||||
logging.debug('db groups of user %s: %s' %
|
||||
(username, str(db_groups_of_the_user)))
|
||||
logging.debug('db groups of user %s: %s' % (username, str(db_groups_of_the_user)))
|
||||
|
||||
#
|
||||
# Delete user membership from groups where user is not anymore
|
||||
@@ -552,8 +545,7 @@ def ldap_auth(server='ldap', port=None,
|
||||
for group_to_del in db_groups_of_the_user:
|
||||
if ldap_groups_of_the_user.count(group_to_del) == 0:
|
||||
db((db.auth_membership.user_id == db_user_id) &
|
||||
(db.auth_membership.group_id == \
|
||||
db_group_id[group_to_del])).delete()
|
||||
(db.auth_membership.group_id == db_group_id[group_to_del])).delete()
|
||||
|
||||
#
|
||||
# Create user membership in groups where user is not in already
|
||||
@@ -561,16 +553,12 @@ def ldap_auth(server='ldap', port=None,
|
||||
for group_to_add in ldap_groups_of_the_user:
|
||||
if db_groups_of_the_user.count(group_to_add) == 0:
|
||||
if db(db.auth_group.role == group_to_add).count() == 0:
|
||||
gid = db.auth_group.insert(role=group_to_add,
|
||||
description='Generated from LDAP')
|
||||
gid = db.auth_group.insert(role=group_to_add, description='Generated from LDAP')
|
||||
else:
|
||||
gid = db(db.auth_group.role == group_to_add).select(
|
||||
db.auth_group.id).first().id
|
||||
db.auth_membership.insert(user_id=db_user_id,
|
||||
group_id=gid)
|
||||
gid = db(db.auth_group.role == group_to_add).select(db.auth_group.id).first().id
|
||||
db.auth_membership.insert(user_id=db_user_id, group_id=gid)
|
||||
except:
|
||||
logger.warning("[%s] Groups are not managed successfully!" %
|
||||
str(username))
|
||||
logger.warning("[%s] Groups are not managed successfully!" % str(username))
|
||||
import traceback
|
||||
logger.debug(traceback.format_exc())
|
||||
return False
|
||||
@@ -610,6 +598,8 @@ def ldap_auth(server='ldap', port=None,
|
||||
ldap_port = 389
|
||||
con = ldap.initialize(
|
||||
"ldap://" + ldap_server + ":" + str(ldap_port))
|
||||
if tls:
|
||||
con.start_tls_s()
|
||||
return con
|
||||
|
||||
def get_user_groups_from_ldap(username,
|
||||
@@ -659,10 +649,12 @@ def ldap_auth(server='ldap', port=None,
|
||||
con.simple_bind_s(username, password)
|
||||
logger.debug('Ldap username connect...')
|
||||
# We have to use the full string
|
||||
username = con.search_ext_s(base_dn, ldap.SCOPE_SUBTREE,
|
||||
"(&(sAMAccountName=%s)(%s))" %
|
||||
(ldap.filter.escape_filter_chars(username_bare),
|
||||
filterstr), ["cn"])[0][0]
|
||||
username = \
|
||||
con.search_ext_s(base_dn,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
"(&(sAMAccountName=%s)(%s))" % (ldap.filter.escape_filter_chars(username_bare),
|
||||
filterstr),
|
||||
["cn"])[0][0]
|
||||
else:
|
||||
if ldap_binddn:
|
||||
# need to search directory with an bind_dn account 1st
|
||||
@@ -675,18 +667,14 @@ def ldap_auth(server='ldap', port=None,
|
||||
if username is None:
|
||||
return list()
|
||||
# search for groups where user is in
|
||||
filter = '(&(%s=%s)(%s))' % (ldap.filter.escape_filter_chars(
|
||||
group_member_attrib
|
||||
),
|
||||
filter = '(&(%s=%s)(%s))' % (ldap.filter.escape_filter_chars(group_member_attrib),
|
||||
ldap.filter.escape_filter_chars(username),
|
||||
group_filterstr)
|
||||
group_search_result = con.search_s(group_dn,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
filter, [group_name_attrib])
|
||||
group_search_result = con.search_s(group_dn, ldap.SCOPE_SUBTREE, filter, [group_name_attrib])
|
||||
ldap_groups_of_the_user = list()
|
||||
for group_row in group_search_result:
|
||||
group = group_row[1]
|
||||
if type(group) == dict and group.has_key(group_name_attrib):
|
||||
if type(group) == dict and group_name_attrib in group:
|
||||
ldap_groups_of_the_user.extend(group[group_name_attrib])
|
||||
|
||||
con.unbind()
|
||||
|
||||
@@ -139,24 +139,36 @@ server for requests. It can be used for the optional"scope" parameters for Face
|
||||
Return the access token generated by the authenticating server.
|
||||
|
||||
If token is already in the session that one will be used.
|
||||
If token has expired refresh_token is used to get another token.
|
||||
Otherwise the token is fetched from the auth server.
|
||||
|
||||
"""
|
||||
refresh_token = None
|
||||
if current.session.token and 'expires' in current.session.token:
|
||||
expires = current.session.token['expires']
|
||||
# reuse token until expiration
|
||||
if expires == 0 or expires > time.time():
|
||||
return current.session.token['access_token']
|
||||
return current.session.token['access_token']
|
||||
if 'refresh_token' in current.session.token:
|
||||
refresh_token = current.session.token['refresh_token']
|
||||
|
||||
code = current.request.vars.code
|
||||
|
||||
if code:
|
||||
data = dict(client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
redirect_uri=current.session.redirect_uri,
|
||||
code=code,
|
||||
grant_type='authorization_code'
|
||||
)
|
||||
if code or refresh_token:
|
||||
data = dict(
|
||||
client_id=self.client_id,
|
||||
client_secret=self.client_secret,
|
||||
)
|
||||
if code:
|
||||
data.update(
|
||||
redirect_uri=current.session.redirect_uri,
|
||||
code=code,
|
||||
grant_type='authorization_code'
|
||||
)
|
||||
elif refresh_token:
|
||||
data.update(
|
||||
refresh_token=refresh_token,
|
||||
grant_type='refresh_token'
|
||||
)
|
||||
|
||||
open_url = None
|
||||
opener = self.__build_url_opener(self.token_url)
|
||||
|
||||
@@ -13,6 +13,7 @@ Include in your model (eg db.py)::
|
||||
|
||||
auth.define_tables(username=True)
|
||||
from gluon.contrib.login_methods.saml2_auth import Saml2Auth
|
||||
import os
|
||||
auth.settings.login_form=Saml2Auth(
|
||||
config_file = os.path.join(request.folder,'private','sp_conf'),
|
||||
maps=dict(
|
||||
@@ -20,10 +21,59 @@ Include in your model (eg db.py)::
|
||||
email=lambda v: v['http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn'][0],
|
||||
user_id=lambda v: v['http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn'][0]))
|
||||
|
||||
you must have private/sp_conf.py, the pysaml2 sp configuration file
|
||||
you must have private/sp_conf.py, the pysaml2 sp configuration file. For example:
|
||||
|
||||
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from saml2 import BINDING_HTTP_POST, BINDING_HTTP_REDIRECT
|
||||
import os.path
|
||||
import requests
|
||||
import tempfile
|
||||
|
||||
BASEDIR = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
# Web2py SP url and application name
|
||||
HOST = 'http://127.0.0.1:8000'
|
||||
APP = 'sp'
|
||||
|
||||
# To load the IDP metadata...
|
||||
IDP_METADATA = 'http://127.0.0.1:8088/metadata'
|
||||
|
||||
def full_path(local_file):
|
||||
return os.path.join(BASEDIR, local_file)
|
||||
|
||||
CONFIG = {
|
||||
# your entity id, usually your subdomain plus the url to the metadata view.
|
||||
'entityid': '%s/%s/default/metadata' % (HOST, APP),
|
||||
'service': {
|
||||
'sp' : {
|
||||
'name': 'MYSP',
|
||||
'endpoints': {
|
||||
'assertion_consumer_service': [
|
||||
('%s/%s/default/user/login' % (HOST, APP), BINDING_HTTP_REDIRECT),
|
||||
('%s/%s/default/user/login' % (HOST, APP), BINDING_HTTP_POST),
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
# Your private and public key.
|
||||
'key_file': full_path('pki/mykey.pem'),
|
||||
'cert_file': full_path('pki/mycert.pem'),
|
||||
|
||||
# where the remote metadata is stored
|
||||
'metadata': {
|
||||
"remote": [{
|
||||
"url": IDP_METADATA,
|
||||
"cert":full_path('pki/mycert.pem')
|
||||
}]
|
||||
},
|
||||
}
|
||||
|
||||
"""
|
||||
|
||||
from saml2 import BINDING_HTTP_REDIRECT
|
||||
from saml2 import BINDING_HTTP_REDIRECT, BINDING_HTTP_POST
|
||||
from saml2.client import Saml2Client
|
||||
from gluon.utils import web2py_uuid
|
||||
from gluon import current, redirect, URL
|
||||
@@ -59,10 +109,13 @@ def saml2_handler(session, request, config_filename = None):
|
||||
client = Saml2Client(config_file = config_filename)
|
||||
idps = client.metadata.with_descriptor("idpsso")
|
||||
entityid = idps.keys()[0]
|
||||
bindings = [BINDING_HTTP_REDIRECT]
|
||||
bindings = [BINDING_HTTP_REDIRECT, BINDING_HTTP_POST]
|
||||
binding, destination = client.pick_binding(
|
||||
"single_sign_on_service", bindings, "idpsso", entity_id=entityid)
|
||||
binding = BINDING_HTTP_REDIRECT
|
||||
if request.env.request_method == 'GET':
|
||||
binding = BINDING_HTTP_REDIRECT
|
||||
elif request.env.request_method == 'POST':
|
||||
binding = BINDING_HTTP_POST
|
||||
if not request.vars.SAMLResponse:
|
||||
req_id, req = client.create_authn_request(destination, binding=binding)
|
||||
relay_state = web2py_uuid().replace('-','')
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf8 -*-
|
||||
# -*- coding: utf-8 -*-
|
||||
# Plural-Forms for fr (French))
|
||||
|
||||
nplurals=2 # French language has 2 forms:
|
||||
@@ -15,3 +15,55 @@ get_plural_id = lambda n: int(n != 1)
|
||||
# for words (or phrases) not found in plural_dict dictionary
|
||||
# construct_plural_form = lambda word, plural_id: (word + 'suffix')
|
||||
|
||||
irregular={
|
||||
'aïeul': 'aïeux',
|
||||
'bonhomme': 'bonshommes',
|
||||
'ciel': 'cieux',
|
||||
'oeil': 'yeux',
|
||||
'œil': 'yeux',
|
||||
'madame': 'mesdames',
|
||||
'mademoiselle': 'mesdemoiselles',
|
||||
'monsieur': 'messieurs',
|
||||
'bijou': 'bijoux',
|
||||
'caillou': 'cailloux',
|
||||
'chou': 'choux',
|
||||
'genou': 'genoux',
|
||||
'hibou': 'hiboux',
|
||||
'joujou': 'joujoux',
|
||||
'pou': 'poux',
|
||||
'corail': ' coraux',
|
||||
'émail': 'émaux',
|
||||
'travail': 'travaux',
|
||||
'vitrail': 'vitraux',
|
||||
'soupirail': 'soupiraux',
|
||||
'bail': 'baux',
|
||||
'fermail': 'fermaux',
|
||||
'ventail': 'ventaux',
|
||||
'bleu': 'bleus',
|
||||
'pneu': 'pneus',
|
||||
'émeu': 'émeus',
|
||||
'enfeu': 'enfeus',
|
||||
#'lieu': 'lieus', # poisson
|
||||
|
||||
}
|
||||
|
||||
def construct_plural_form(word, plural_id):
|
||||
u"""
|
||||
>>> [construct_plural_form(x, 1) for x in \
|
||||
[ 'bleu', 'nez', 'sex', 'bas', 'gruau', 'jeu', 'journal',\
|
||||
'chose' ]]
|
||||
['bleus', 'nez', 'sex', 'bas', 'gruaux', 'jeux', 'journaux', 'choses']
|
||||
"""
|
||||
if word in irregular:
|
||||
return irregular[word]
|
||||
if word[-1:] in ('s', 'x', 'z'):
|
||||
return word
|
||||
if word[-2:] in ('au', 'eu'):
|
||||
return word + 'x'
|
||||
if word[-2:] == 'al':
|
||||
return word[0:-2] + 'aux'
|
||||
return word + 's'
|
||||
|
||||
if __name__ == '__main__':
|
||||
import doctest
|
||||
doctest.testmod()
|
||||
|
||||
@@ -2,20 +2,20 @@
|
||||
Developed by niphlod@gmail.com
|
||||
Released under web2py license because includes gluon/cache.py source code
|
||||
"""
|
||||
import redis
|
||||
from redis.exceptions import ConnectionError
|
||||
from gluon import current
|
||||
from gluon.cache import CacheAbstract
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
import cPickle as pickle
|
||||
except:
|
||||
import pickle
|
||||
import pickle
|
||||
import time
|
||||
import re
|
||||
import logging
|
||||
import thread
|
||||
import random
|
||||
|
||||
from gluon import current
|
||||
from gluon.cache import CacheAbstract
|
||||
from gluon.contrib.redis_utils import acquire_lock, release_lock
|
||||
from gluon.contrib.redis_utils import register_release_lock, RConnectionError
|
||||
|
||||
logger = logging.getLogger("web2py.cache.redis")
|
||||
|
||||
@@ -24,17 +24,22 @@ locker = thread.allocate_lock()
|
||||
|
||||
def RedisCache(*args, **vars):
|
||||
"""
|
||||
Usage example: put in models
|
||||
Usage example: put in models::
|
||||
|
||||
from gluon.contrib.redis_cache import RedisCache
|
||||
cache.redis = RedisCache('localhost:6379',db=None, debug=True, with_lock=True, password=None)
|
||||
from gluon.contrib.redis_utils import RConn
|
||||
rconn = RConn()
|
||||
from gluon.contrib.redis_cache import RedisCache
|
||||
cache.redis = RedisCache(redis_conn=rconn, debug=True, with_lock=True)
|
||||
|
||||
:param db: redis db to use (0..16)
|
||||
:param debug: if True adds to stats() the total_hits and misses
|
||||
:param with_lock: sets the default locking mode for creating new keys.
|
||||
Args:
|
||||
redis_conn: a redis-like connection object
|
||||
debug: if True adds to stats() the total_hits and misses
|
||||
with_lock: sets the default locking mode for creating new keys.
|
||||
By default is False (usualy when you choose Redis you do it
|
||||
for performances reason)
|
||||
When True, only one thread/process can set a value concurrently
|
||||
fail_gracefully: if redis is unavailable, returns the value computing it
|
||||
instead of raising an exception
|
||||
|
||||
When you use cache.redis directly you can use
|
||||
value = cache.redis('mykey', lambda: time.time(), with_lock=True)
|
||||
@@ -81,22 +86,19 @@ class RedisClient(object):
|
||||
MAX_RETRIES = 5
|
||||
RETRIES = 0
|
||||
|
||||
def __init__(self, server='localhost:6379', db=None, debug=False, with_lock=False, password=None):
|
||||
self.server = server
|
||||
self.password = password
|
||||
self.db = db or 0
|
||||
host, port = (self.server.split(':') + ['6379'])[:2]
|
||||
port = int(port)
|
||||
def __init__(self, redis_conn=None, debug=False,
|
||||
with_lock=False, fail_gracefully=False):
|
||||
self.request = current.request
|
||||
self.debug = debug
|
||||
self.with_lock = with_lock
|
||||
self.prefix = "w2p:%s:" % (self.request.application)
|
||||
self.fail_gracefully = fail_gracefully
|
||||
self.prefix = "w2p:cache:%s:" % (self.request.application)
|
||||
if self.request:
|
||||
app = self.request.application
|
||||
else:
|
||||
app = ''
|
||||
|
||||
if not app in self.meta_storage:
|
||||
if app not in self.meta_storage:
|
||||
self.storage = self.meta_storage[app] = {
|
||||
CacheAbstract.cache_stats_name: {
|
||||
'hit_total': 0,
|
||||
@@ -107,7 +109,8 @@ class RedisClient(object):
|
||||
|
||||
self.cache_set_key = 'w2p:%s:___cache_set' % (self.request.application)
|
||||
|
||||
self.r_server = redis.Redis(host=host, port=port, db=self.db, password=self.password)
|
||||
self.r_server = redis_conn
|
||||
self._release_script = register_release_lock(self.r_server)
|
||||
|
||||
def initialize(self):
|
||||
pass
|
||||
@@ -140,26 +143,20 @@ class RedisClient(object):
|
||||
#naive distributed locking
|
||||
if with_lock:
|
||||
lock_key = '%s:__lock' % newKey
|
||||
try:
|
||||
while True:
|
||||
lock = self.r_server.setnx(lock_key, 1)
|
||||
if lock:
|
||||
value = self.cache_it(newKey, f, time_expire)
|
||||
break
|
||||
else:
|
||||
time.sleep(0.2)
|
||||
#did someone else create it in the meanwhile ?
|
||||
obj = self.r_server.get(newKey)
|
||||
if obj:
|
||||
value = pickle.loads(obj)
|
||||
break
|
||||
finally:
|
||||
self.r_server.delete(lock_key)
|
||||
randomvalue = time.time()
|
||||
al = acquire_lock(self.r_server, lock_key, randomvalue)
|
||||
#someone may have computed it
|
||||
obj = self.r_server.get(newKey)
|
||||
if obj is None:
|
||||
value = self.cache_it(newKey, f, time_expire)
|
||||
else:
|
||||
value = pickle.loads(obj)
|
||||
release_lock(self, lock_key, al)
|
||||
else:
|
||||
#without distributed locking
|
||||
value = self.cache_it(newKey, f, time_expire)
|
||||
return value
|
||||
except ConnectionError:
|
||||
except RConnectionError:
|
||||
return self.retry_call(key, f, time_expire, with_lock)
|
||||
|
||||
def cache_it(self, key, f, time_expire):
|
||||
@@ -172,39 +169,42 @@ class RedisClient(object):
|
||||
value_ = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
|
||||
if time_expire == 0:
|
||||
time_expire = 1
|
||||
self.r_server.setex(key, value_, time_expire)
|
||||
self.r_server.setex(key, time_expire, value_)
|
||||
#print '%s will expire on %s: it goes in bucket %s' % (key, time.ctime(expireat))
|
||||
#print 'that will expire on %s' % (bucket_key, time.ctime(((expireat/60) + 1)*60))
|
||||
p = self.r_server.pipeline()
|
||||
#add bucket to the fixed set
|
||||
p.sadd(cache_set_key, bucket_key)
|
||||
#sets the key
|
||||
p.setex(key, value_, time_expire)
|
||||
p.setex(key, time_expire, value_)
|
||||
#add the key to the bucket
|
||||
p.sadd(bucket_key, key)
|
||||
#expire the bucket properly
|
||||
p.expireat(bucket_key, ((expireat/60) + 1)*60)
|
||||
p.expireat(bucket_key, ((expireat / 60) + 1) * 60)
|
||||
p.execute()
|
||||
return value
|
||||
|
||||
def retry_call(self, key, f, time_expire, with_locking):
|
||||
def retry_call(self, key, f, time_expire, with_lock):
|
||||
self.RETRIES += 1
|
||||
if self.RETRIES <= self.MAX_RETRIES:
|
||||
logger.error("sleeping %s seconds before reconnecting" %
|
||||
(2 * self.RETRIES))
|
||||
time.sleep(2 * self.RETRIES)
|
||||
self.__init__(self.server, self.db, self.debug, self.with_lock)
|
||||
return self.__call__(key, f, time_expire, with_locking)
|
||||
if self.fail_gracefully:
|
||||
self.RETRIES = 0
|
||||
return f()
|
||||
return self.__call__(key, f, time_expire, with_lock)
|
||||
else:
|
||||
self.RETRIES = 0
|
||||
raise ConnectionError('Redis instance is unavailable at %s' % (
|
||||
self.server))
|
||||
if self.fail_gracefully:
|
||||
return f
|
||||
raise RConnectionError('Redis instance is unavailable')
|
||||
|
||||
def increment(self, key, value=1):
|
||||
try:
|
||||
newKey = self.__keyFormat__(key)
|
||||
return self.r_server.incr(newKey, value)
|
||||
except ConnectionError:
|
||||
except RConnectionError:
|
||||
return self.retry_increment(key, value)
|
||||
|
||||
def retry_increment(self, key, value):
|
||||
@@ -212,12 +212,10 @@ class RedisClient(object):
|
||||
if self.RETRIES <= self.MAX_RETRIES:
|
||||
logger.error("sleeping some seconds before reconnecting")
|
||||
time.sleep(2 * self.RETRIES)
|
||||
self.__init__(self.server, self.db, self.debug, self.with_lock)
|
||||
return self.increment(key, value)
|
||||
else:
|
||||
self.RETRIES = 0
|
||||
raise ConnectionError('Redis instance is unavailable at %s' % (
|
||||
self.server))
|
||||
raise RConnectionError('Redis instance is unavailable')
|
||||
|
||||
def clear(self, regex):
|
||||
"""
|
||||
@@ -225,9 +223,9 @@ class RedisClient(object):
|
||||
clear cache entries
|
||||
"""
|
||||
r = re.compile(regex)
|
||||
#get all buckets
|
||||
# get all buckets
|
||||
buckets = self.r_server.smembers(self.cache_set_key)
|
||||
#get all keys in buckets
|
||||
# get all keys in buckets
|
||||
if buckets:
|
||||
keys = self.r_server.sunion(buckets)
|
||||
else:
|
||||
@@ -237,8 +235,8 @@ class RedisClient(object):
|
||||
for a in keys:
|
||||
if r.match(str(a).replace(prefix, '', 1)):
|
||||
pipe.delete(a)
|
||||
if random.randrange(0,100) < 10:
|
||||
#do this just once in a while (10% chance)
|
||||
if random.randrange(0, 100) < 10:
|
||||
# do this just once in a while (10% chance)
|
||||
self.clear_buckets(buckets)
|
||||
pipe.execute()
|
||||
|
||||
|
||||
@@ -0,0 +1,785 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
| This file is part of the web2py Web Framework
|
||||
| Created by niphlod@gmail.com
|
||||
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
|
||||
|
||||
Scheduler with redis backend
|
||||
---------------------------------
|
||||
"""
|
||||
|
||||
USAGE = """
|
||||
## Example
|
||||
|
||||
For any existing app
|
||||
|
||||
Create File: app/models/scheduler.py ======
|
||||
from gluon.contrib.redis_utils import RConn
|
||||
from gluon.contrib.redis_scheduler import RScheduler
|
||||
|
||||
def demo1(*args,**vars):
|
||||
print 'you passed args=%s and vars=%s' % (args, vars)
|
||||
return 'done!'
|
||||
|
||||
def demo2():
|
||||
1/0
|
||||
|
||||
rconn = RConn()
|
||||
mysched = RScheduler(db, dict(demo1=demo1,demo2=demo2), ...., redis_conn=rconn)
|
||||
|
||||
## run worker nodes with:
|
||||
|
||||
cd web2py
|
||||
python web2py.py -K app
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import socket
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
path = os.getcwd()
|
||||
|
||||
if 'WEB2PY_PATH' not in os.environ:
|
||||
os.environ['WEB2PY_PATH'] = path
|
||||
|
||||
try:
|
||||
from gluon.contrib.simplejson import loads, dumps
|
||||
except:
|
||||
from simplejson import loads, dumps
|
||||
|
||||
IDENTIFIER = "%s#%s" % (socket.gethostname(), os.getpid())
|
||||
|
||||
logger = logging.getLogger('web2py.rscheduler.%s' % IDENTIFIER)
|
||||
|
||||
from gluon.utils import web2py_uuid
|
||||
from gluon.storage import Storage
|
||||
from gluon.scheduler import *
|
||||
from gluon.scheduler import _decode_dict
|
||||
from gluon.contrib.redis_utils import RWatchError
|
||||
|
||||
|
||||
POLLING = 'POLLING'
|
||||
|
||||
|
||||
class RScheduler(Scheduler):
|
||||
|
||||
def __init__(self, db, tasks=None, migrate=True,
|
||||
worker_name=None, group_names=None, heartbeat=HEARTBEAT,
|
||||
max_empty_runs=0, discard_results=False, utc_time=False,
|
||||
redis_conn=None, mode=1):
|
||||
|
||||
"""
|
||||
Highly-experimental coordination with redis
|
||||
Takes all args from Scheduler except redis_conn which
|
||||
must be something closer to a StrictRedis instance.
|
||||
|
||||
My only regret - and the reason why I kept this under the hood for a
|
||||
while - is that it's hard to hook up in web2py to something happening
|
||||
right after the commit to a table, which will enable this version of the
|
||||
scheduler to process "immediate" tasks right away instead of waiting a
|
||||
few seconds (see FIXME in queue_task())
|
||||
|
||||
mode is reserved for future usage patterns.
|
||||
Right now it moves the coordination (which is the most intensive
|
||||
routine in the scheduler in matters of IPC) of workers to redis.
|
||||
I'd like to have incrementally redis-backed modes of operations,
|
||||
such as e.g.:
|
||||
- 1: IPC through redis (which is the current implementation)
|
||||
- 2: Store task results in redis (which will relieve further pressure
|
||||
from the db leaving the scheduler_run table empty and possibly
|
||||
keep things smooth as tasks results can be set to expire
|
||||
after a bit of time)
|
||||
- 3: Move all the logic for storing and queueing tasks to redis
|
||||
itself - which means no scheduler_task usage too - and use
|
||||
the database only as an historical record-bookkeeping
|
||||
(e.g. for reporting)
|
||||
|
||||
As usual, I'm eager to see your comments.
|
||||
"""
|
||||
|
||||
Scheduler.__init__(self, db, tasks=tasks, migrate=migrate,
|
||||
worker_name=worker_name, group_names=group_names,
|
||||
heartbeat=heartbeat, max_empty_runs=max_empty_runs,
|
||||
discard_results=discard_results, utc_time=utc_time)
|
||||
|
||||
self.r_server = redis_conn
|
||||
from gluon import current
|
||||
self._application = current.request.application or 'appname'
|
||||
|
||||
def _nkey(self, key):
|
||||
"""Helper to restrict all keys to a namespace
|
||||
and track them"""
|
||||
prefix = 'w2p:rsched:%s' % self._application
|
||||
allkeys = '%s:allkeys' % prefix
|
||||
newkey = "%s:%s" % (prefix, key)
|
||||
self.r_server.sadd(allkeys, newkey)
|
||||
return newkey
|
||||
|
||||
def prune_all(self):
|
||||
"""
|
||||
Just to be fair and implement a method
|
||||
that does housekeeping
|
||||
"""
|
||||
all_keys = self._nkey('allkeys')
|
||||
with self.r_server.pipeline() as pipe:
|
||||
while True:
|
||||
try:
|
||||
pipe.watch('PRUNE_ALL')
|
||||
while True:
|
||||
k = pipe.spop(all_keys)
|
||||
if k is None:
|
||||
break
|
||||
pipe.delete(k)
|
||||
pipe.execute()
|
||||
break
|
||||
except RWatchError:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
def dt2str(self, value):
|
||||
return value.strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
def str2date(self, value):
|
||||
return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
|
||||
|
||||
def send_heartbeat(self, counter):
|
||||
"""
|
||||
workers coordination has evolved into something is not that
|
||||
easy. Here we try to do what we need in a single transaction,
|
||||
and retry that transaction if something goes wrong
|
||||
"""
|
||||
with self.r_server.pipeline() as pipe:
|
||||
while True:
|
||||
try:
|
||||
pipe.watch('SEND_HEARTBEAT')
|
||||
self.inner_send_heartbeat(counter, pipe)
|
||||
pipe.execute()
|
||||
self.adj_hibernation()
|
||||
self.sleep()
|
||||
break
|
||||
except RWatchError:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
def inner_send_heartbeat(self, counter, pipe):
|
||||
"""
|
||||
Does a few things:
|
||||
- registers the workers
|
||||
- accepts commands sent to workers (KILL, TERMINATE, PICK, DISABLED, etc)
|
||||
- adjusts sleep
|
||||
- saves stats
|
||||
- elects master
|
||||
- does "housecleaning" for dead workers
|
||||
- triggers tasks assignment
|
||||
"""
|
||||
r_server = pipe
|
||||
status_keyset = self._nkey('worker_statuses')
|
||||
status_key = self._nkey('worker_status:%s' % (self.worker_name))
|
||||
now = self.now()
|
||||
mybackedstatus = r_server.hgetall(status_key)
|
||||
if not mybackedstatus:
|
||||
r_server.hmset(
|
||||
status_key,
|
||||
dict(
|
||||
status=ACTIVE, worker_name=self.worker_name,
|
||||
first_heartbeat=self.dt2str(now),
|
||||
last_heartbeat=self.dt2str(now),
|
||||
group_names=dumps(self.group_names), is_ticker=False,
|
||||
worker_stats=dumps(self.w_stats))
|
||||
)
|
||||
r_server.sadd(status_keyset, status_key)
|
||||
if not self.w_stats.status == POLLING:
|
||||
self.w_stats.status = ACTIVE
|
||||
self.w_stats.sleep = self.heartbeat
|
||||
mybackedstatus = ACTIVE
|
||||
else:
|
||||
mybackedstatus = mybackedstatus['status']
|
||||
if mybackedstatus == DISABLED:
|
||||
# keep sleeping
|
||||
self.w_stats.status = DISABLED
|
||||
r_server.hmset(
|
||||
status_key,
|
||||
dict(last_heartbeat=self.dt2str(now),
|
||||
worker_stats=dumps(self.w_stats))
|
||||
)
|
||||
elif mybackedstatus == TERMINATE:
|
||||
self.w_stats.status = TERMINATE
|
||||
logger.debug("Waiting to terminate the current task")
|
||||
self.give_up()
|
||||
elif mybackedstatus == KILL:
|
||||
self.w_stats.status = KILL
|
||||
self.die()
|
||||
else:
|
||||
if mybackedstatus == STOP_TASK:
|
||||
logger.info('Asked to kill the current task')
|
||||
self.terminate_process()
|
||||
logger.info('........recording heartbeat (%s)',
|
||||
self.w_stats.status)
|
||||
r_server.hmset(
|
||||
status_key,
|
||||
dict(
|
||||
last_heartbeat=self.dt2str(now), status=ACTIVE,
|
||||
worker_stats=dumps(self.w_stats)
|
||||
)
|
||||
)
|
||||
# newroutine
|
||||
r_server.expire(status_key, self.heartbeat * 3 * 15)
|
||||
self.w_stats.sleep = self.heartbeat # re-activating the process
|
||||
if self.w_stats.status not in (RUNNING, POLLING):
|
||||
self.w_stats.status = ACTIVE
|
||||
|
||||
self.do_assign_tasks = False
|
||||
if counter % 5 == 0 or mybackedstatus == PICK:
|
||||
try:
|
||||
logger.info(
|
||||
' freeing workers that have not sent heartbeat')
|
||||
registered_workers = r_server.smembers(status_keyset)
|
||||
allkeys = self._nkey('allkeys')
|
||||
for worker in registered_workers:
|
||||
w = r_server.hgetall(worker)
|
||||
w = Storage(w)
|
||||
if not w:
|
||||
r_server.srem(status_keyset, worker)
|
||||
logger.info('removing %s from %s', worker, allkeys)
|
||||
r_server.srem(allkeys, worker)
|
||||
continue
|
||||
try:
|
||||
self.is_a_ticker = self.being_a_ticker(pipe)
|
||||
except:
|
||||
pass
|
||||
if self.w_stats.status in (ACTIVE, POLLING):
|
||||
self.do_assign_tasks = True
|
||||
if self.is_a_ticker and self.do_assign_tasks:
|
||||
# I'm a ticker, and 5 loops passed without reassigning tasks,
|
||||
# let's do that and loop again
|
||||
if not self.db_thread:
|
||||
logger.debug('thread building own DAL object')
|
||||
self.db_thread = DAL(
|
||||
self.db._uri, folder=self.db._adapter.folder)
|
||||
self.define_tables(self.db_thread, migrate=False)
|
||||
db = self.db_thread
|
||||
self.wrapped_assign_tasks(db)
|
||||
return None
|
||||
except:
|
||||
logger.error('Error assigning tasks')
|
||||
|
||||
def being_a_ticker(self, pipe):
    """
    Elect (or recognize) the single TICKER worker.

    This is slightly more convoluted than the original
    but is far more efficient.

    Scans all registered workers in redis; if a ticker already exists,
    this worker stays a plain worker. Otherwise, if this worker is not
    busy, it claims the ticker role for itself and clears the flag on
    everyone else.

    Args:
        pipe: a redis pipeline/connection used for all reads and writes

    Returns:
        bool: True if this worker ends up being the ticker, else False
    """
    r_server = pipe
    status_keyset = self._nkey('worker_statuses')
    registered_workers = r_server.smembers(status_keyset)
    ticker = None
    all_active = []
    all_workers = []
    for worker in registered_workers:
        w = r_server.hgetall(worker)
        # collect every OTHER active worker; 'is_ticker' is stored as the
        # string 'True'/'False' in redis, hence the string comparison
        if w['worker_name'] != self.worker_name and w['status'] == ACTIVE:
            all_active.append(w)
        if w['is_ticker'] == 'True' and ticker is None:
            ticker = w
        all_workers.append(w)
    not_busy = self.w_stats.status in (ACTIVE, POLLING)
    if not ticker:
        if not_busy:
            # only if this worker isn't busy, otherwise wait for a free one
            for worker in all_workers:
                key = self._nkey('worker_status:%s' % worker['worker_name'])
                if worker['worker_name'] == self.worker_name:
                    r_server.hset(key, 'is_ticker', True)
                else:
                    r_server.hset(key, 'is_ticker', False)
            logger.info("TICKER: I'm a ticker")
        else:
            # giving up, only if I'm not alone
            if len(all_active) > 1:
                key = self._nkey('worker_status:%s' % (self.worker_name))
                r_server.hset(key, 'is_ticker', False)
            else:
                # alone in the pool: keep the candidacy for the next round
                not_busy = True
        return not_busy
    else:
        logger.info(
            "%s is a ticker, I'm a poor worker" % ticker['worker_name'])
        return False
|
||||
|
||||
def assign_tasks(self, db):
    """
    The real beauty. We don't need to ASSIGN tasks, we just put
    them into the relevant queue.

    Flow:
      1. under a redis WATCH on 'ASSIGN_TASKS', snapshot the ACTIVE workers
      2. build a per-group map of available workers
      3. expire QUEUED/ASSIGNED tasks whose stop_time passed while the
         scheduler was down
      4. compute the set of tasks with no unmet dependencies
      5. for each group, recycle stale entries from the running list and
         push newly-runnable task ids onto the group's redis queue

    Args:
        db: the DAL instance holding scheduler_task / scheduler_task_deps
    """
    st, sd = db.scheduler_task, db.scheduler_task_deps
    r_server = self.r_server
    now = self.now()
    status_keyset = self._nkey('worker_statuses')
    with r_server.pipeline() as pipe:
        while 1:
            try:
                # making sure we're the only one doing the job
                pipe.watch('ASSIGN_TASKS')
                registered_workers = pipe.smembers(status_keyset)
                all_workers = []
                for worker in registered_workers:
                    w = pipe.hgetall(worker)
                    if w['status'] == ACTIVE:
                        all_workers.append(Storage(w))
                pipe.execute()
                break
            except RWatchError:
                # someone else touched the key: back off and retry
                time.sleep(0.1)
                continue

    # build workers as dict of groups
    wkgroups = {}
    for w in all_workers:
        group_names = loads(w.group_names)
        for gname in group_names:
            if gname not in wkgroups:
                wkgroups[gname] = dict(
                    workers=[{'name': w.worker_name, 'c': 0}])
            else:
                wkgroups[gname]['workers'].append(
                    {'name': w.worker_name, 'c': 0})
    # set queued tasks that expired between "runs" (i.e., you turned off
    # the scheduler): then it wasn't expired, but now it is
    db(
        (st.status.belongs((QUEUED, ASSIGNED))) &
        (st.stop_time < now)
    ).update(status=EXPIRED)

    # calculate dependencies: children whose every blocking parent
    # has already been visited
    deps_with_no_deps = db(
        (sd.can_visit == False) &
        (~sd.task_child.belongs(
            db(sd.can_visit == False)._select(sd.task_parent)
            )
        )
    )._select(sd.task_child)
    no_deps = db(
        (st.status.belongs((QUEUED, ASSIGNED))) &
        (
            (sd.id == None) | (st.id.belongs(deps_with_no_deps))
        )
    )._select(st.id, distinct=True, left=sd.on(
        (st.id == sd.task_parent) &
        (sd.can_visit == False)
        )
    )

    # every task that is runnable right now (time window, repeats left,
    # enabled, dependency-free)
    all_available = db(
        (st.status.belongs((QUEUED, ASSIGNED))) &
        ((st.times_run < st.repeats) | (st.repeats == 0)) &
        (st.start_time <= now) &
        ((st.stop_time == None) | (st.stop_time > now)) &
        (st.next_run_time <= now) &
        (st.enabled == True) &
        (st.id.belongs(no_deps))
    )

    # cap how much we queue per pass, spread across groups
    limit = len(all_workers) * (50 / (len(wkgroups) or 1))

    # let's freeze it up
    db.commit()
    x = 0
    r_server = self.r_server
    for group in wkgroups.keys():
        queued_list = self._nkey('queued:%s' % group)
        queued_set = self._nkey('queued_set:%s' % group)
        # if are running, let's don't assign them again
        running_list = self._nkey('running:%s' % group)
        while True:
            # the joys for rpoplpush!
            t = r_server.rpoplpush(running_list, queued_list)
            if not t:
                # no more
                break
            r_server.sadd(queued_set, t)

        tasks = all_available(st.group_name == group).select(
            limitby=(0, limit), orderby = st.next_run_time)

        # put tasks in the processing list

        for task in tasks:
            x += 1
            gname = task.group_name  # NOTE(review): assigned but unused

            if r_server.sismember(queued_set, task.id):
                # already queued, we don't put on the list
                continue
            r_server.sadd(queued_set, task.id)
            r_server.lpush(queued_list, task.id)
            d = dict(status=QUEUED)
            if not task.task_name:
                # default the task name to the function name
                d['task_name'] = task.function_name
            db(
                (st.id == task.id) &
                (st.status.belongs((QUEUED, ASSIGNED)))
            ).update(**d)
        db.commit()
    # I didn't report tasks but I'm working nonetheless!!!!
    if x > 0:
        self.w_stats.empty_runs = 0
    self.w_stats.queue = x
    self.w_stats.distribution = wkgroups
    self.w_stats.workers = len(all_workers)
    # I'll be greedy only if tasks queued are equal to the limit
    # (meaning there could be others ready to be queued)
    self.greedy = x >= limit
    logger.info('TICKER: workers are %s', len(all_workers))
    logger.info('TICKER: tasks are %s', x)
|
||||
|
||||
def pop_task(self, db):
    """
    Fetch the next task to process for any of this worker's groups.

    Blocks on redis (brpoplpush) for up to ~60 seconds total across all
    groups; when a task id arrives it is moved onto the group's running
    list, marked RUNNING in the database, and wrapped into a Task.

    Args:
        db: the DAL instance used for the scheduler tables

    Returns:
        Task: the task to execute, or None if nothing is available or the
        popped id has no matching QUEUED row
    """
    r_server = self.r_server
    st = self.db.scheduler_task
    task = None
    # ready to process something
    for group in self.group_names:
        queued_set = self._nkey('queued_set:%s' % group)
        queued_list = self._nkey('queued:%s' % group)
        running_list = self._nkey('running:%s' % group)
        running_dict = self._nkey('running_dict:%s' % group)
        self.w_stats.status = POLLING
        # polling for 1 minute in total. If more groups are in,
        # polling is 1 minute in total
        logger.debug('    polling on %s' , group)
        task_id = r_server.brpoplpush(queued_list, running_list, timeout=60/len(self.group_names))
        logger.debug('    finished polling')
        self.w_stats.status = ACTIVE
        if task_id:
            # claim the task for this worker and drop it from the queued set
            r_server.hset(running_dict, task_id, self.worker_name)
            r_server.srem(queued_set, task_id)
            task = db(
                (st.id == task_id) &
                (st.status == QUEUED)
            ).select().first()
            if not task:
                # redis and the db disagree: purge the stale id everywhere
                r_server.lrem(running_list, 0, task_id)
                r_server.hdel(running_dict, task_id)
                r_server.lrem(queued_list, 0, task_id)
                logger.error("we received a task that isn't there (%s)" % task_id)
                return None
            break
    now = self.now()
    if task:
        task.update_record(status=RUNNING, last_run_time=now)
        # noone will touch my task!
        db.commit()
        logger.debug('    work to do %s', task.id)
    else:
        logger.info('nothing to do (%s)' % self.w_stats.status)
        return None
    times_run = task.times_run + 1
    if not task.prevent_drift:
        # schedule relative to the actual run time (drift allowed)
        next_run_time = task.last_run_time + datetime.timedelta(
            seconds=task.period
        )
    else:
        # anchor to start_time so runs don't drift over repetitions
        next_run_time = task.start_time + datetime.timedelta(
            seconds=task.period * times_run
        )
    if times_run < task.repeats or task.repeats == 0:
        # need to run (repeating task); repeats == 0 means "forever"
        run_again = True
    else:
        # no need to run again
        run_again = False
    run_id = 0
    # retry the insert on db contention unless results are discarded
    while True and not self.discard_results:
        logger.debug('    new scheduler_run record')
        try:
            run_id = db.scheduler_run.insert(
                task_id=task.id,
                status=RUNNING,
                start_time=now,
                worker_name=self.worker_name)
            db.commit()
            break
        except:
            time.sleep(0.5)
            db.rollback()
    logger.info('new task %(id)s "%(task_name)s"'
                ' %(application_name)s.%(function_name)s' % task)
    return Task(
        app=task.application_name,
        function=task.function_name,
        timeout=task.timeout,
        args=task.args,  # in json
        vars=task.vars,  # in json
        task_id=task.id,
        run_id=run_id,
        run_again=run_again,
        next_run_time=next_run_time,
        times_run=times_run,
        stop_time=task.stop_time,
        retry_failed=task.retry_failed,
        times_failed=task.times_failed,
        sync_output=task.sync_output,
        uuid=task.uuid,
        group_name=task.group_name)
|
||||
|
||||
def report_task(self, task, task_report):
    """
    Record the outcome of a finished task.

    Needs overwriting only because we need to pop from the
    running tasks kept in redis.

    Args:
        task: the Task that was executed
        task_report: object carrying status, result, output and traceback
    """
    r_server = self.r_server
    db = self.db
    now = self.now()
    st = db.scheduler_task
    sr = db.scheduler_run
    if not self.discard_results:
        if task_report.result != 'null' or task_report.tb:
            # result is 'null' as a string if task completed
            # if it's stopped it's None as NoneType, so we record
            # the STOPPED "run" anyway
            logger.debug(' recording task report in db (%s)',
                         task_report.status)
            db(sr.id == task.run_id).update(
                status=task_report.status,
                stop_time=now,
                run_result=task_report.result,
                run_output=task_report.output,
                traceback=task_report.tb)
        else:
            logger.debug(' deleting task report in db because of no result')
            db(sr.id == task.run_id).delete()
    # if there is a stop_time and the following run would exceed it
    is_expired = (task.stop_time
                  and task.next_run_time > task.stop_time
                  and True or False)
    # py2-style and/or chain: EXPIRED if it should repeat but ran out of
    # time, QUEUED if it should repeat, else COMPLETED
    status = (task.run_again and is_expired and EXPIRED
              or task.run_again and not is_expired
              and QUEUED or COMPLETED)
    if task_report.status == COMPLETED:
        # assigned calculations
        d = dict(status=status,
                 next_run_time=task.next_run_time,
                 times_run=task.times_run,
                 times_failed=0,
                 assigned_worker_name=self.worker_name
                 )
        db(st.id == task.task_id).update(**d)
        if status == COMPLETED:
            self.update_dependencies(db, task.task_id)
    else:
        # map the failure kind onto the task's final status
        st_mapping = {'FAILED': 'FAILED',
                      'TIMEOUT': 'TIMEOUT',
                      'STOPPED': 'FAILED'}[task_report.status]
        # requeue while retries remain; retry_failed == -1 means
        # "retry forever"
        status = (task.retry_failed
                  and task.times_failed < task.retry_failed
                  and QUEUED or task.retry_failed == -1
                  and QUEUED or st_mapping)
        db(st.id == task.task_id).update(
            times_failed=db.scheduler_task.times_failed + 1,
            next_run_time=task.next_run_time,
            status=status,
            assigned_worker_name=self.worker_name
        )
    logger.info('task completed (%s)', task_report.status)
    # drop the task from the group's redis running bookkeeping
    running_list = self._nkey('running:%s' % task.group_name)
    running_dict = self._nkey('running_dict:%s' % task.group_name)
    r_server.lrem(running_list, 0, task.task_id)
    r_server.hdel(running_dict, task.task_id)
|
||||
|
||||
def wrapped_pop_task(self):
    """Commodity function to call `pop_task` and trap exceptions.

    If an exception is raised, assume it happened because of database
    contention and retry `pop_task` after 0.5 seconds, up to 10 times.

    Returns:
        whatever `pop_task` returns (a Task or None), or None if every
        attempt failed or the worker was interrupted.
    """
    db = self.db
    db.commit()  # another nifty db.commit() only for Mysql
    x = 0
    while x < 10:
        try:
            # FIX: the original did `rtn = ...; return rtn; break` —
            # the `break` was unreachable dead code
            return self.pop_task(db)
        # this is here to "interrupt" any blrpoplpush op easily
        except KeyboardInterrupt:
            self.give_up()
            break
        # deliberately broad: any db error is treated as transient
        # contention and retried
        except:
            self.w_stats.errors += 1
            db.rollback()
            logger.error('    error popping tasks')
            x += 1
            time.sleep(0.5)
|
||||
|
||||
def get_workers(self, only_ticker=False):
    """Return a dict mapping worker_name -> Storage(**columns) for all
    "registered" workers.

    Args:
        only_ticker: if True, return only the worker currently acting as
            the TICKER (a single-entry dict), or {} when none exists.
    """
    r_server = self.r_server
    workers_found = {}
    for worker_key in r_server.smembers(self._nkey('worker_statuses')):
        raw = Storage(r_server.hgetall(worker_key))
        if not raw:
            # key expired or empty hash: skip it
            continue
        workers_found[raw.worker_name] = Storage(
            status=raw.status,
            first_heartbeat=self.str2date(raw.first_heartbeat),
            last_heartbeat=self.str2date(raw.last_heartbeat),
            group_names=loads(raw.group_names, object_hook=_decode_dict),
            # redis stores booleans as strings
            is_ticker=(raw.is_ticker == 'True'),
            worker_stats=loads(raw.worker_stats, object_hook=_decode_dict)
        )
    if not only_ticker:
        return workers_found
    for name, info in workers_found.iteritems():
        if info['is_ticker']:
            return {name: info}
    return {}
|
||||
|
||||
def set_worker_status(self, group_names=None, action=ACTIVE,
                      exclude=None, limit=None, worker_name=None):
    """Internal function to set worker's status.

    Args:
        group_names: group(s) whose workers are targeted (str or list);
            defaults to this worker's own groups
        action: the status to set (also implicitly excluded from targets)
        exclude: additional statuses whose workers must NOT be touched
        limit: cap on how many workers to update (ignored when
            worker_name is given)
        worker_name: target exactly this worker, if registered
    """
    r_server = self.r_server
    all_workers = self.get_workers()
    if not group_names:
        group_names = self.group_names
    elif isinstance(group_names, str):
        group_names = [group_names]
    # FIX: the original `exclude and exclude.append(action) or [action]`
    # always evaluated to [action] (list.append returns None), silently
    # ignoring -- and mutating -- the caller's `exclude` list. Build the
    # exclusion list without side effects instead.
    exclusion = list(exclude) + [action] if exclude else [action]
    workers = []
    if worker_name is not None:
        if worker_name in all_workers.keys():
            workers = [worker_name]
    else:
        for k, v in all_workers.iteritems():
            # target only workers in the requested groups whose current
            # status is not excluded
            if v.status not in exclusion and set(group_names) & set(v.group_names):
                workers.append(k)
    if limit and worker_name is None:
        workers = workers[:limit]
    if workers:
        with r_server.pipeline() as pipe:
            while True:
                try:
                    # serialize against concurrent status changes
                    pipe.watch('SET_WORKER_STATUS')
                    for w in workers:
                        worker_key = self._nkey('worker_status:%s' % w)
                        pipe.hset(worker_key, 'status', action)
                    pipe.execute()
                    break
                except RWatchError:
                    time.sleep(0.1)
                    continue
|
||||
|
||||
def queue_task(self, function, pargs=[], pvars={}, **kwargs):
    """
    FIXME: immediate should put item in queue. The hard part is
    that currently there are no hooks happening at post-commit time

    Queue tasks. This takes care of handling the validation of all
    parameters

    Args:
        function: the function (anything callable with a __name__)
        pargs: "raw" args to be passed to the function. Automatically
            jsonified. (mutable default is safe here: only read,
            never mutated)
        pvars: "raw" kwargs to be passed to the function. Automatically
            jsonified. (mutable default is safe here: only read,
            never mutated)
        kwargs: all the parameters available (basically, every
            `scheduler_task` column). If args and vars are here, they should
            be jsonified already, and they will override pargs and pvars

    Returns:
        a dict just as a normal validate_and_insert(), plus a uuid key
        holding the uuid of the queued task. If validation is not passed
        ( i.e. some parameters are invalid) both id and uuid will be None,
        and you'll get an "error" dict holding the errors found.
    """
    if hasattr(function, '__name__'):
        function = function.__name__
    # explicit kwargs win over the "raw" pargs/pvars
    targs = 'args' in kwargs and kwargs.pop('args') or dumps(pargs)
    tvars = 'vars' in kwargs and kwargs.pop('vars') or dumps(pvars)
    tuuid = 'uuid' in kwargs and kwargs.pop('uuid') or web2py_uuid()
    tname = 'task_name' in kwargs and kwargs.pop('task_name') or function
    immediate = 'immediate' in kwargs and kwargs.pop('immediate') or None
    rtn = self.db.scheduler_task.validate_and_insert(
        function_name=function,
        task_name=tname,
        args=targs,
        vars=tvars,
        uuid=tuuid,
        **kwargs)
    if not rtn.errors:
        rtn.uuid = tuuid
        if immediate:
            # poke the ticker into PICK so it assigns tasks right away
            r_server = self.r_server
            ticker = self.get_workers(only_ticker=True)
            if ticker.keys():
                ticker = ticker.keys()[0]
                with r_server.pipeline() as pipe:
                    while True:
                        try:
                            pipe.watch('SET_WORKER_STATUS')
                            worker_key = self._nkey('worker_status:%s' % ticker)
                            pipe.hset(worker_key, 'status', 'PICK')
                            pipe.execute()
                            break
                        except RWatchError:
                            time.sleep(0.1)
                            continue
    else:
        rtn.uuid = None
    return rtn
|
||||
|
||||
def stop_task(self, ref):
    """Shortcut for task termination.

    If the task is RUNNING it will terminate it, meaning that status
    will be set as FAILED.

    If the task is QUEUED, its stop_time will be set as to "now",
    the enabled flag will be set to False, and the status to STOPPED

    Args:
        ref: can be

            - an integer : lookup will be done by scheduler_task.id
            - a string : lookup will be done by scheduler_task.uuid

    Returns:
        - 1 if task was stopped (meaning an update has been done)
        - None if task was not found, or if task was not RUNNING or QUEUED

    Note:
        Experimental
    """
    r_server = self.r_server
    st = self.db.scheduler_task
    if isinstance(ref, int):
        q = st.id == ref
    elif isinstance(ref, str):
        q = st.uuid == ref
    else:
        raise SyntaxError(
            "You can retrieve results only by id or uuid")
    task = self.db(q).select(st.id, st.status, st.group_name)
    task = task.first()
    rtn = None
    if not task:
        return rtn
    running_dict = self._nkey('running_dict:%s' % task.group_name)
    if task.status == 'RUNNING':
        # signal the worker that owns the task to stop it
        worker_key = r_server.hget(running_dict, task.id)
        worker_key = self._nkey('worker_status:%s' % (worker_key))
        r_server.hset(worker_key, 'status', STOP_TASK)
    elif task.status == 'QUEUED':
        # not picked up yet: simply disable and mark it stopped
        rtn = self.db(q).update(
            stop_time=self.now(),
            enabled=False,
            status=STOPPED)
    return rtn
|
||||
@@ -1,13 +1,18 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Developed by niphlod@gmail.com
|
||||
License MIT/BSD/GPL
|
||||
|
||||
Redis-backed sessions
|
||||
"""
|
||||
|
||||
import redis
|
||||
from gluon import current
|
||||
from gluon.storage import Storage
|
||||
import time
|
||||
import logging
|
||||
import thread
|
||||
from gluon import current
|
||||
from gluon.storage import Storage
|
||||
from gluon.contrib.redis_utils import acquire_lock, release_lock
|
||||
from gluon.contrib.redis_utils import register_release_lock
|
||||
|
||||
logger = logging.getLogger("web2py.session.redis")
|
||||
|
||||
@@ -16,10 +21,20 @@ locker = thread.allocate_lock()
|
||||
|
||||
def RedisSession(*args, **vars):
|
||||
"""
|
||||
Usage example: put in models
|
||||
from gluon.contrib.redis_session import RedisSession
|
||||
sessiondb = RedisSession('localhost:6379',db=0, session_expiry=False, password=None)
|
||||
session.connect(request, response, db = sessiondb)
|
||||
Usage example: put in models::
|
||||
|
||||
from gluon.contrib.redis_utils import RConn
|
||||
rconn = RConn()
|
||||
from gluon.contrib.redis_session
|
||||
sessiondb = RedisSession(redis_conn=rconn, with_lock=True, session_expiry=False)
|
||||
session.connect(request, response, db = sessiondb)
|
||||
|
||||
Args:
|
||||
redis_conn: a redis-like connection object
|
||||
with_lock: prevent concurrent modifications to the same session
|
||||
session_expiry: delete automatically sessions after n seconds
|
||||
(still need to run sessions2trash.py every 1M sessions
|
||||
or so)
|
||||
|
||||
Simple slip-in storage for session
|
||||
"""
|
||||
@@ -36,30 +51,9 @@ def RedisSession(*args, **vars):
|
||||
|
||||
class RedisClient(object):
|
||||
|
||||
meta_storage = {}
|
||||
MAX_RETRIES = 5
|
||||
RETRIES = 0
|
||||
_release_script = None
|
||||
|
||||
def __init__(self, server='localhost:6379', db=None, debug=False,
|
||||
session_expiry=False, with_lock=False, password=None):
|
||||
"""session_expiry can be an integer, in seconds, to set the default expiration
|
||||
of sessions. The corresponding record will be deleted from the redis instance,
|
||||
and there's virtually no need to run sessions2trash.py
|
||||
"""
|
||||
self.server = server
|
||||
self.password = password
|
||||
self.db = db or 0
|
||||
host, port = (self.server.split(':') + ['6379'])[:2]
|
||||
port = int(port)
|
||||
self.debug = debug
|
||||
if current and current.request:
|
||||
self.app = current.request.application
|
||||
else:
|
||||
self.app = ''
|
||||
self.r_server = redis.Redis(host=host, port=port, db=self.db, password=self.password)
|
||||
if with_lock:
|
||||
RedisClient._release_script = self.r_server.register_script(_LUA_RELEASE_LOCK)
|
||||
def __init__(self, redis_conn, session_expiry=False, with_lock=False):
|
||||
self.r_server = redis_conn
|
||||
self._release_script = register_release_lock(self.r_server)
|
||||
self.tablename = None
|
||||
self.session_expiry = session_expiry
|
||||
self.with_lock = with_lock
|
||||
@@ -93,12 +87,11 @@ class RedisClient(object):
|
||||
class MockTable(object):
|
||||
|
||||
def __init__(self, db, r_server, tablename, session_expiry, with_lock=False):
|
||||
# here self.db is the RedisClient instance
|
||||
self.db = db
|
||||
self.r_server = r_server
|
||||
self.tablename = tablename
|
||||
# set the namespace for sessions of this app
|
||||
self.keyprefix = 'w2p:sess:%s' % tablename.replace(
|
||||
'web2py_session_', '')
|
||||
self.keyprefix = 'w2p:sess:%s' % tablename.replace('web2py_session_', '')
|
||||
# fast auto-increment id (needed for session handling)
|
||||
self.serial = "%s:serial" % self.keyprefix
|
||||
# index of all the session keys of this app
|
||||
@@ -126,7 +119,7 @@ class MockTable(object):
|
||||
if key == 'id':
|
||||
# return a fake query. We need to query it just by id for normal operations
|
||||
self.query = MockQuery(
|
||||
field='id', db=self.r_server,
|
||||
field='id', db=self.db,
|
||||
prefix=self.keyprefix, session_expiry=self.session_expiry,
|
||||
with_lock=self.with_lock, unique_key=self.unique_key
|
||||
)
|
||||
@@ -140,12 +133,12 @@ class MockTable(object):
|
||||
# 'locked', 'client_ip','created_datetime','modified_datetime'
|
||||
# 'unique_key', 'session_data'
|
||||
# retrieve a new key
|
||||
newid = str(self.r_server.incr(self.serial))
|
||||
newid = str(self.db.r_server.incr(self.serial))
|
||||
key = self.keyprefix + ':' + newid
|
||||
if self.with_lock:
|
||||
key_lock = key + ':lock'
|
||||
acquire_lock(self.r_server, key_lock, newid)
|
||||
with self.r_server.pipeline() as pipe:
|
||||
acquire_lock(self.db.r_server, key_lock, newid)
|
||||
with self.db.r_server.pipeline() as pipe:
|
||||
# add it to the index
|
||||
pipe.sadd(self.id_idx, key)
|
||||
# set a hash key with the Storage
|
||||
@@ -154,7 +147,7 @@ class MockTable(object):
|
||||
pipe.expire(key, self.session_expiry)
|
||||
pipe.execute()
|
||||
if self.with_lock:
|
||||
release_lock(self.r_server, key_lock, newid)
|
||||
release_lock(self.db, key_lock, newid)
|
||||
return newid
|
||||
|
||||
|
||||
@@ -186,8 +179,8 @@ class MockQuery(object):
|
||||
# means that someone wants to retrieve the key self.value
|
||||
key = self.keyprefix + ':' + str(self.value)
|
||||
if self.with_lock:
|
||||
acquire_lock(self.db, key + ':lock', self.value)
|
||||
rtn = self.db.hgetall(key)
|
||||
acquire_lock(self.db.r_server, key + ':lock', self.value, 2)
|
||||
rtn = self.db.r_server.hgetall(key)
|
||||
if rtn:
|
||||
if self.unique_key:
|
||||
# make sure the id and unique_key are correct
|
||||
@@ -201,13 +194,13 @@ class MockQuery(object):
|
||||
rtn = []
|
||||
id_idx = "%s:id_idx" % self.keyprefix
|
||||
# find all session keys of this app
|
||||
allkeys = self.db.smembers(id_idx)
|
||||
allkeys = self.db.r_server.smembers(id_idx)
|
||||
for sess in allkeys:
|
||||
val = self.db.hgetall(sess)
|
||||
val = self.db.r_server.hgetall(sess)
|
||||
if not val:
|
||||
if self.session_expiry:
|
||||
# clean up the idx, because the key expired
|
||||
self.db.srem(id_idx, sess)
|
||||
self.db.r_server.srem(id_idx, sess)
|
||||
continue
|
||||
val = Storage(val)
|
||||
# add a delete_record method (necessary for sessions2trash.py)
|
||||
@@ -222,9 +215,9 @@ class MockQuery(object):
|
||||
# means that the session has been found and needs an update
|
||||
if self.op == 'eq' and self.field == 'id' and self.value:
|
||||
key = self.keyprefix + ':' + str(self.value)
|
||||
if not self.db.exists(key):
|
||||
if not self.db.r_server.exists(key):
|
||||
return None
|
||||
with self.db.pipeline() as pipe:
|
||||
with self.db.r_server.pipeline() as pipe:
|
||||
pipe.hmset(key, kwargs)
|
||||
if self.session_expiry:
|
||||
pipe.expire(key, self.session_expiry)
|
||||
@@ -238,7 +231,7 @@ class MockQuery(object):
|
||||
if self.op == 'eq' and self.field == 'id' and self.value:
|
||||
id_idx = "%s:id_idx" % self.keyprefix
|
||||
key = self.keyprefix + ':' + str(self.value)
|
||||
with self.db.pipeline() as pipe:
|
||||
with self.db.r_server.pipeline() as pipe:
|
||||
pipe.delete(key)
|
||||
pipe.srem(id_idx, key)
|
||||
rtn = pipe.execute()
|
||||
@@ -254,29 +247,6 @@ class RecordDeleter(object):
|
||||
def __call__(self):
|
||||
id_idx = "%s:id_idx" % self.keyprefix
|
||||
# remove from the index
|
||||
self.db.srem(id_idx, self.key)
|
||||
self.db.r_server.srem(id_idx, self.key)
|
||||
# remove the key itself
|
||||
self.db.delete(self.key)
|
||||
|
||||
|
||||
def acquire_lock(conn, lockname, identifier, ltime=10):
|
||||
while True:
|
||||
if conn.set(lockname, identifier, ex=ltime, nx=True):
|
||||
return identifier
|
||||
time.sleep(.01)
|
||||
|
||||
|
||||
_LUA_RELEASE_LOCK = """
|
||||
if redis.call("get", KEYS[1]) == ARGV[1]
|
||||
then
|
||||
return redis.call("del", KEYS[1])
|
||||
else
|
||||
return 0
|
||||
end
|
||||
"""
|
||||
|
||||
|
||||
def release_lock(conn, lockname, identifier):
|
||||
return RedisClient._release_script(
|
||||
keys=[lockname], args=[identifier],
|
||||
client=conn)
|
||||
self.db.r_server.delete(self.key)
|
||||
|
||||
@@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Developed by niphlod@gmail.com
|
||||
License MIT/BSD/GPL
|
||||
|
||||
Serves as base to implement Redis connection object and various utils
|
||||
for redis_cache, redis_session and redis_scheduler in the future
|
||||
Should-could be overriden in case redis doesn't keep up (e.g. cluster support)
|
||||
to ensure compatibility with another - similar - library
|
||||
"""
|
||||
|
||||
import logging
|
||||
import thread
|
||||
import time
|
||||
from gluon import current
|
||||
|
||||
logger = logging.getLogger("web2py.redis_utils")
|
||||
|
||||
try:
|
||||
import redis
|
||||
from redis.exceptions import WatchError as RWatchError
|
||||
from redis.exceptions import ConnectionError as RConnectionError
|
||||
except ImportError:
|
||||
logger.error("Needs redis library to work")
|
||||
raise RuntimeError('Needs redis library to work')
|
||||
|
||||
|
||||
locker = thread.allocate_lock()
|
||||
|
||||
|
||||
def RConn(*args, **vars):
    """
    Instantiates a StrictRedis connection with parameters, at the first time
    only; later calls for the same application return the cached connection.

    The connection is cached as an attribute on the RConn function itself,
    keyed by the current application's name, under a module-level lock to
    avoid races between threads.
    """
    locker.acquire()
    try:
        instance_name = 'redis_conn_' + current.request.application
        if not hasattr(RConn, instance_name):
            setattr(RConn, instance_name, redis.StrictRedis(*args, **vars))
        return getattr(RConn, instance_name)
    finally:
        locker.release()
|
||||
|
||||
def acquire_lock(conn, lockname, identifier, ltime=10):
    """Busy-wait until the redis lock `lockname` is acquired.

    Uses SET with NX (only set if absent) and EX (auto-expire after
    `ltime` seconds) so a crashed holder cannot block forever.

    Args:
        conn: a redis-like connection
        lockname: the key used as the lock
        identifier: the value identifying this holder
        ltime: lock expiry in seconds

    Returns:
        the identifier, once the lock is held
    """
    while not conn.set(lockname, identifier, ex=ltime, nx=True):
        time.sleep(.01)
    return identifier
|
||||
|
||||
|
||||
_LUA_RELEASE_LOCK = """
|
||||
if redis.call("get", KEYS[1]) == ARGV[1]
|
||||
then
|
||||
return redis.call("del", KEYS[1])
|
||||
else
|
||||
return 0
|
||||
end
|
||||
"""
|
||||
|
||||
|
||||
def release_lock(instance, lockname, identifier):
    """Release the lock `lockname` held with `identifier`.

    Delegates to the instance's pre-registered release script, which
    deletes the key only if it still holds this identifier.
    """
    result = instance._release_script(keys=[lockname], args=[identifier])
    return result
|
||||
|
||||
|
||||
def register_release_lock(conn):
    """Register the compare-and-delete unlock Lua script on `conn`.

    Returns the callable script object used by `release_lock`.
    """
    release_script = conn.register_script(_LUA_RELEASE_LOCK)
    return release_script
|
||||
@@ -33,7 +33,7 @@ except ImportError:
|
||||
|
||||
class JSONRPCError(RuntimeError):
|
||||
"Error object for remote procedure call fail"
|
||||
def __init__(self, code, message, data=None):
|
||||
def __init__(self, code, message, data=''):
|
||||
value = "%s: %s\n%s" % (code, message, '\n'.join(data))
|
||||
RuntimeError.__init__(self, value)
|
||||
self.code = code
|
||||
|
||||
@@ -16,7 +16,7 @@ def quote(text):
|
||||
|
||||
class Node:
|
||||
def __init__(self, name, value, url='.', readonly=False, active=True,
|
||||
onchange=None, **kwarg):
|
||||
onchange=None, select=False, size=4, **kwarg):
|
||||
self.url = url
|
||||
self.name = name
|
||||
self.value = str(value)
|
||||
@@ -26,11 +26,21 @@ class Node:
|
||||
self.readonly = readonly
|
||||
self.active = active
|
||||
self.onchange = onchange
|
||||
self.size = 4
|
||||
self.size = size
|
||||
self.locked = False
|
||||
self.select = value if select and not isinstance(value, str) else False
|
||||
|
||||
def xml(self):
|
||||
return """<input name="%s" id="%s" value="%s" size="%s"
|
||||
if self.select:
|
||||
selectAttributes = dict(_name=self.name,_id=self.name,_size=self.size,
|
||||
_onblur="ajax('%s/blur',['%s']);"%(self.url,self.name))
|
||||
# _onkeyup="ajax('%s/keyup',['%s'], ':eval');"%(self.url,self.name),
|
||||
# _onfocus="ajax('%s/focus',['%s'], ':eval');"%(self.url,self.name),
|
||||
for k,v in selectAttributes.items():
|
||||
self.select[k] = v
|
||||
return self.select.xml()
|
||||
else:
|
||||
return """<input name="%s" id="%s" value="%s" size="%s"
|
||||
onkeyup="ajax('%s/keyup',['%s'], ':eval');"
|
||||
onfocus="ajax('%s/focus',['%s'], ':eval');"
|
||||
onblur="ajax('%s/blur',['%s'], ':eval');" %s/>
|
||||
@@ -391,7 +401,8 @@ class Sheet:
|
||||
|
||||
def __init__(self, rows, cols, url='.', readonly=False,
|
||||
active=True, onchange=None, value=None, data=None,
|
||||
headers=None, update_button="", **kwarg):
|
||||
headers=None, update_button="", c_headers=None,
|
||||
r_headers=None, **kwarg):
|
||||
|
||||
"""
|
||||
Arguments:
|
||||
@@ -425,6 +436,9 @@ class Sheet:
|
||||
self.tr_attributes = {}
|
||||
self.td_attributes = {}
|
||||
|
||||
self.c_headers = c_headers
|
||||
self.r_headers = r_headers
|
||||
|
||||
self.data = data
|
||||
self.readonly = readonly
|
||||
|
||||
@@ -505,7 +519,7 @@ class Sheet:
|
||||
self.environment[name] = obj
|
||||
|
||||
def cell(self, key, value, readonly=False, active=True,
|
||||
onchange=None, **kwarg):
|
||||
onchange=None, select=False, **kwarg):
|
||||
"""
|
||||
key is the name of the cell
|
||||
value is the initial value of the cell. It can be a formula "=1+3"
|
||||
@@ -528,7 +542,7 @@ class Sheet:
|
||||
value = value(r, c)
|
||||
|
||||
node = Node(key, value, self.url, readonly, active,
|
||||
onchange, **kwarg)
|
||||
onchange, select=select, **kwarg)
|
||||
self.nodes[key] = node
|
||||
self[key] = value
|
||||
|
||||
@@ -781,11 +795,19 @@ class Sheet:
|
||||
gluon.html.TH, gluon.html.BR, gluon.html.SCRIPT)
|
||||
regex = re.compile('r\d+c\d+')
|
||||
|
||||
header = TR(TH(), *[TH('c%s' % c)
|
||||
if not self.c_headers:
|
||||
header = TR(TH(), *[TH('c%s' % c)
|
||||
for c in range(self.cols)])
|
||||
else:
|
||||
header = TR(TH(), *[TH('%s' % c)
|
||||
for c in self.c_headers])
|
||||
|
||||
rows = []
|
||||
for r in range(self.rows):
|
||||
tds = [TH('r%s' % r), ]
|
||||
if not self.r_headers:
|
||||
tds = [TH('r%s' % r), ]
|
||||
else:
|
||||
tds = [TH('%s' % self.r_headers[r]), ]
|
||||
for c in range(self.cols):
|
||||
key = 'r%sc%s' % (r, c)
|
||||
attributes = {"_class": "w2p_spreadsheet_col_%s" %
|
||||
|
||||
@@ -146,8 +146,8 @@ class TokenHandler(tornado.web.RequestHandler):
|
||||
|
||||
class DistributeHandler(tornado.websocket.WebSocketHandler):
|
||||
|
||||
def check_origin(self, origin):
|
||||
return True
|
||||
def check_origin(self, origin):
|
||||
return True
|
||||
|
||||
def open(self, params):
|
||||
group, token, name = params.split('/') + [None, None]
|
||||
|
||||
@@ -41,7 +41,7 @@ class CustomImportException(ImportError):
|
||||
|
||||
def custom_importer(name, globals=None, locals=None, fromlist=None, level=-1):
|
||||
"""
|
||||
web2py's custom importer. It behaves like the standard Python importer but
|
||||
web2py's custom importer. It behaves like the standard Python importer but
|
||||
it tries to transform import statements as something like
|
||||
"import applications.app_name.modules.x".
|
||||
If the import fails, it falls back on naive_importer
|
||||
@@ -80,7 +80,7 @@ def custom_importer(name, globals=None, locals=None, fromlist=None, level=-1):
|
||||
if not fromlist:
|
||||
# import like "import x" or "import x.y"
|
||||
result = None
|
||||
for itemname in name.split("."):
|
||||
for itemname in name.split("."):
|
||||
new_mod = base_importer(
|
||||
modules_prefix, globals, locals, [itemname], level)
|
||||
try:
|
||||
|
||||
+29
-12
@@ -208,7 +208,7 @@ class Request(Storage):
|
||||
def parse_get_vars(self):
|
||||
"""Takes the QUERY_STRING and unpacks it to get_vars
|
||||
"""
|
||||
query_string = self.env.get('QUERY_STRING', '')
|
||||
query_string = self.env.get('query_string', '')
|
||||
dget = urlparse.parse_qs(query_string, keep_blank_values=1) # Ref: https://docs.python.org/2/library/cgi.html#cgi.parse_qs
|
||||
get_vars = self._get_vars = Storage(dget)
|
||||
for (key, value) in get_vars.iteritems():
|
||||
@@ -362,20 +362,30 @@ class Request(Storage):
|
||||
redirect(URL(scheme='https', args=self.args, vars=self.vars))
|
||||
|
||||
def restful(self):
|
||||
def wrapper(action, self=self):
|
||||
def f(_action=action, _self=self, *a, **b):
|
||||
self.is_restful = True
|
||||
method = _self.env.request_method
|
||||
if len(_self.args) and '.' in _self.args[-1]:
|
||||
_self.args[-1], _, self.extension = self.args[-1].rpartition('.')
|
||||
def wrapper(action, request=self):
|
||||
def f(_action=action, *a, **b):
|
||||
request.is_restful = True
|
||||
env = request.env
|
||||
is_json = env.content_type=='application/json'
|
||||
method = env.request_method
|
||||
if len(request.args) and '.' in request.args[-1]:
|
||||
request.args[-1], _, request.extension = request.args[-1].rpartition('.')
|
||||
current.response.headers['Content-Type'] = \
|
||||
contenttype('.' + _self.extension.lower())
|
||||
contenttype('.' + request.extension.lower())
|
||||
rest_action = _action().get(method, None)
|
||||
if not (rest_action and method == method.upper()
|
||||
and callable(rest_action)):
|
||||
raise HTTP(405, "method not allowed")
|
||||
try:
|
||||
return rest_action(*_self.args, **getattr(_self, 'vars', {}))
|
||||
vars = request.vars
|
||||
if method == 'POST' and is_json:
|
||||
body = request.body.read()
|
||||
if len(body):
|
||||
vars = sj.loads(body)
|
||||
res = rest_action(*request.args, **vars)
|
||||
if is_json and not isinstance(res, str):
|
||||
res = json(res)
|
||||
return res
|
||||
except TypeError, e:
|
||||
exc_type, exc_value, exc_traceback = sys.exc_info()
|
||||
if len(traceback.extract_tb(exc_traceback)) == 1:
|
||||
@@ -1023,10 +1033,16 @@ class Session(Storage):
|
||||
def _fixup_before_save(self):
|
||||
response = current.response
|
||||
rcookies = response.cookies
|
||||
if self._forget and response.session_id_name in rcookies:
|
||||
scookies = rcookies.get(response.session_id_name)
|
||||
if not scookies:
|
||||
return
|
||||
if self._forget:
|
||||
del rcookies[response.session_id_name]
|
||||
elif self._secure and response.session_id_name in rcookies:
|
||||
rcookies[response.session_id_name]['secure'] = True
|
||||
return
|
||||
if self.get('httponly_cookies',True):
|
||||
scookies['HttpOnly'] = True
|
||||
if self._secure:
|
||||
scookies['secure'] = True
|
||||
|
||||
def clear_session_cookies(self):
|
||||
request = current.request
|
||||
@@ -1074,6 +1090,7 @@ class Session(Storage):
|
||||
if response.session_storage_type == 'file':
|
||||
target = recfile.generate(response.session_filename)
|
||||
try:
|
||||
self._close(response)
|
||||
os.unlink(target)
|
||||
except:
|
||||
pass
|
||||
|
||||
+8
-3
@@ -668,7 +668,7 @@ class XML(XmlComponent):
|
||||
|
||||
|
||||
def XML_unpickle(data):
|
||||
return marshal.loads(data)
|
||||
return XML(marshal.loads(data))
|
||||
|
||||
|
||||
def XML_pickle(data):
|
||||
@@ -784,6 +784,9 @@ class DIV(XmlComponent):
|
||||
else:
|
||||
return self.components[i]
|
||||
|
||||
def get(self, i):
|
||||
return self.attributes.get(i)
|
||||
|
||||
def __setitem__(self, i, value):
|
||||
"""
|
||||
Sets attribute with name 'i' or component #i.
|
||||
@@ -1135,7 +1138,7 @@ class DIV(XmlComponent):
|
||||
for (key, value) in kargs.iteritems():
|
||||
if key not in ['first_only', 'replace', 'find_text']:
|
||||
if isinstance(value, (str, int)):
|
||||
if self[key] != str(value):
|
||||
if str(self[key]) != str(value):
|
||||
check = False
|
||||
elif key in self.attributes:
|
||||
if not value.search(str(self[key])):
|
||||
@@ -1856,6 +1859,8 @@ class INPUT(DIV):
|
||||
try:
|
||||
(value, errors) = validator(value)
|
||||
except:
|
||||
import traceback
|
||||
print traceback.format_exc()
|
||||
msg = "Validation error, field:%s %s" % (name,validator)
|
||||
raise Exception(msg)
|
||||
if not errors is None:
|
||||
@@ -2643,7 +2648,7 @@ def test():
|
||||
>>> form=FORM(INPUT(value="Hello World", _name="var", requires=IS_MATCH('^\w+$')))
|
||||
>>> isinstance(form.as_dict(), dict)
|
||||
True
|
||||
>>> form.as_dict(flat=True).has_key("vars")
|
||||
>>> "vars" in form.as_dict(flat=True)
|
||||
True
|
||||
>>> isinstance(form.as_json(), basestring) and len(form.as_json(sanitize=False)) > 0
|
||||
True
|
||||
|
||||
+9
-6
@@ -370,8 +370,8 @@ def wsgibase(environ, responder):
|
||||
cid = env.http_web2py_component_element,
|
||||
is_local = (env.remote_addr in local_hosts and
|
||||
client == env.remote_addr),
|
||||
is_shell = cmd_opts and cmd_opts.shell,
|
||||
is_sheduler = cmd_opts and cmd_opts.scheduler,
|
||||
is_shell = False,
|
||||
is_scheduler = False,
|
||||
is_https = env.wsgi_url_scheme in HTTPS_SCHEMES or \
|
||||
request.env.http_x_forwarded_proto in HTTPS_SCHEMES \
|
||||
or env.https == 'on'
|
||||
@@ -423,10 +423,13 @@ def wsgibase(environ, responder):
|
||||
# ##################################################
|
||||
|
||||
if env.http_cookie:
|
||||
try:
|
||||
request.cookies.load(env.http_cookie)
|
||||
except Cookie.CookieError, e:
|
||||
pass # invalid cookies
|
||||
for single_cookie in env.http_cookie.split(';'):
|
||||
single_cookie = single_cookie.strip()
|
||||
if single_cookie:
|
||||
try:
|
||||
request.cookies.load(single_cookie)
|
||||
except Cookie.CookieError:
|
||||
pass # single invalid cookie ignore
|
||||
|
||||
# ##################################################
|
||||
# try load session or create new session file
|
||||
|
||||
+1
-1
Submodule gluon/packages/dal updated: 6ea8659adc...598b2e999f
@@ -53,8 +53,8 @@ except:
|
||||
except:
|
||||
try:
|
||||
import win32con
|
||||
import win32file
|
||||
import pywintypes
|
||||
import win32file
|
||||
os_locking = 'windows'
|
||||
except:
|
||||
pass
|
||||
|
||||
+4
-18
@@ -9,7 +9,7 @@
|
||||
Generates names for cache and session files
|
||||
--------------------------------------------
|
||||
"""
|
||||
import os, uuid
|
||||
import os
|
||||
|
||||
|
||||
def generate(filename, depth=2, base=512):
|
||||
@@ -17,10 +17,10 @@ def generate(filename, depth=2, base=512):
|
||||
path, filename = os.path.split(filename)
|
||||
else:
|
||||
path = None
|
||||
dummyhash = sum(ord(c)*256**(i % 4) for i, c in enumerate(filename)) % base**depth
|
||||
dummyhash = sum(ord(c) * 256 ** (i % 4) for i, c in enumerate(filename)) % base ** depth
|
||||
folders = []
|
||||
for level in range(depth-1, -1, -1):
|
||||
code, dummyhash = divmod(dummyhash, base**level)
|
||||
for level in range(depth - 1, -1, -1):
|
||||
code, dummyhash = divmod(dummyhash, base ** level)
|
||||
folders.append("%03x" % code)
|
||||
folders.append(filename)
|
||||
if path:
|
||||
@@ -63,17 +63,3 @@ def open(filename, mode="r", path=None):
|
||||
if mode.startswith('w') and not os.path.exists(os.path.dirname(fullfilename)):
|
||||
os.makedirs(os.path.dirname(fullfilename))
|
||||
return file(fullfilename, mode)
|
||||
|
||||
|
||||
def test():
|
||||
if not os.path.exists('tests'):
|
||||
os.mkdir('tests')
|
||||
for k in range(20):
|
||||
filename = os.path.join('tests', str(uuid.uuid4()) + '.test')
|
||||
open(filename, "w").write('test')
|
||||
assert open(filename, "r").read() == 'test'
|
||||
if exists(filename):
|
||||
remove(filename)
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
|
||||
+1
-3
@@ -391,7 +391,6 @@ class MetaScheduler(threading.Thread):
|
||||
except:
|
||||
p.terminate()
|
||||
p.join()
|
||||
self.have_heartbeat = False
|
||||
logger.debug(' task stopped by general exception')
|
||||
tr = TaskReport(STOPPED)
|
||||
else:
|
||||
@@ -406,7 +405,6 @@ class MetaScheduler(threading.Thread):
|
||||
except Queue.Empty:
|
||||
tr = TaskReport(TIMEOUT)
|
||||
elif queue.empty():
|
||||
self.have_heartbeat = False
|
||||
logger.debug(' task stopped')
|
||||
tr = TaskReport(STOPPED)
|
||||
else:
|
||||
@@ -922,7 +920,7 @@ class Scheduler(MetaScheduler):
|
||||
else:
|
||||
st_mapping = {'FAILED': 'FAILED',
|
||||
'TIMEOUT': 'TIMEOUT',
|
||||
'STOPPED': 'QUEUED'}[task_report.status]
|
||||
'STOPPED': 'FAILED'}[task_report.status]
|
||||
status = (task.retry_failed
|
||||
and task.times_failed < task.retry_failed
|
||||
and QUEUED or task.retry_failed == -1
|
||||
|
||||
@@ -129,6 +129,8 @@ def env(
|
||||
if global_settings.cmd_options:
|
||||
ip = global_settings.cmd_options.ip
|
||||
port = global_settings.cmd_options.port
|
||||
request.is_shell = global_settings.cmd_options.shell is not None
|
||||
request.is_scheduler = global_settings.cmd_options.scheduler is not None
|
||||
else:
|
||||
ip, port = '127.0.0.1', '8000'
|
||||
request.env.http_host = '%s:%s' % (ip, port)
|
||||
|
||||
+63
-42
@@ -29,7 +29,7 @@ from gluon.html import URL, FIELDSET, P, DEFAULT_PASSWORD_DISPLAY
|
||||
from pydal.base import DEFAULT
|
||||
from pydal.objects import Table, Row, Expression, Field
|
||||
from pydal.adapters.base import CALLABLETYPES
|
||||
from pydal.helpers.methods import smart_query, bar_encode
|
||||
from pydal.helpers.methods import smart_query, bar_encode, _repr_ref
|
||||
from pydal.helpers.classes import Reference, SQLCustomType
|
||||
from gluon.storage import Storage
|
||||
from gluon.utils import md5_hash
|
||||
@@ -71,6 +71,26 @@ def represent(field, value, record):
|
||||
else:
|
||||
raise RuntimeError("field representation must take 1 or 2 args")
|
||||
|
||||
class CacheRepresenter(object):
|
||||
def __init__(self):
|
||||
self.cache = {}
|
||||
def __call__(self, field, value, row):
|
||||
cache = self.cache
|
||||
if field not in cache:
|
||||
cache[field] = {}
|
||||
try:
|
||||
nvalue = cache[field][value]
|
||||
except KeyError:
|
||||
try:
|
||||
nvalue = field.represent(value, row)
|
||||
except KeyError:
|
||||
try:
|
||||
nvalue = field.represent(value, row[field.tablename])
|
||||
except KeyError:
|
||||
nvalue = None
|
||||
if isinstance(field, _repr_ref):
|
||||
cache[field][value] = nvalue
|
||||
return nvalue
|
||||
|
||||
def safe_int(x):
|
||||
try:
|
||||
@@ -626,13 +646,12 @@ class AutocompleteWidget(object):
|
||||
def __init__(self, request, field, id_field=None, db=None,
|
||||
orderby=None, limitby=(0, 10), distinct=False,
|
||||
keyword='_autocomplete_%(tablename)s_%(fieldname)s',
|
||||
min_length=2, help_fields=None, help_string=None):
|
||||
min_length=2, help_fields=None, help_string=None, at_beginning = True):
|
||||
|
||||
self.help_fields = help_fields or []
|
||||
self.help_string = help_string
|
||||
if self.help_fields and not self.help_string:
|
||||
self.help_string = ' '.join('%%(%s)s' % f.name
|
||||
for f in self.help_fields)
|
||||
self.help_string = ' '.join('%%(%s)s' % f.name for f in self.help_fields)
|
||||
|
||||
self.request = request
|
||||
self.keyword = keyword % dict(tablename=field.tablename,
|
||||
@@ -642,6 +661,7 @@ class AutocompleteWidget(object):
|
||||
self.limitby = limitby
|
||||
self.distinct = distinct
|
||||
self.min_length = min_length
|
||||
self.at_beginning = at_beginning
|
||||
self.fields = [field]
|
||||
if id_field:
|
||||
self.is_reference = True
|
||||
@@ -659,8 +679,10 @@ class AutocompleteWidget(object):
|
||||
field = self.fields[0]
|
||||
if settings and settings.global_settings.web2py_runtime_gae:
|
||||
rows = self.db(field.__ge__(self.request.vars[self.keyword]) & field.__lt__(self.request.vars[self.keyword] + u'\ufffd')).select(orderby=self.orderby, limitby=self.limitby, *(self.fields+self.help_fields))
|
||||
else:
|
||||
elif self.at_beginning:
|
||||
rows = self.db(field.like(self.request.vars[self.keyword] + '%', case_sensitive=False)).select(orderby=self.orderby, limitby=self.limitby, distinct=self.distinct, *(self.fields+self.help_fields))
|
||||
else:
|
||||
rows = self.db(field.contains(self.request.vars[self.keyword], case_sensitive=False)).select(orderby=self.orderby, limitby=self.limitby, distinct=self.distinct, *(self.fields+self.help_fields))
|
||||
if rows:
|
||||
if self.is_reference:
|
||||
id_field = self.fields[1]
|
||||
@@ -714,7 +736,7 @@ class AutocompleteWidget(object):
|
||||
name=name, div_id=div_id, u='F' + self.keyword)
|
||||
if self.min_length == 0:
|
||||
attr['_onfocus'] = attr['_onkeyup']
|
||||
return CAT(INPUT(**attr),
|
||||
return CAT(INPUT(**attr),
|
||||
INPUT(_type='hidden', _id=key3, _value=value,
|
||||
_name=name, requires=field.requires),
|
||||
DIV(_id=div_id, _style='position:absolute;'))
|
||||
@@ -727,7 +749,7 @@ class AutocompleteWidget(object):
|
||||
key=self.keyword, id=attr['_id'], div_id=div_id, u='F' + self.keyword)
|
||||
if self.min_length == 0:
|
||||
attr['_onfocus'] = attr['_onkeyup']
|
||||
return CAT(INPUT(**attr),
|
||||
return CAT(INPUT(**attr),
|
||||
DIV(_id=div_id, _style='position:absolute;'))
|
||||
|
||||
|
||||
@@ -818,7 +840,7 @@ def formstyle_bootstrap(form, fields):
|
||||
controls.add_class('span4')
|
||||
|
||||
if isinstance(label, LABEL):
|
||||
label['_class'] = 'control-label'
|
||||
label['_class'] = add_class(label.get('_class'),'control-label')
|
||||
|
||||
if _submit:
|
||||
# submit button has unwrapped label and controls, different class
|
||||
@@ -868,7 +890,7 @@ def formstyle_bootstrap3_stacked(form, fields):
|
||||
e.add_class('form-control')
|
||||
|
||||
if isinstance(label, LABEL):
|
||||
label['_class'] = 'control-label'
|
||||
label['_class'] = add_class(label.get('_class'),'control-label')
|
||||
|
||||
parent.append(DIV(label, _controls, _class='form-group', _id=id))
|
||||
return parent
|
||||
@@ -916,8 +938,10 @@ def formstyle_bootstrap3_inline_factory(col_label_size=3):
|
||||
elif isinstance(controls, UL):
|
||||
for e in controls.elements("input"):
|
||||
e.add_class('form-control')
|
||||
elif controls is None or isinstance(controls, basestring):
|
||||
_controls = P(controls, _class="form-control-static %s" % col_class)
|
||||
if isinstance(label, LABEL):
|
||||
label['_class'] = 'control-label %s' % label_col_class
|
||||
label['_class'] = add_class(label.get('_class'),'control-label %s' % label_col_class)
|
||||
|
||||
parent.append(DIV(label, _controls, _class='form-group', _id=id))
|
||||
return parent
|
||||
@@ -1100,10 +1124,12 @@ class SQLFORM(FORM):
|
||||
raise HTTP(404, "Object not found")
|
||||
self.record = record
|
||||
|
||||
self.record_id = record_id
|
||||
if keyed:
|
||||
self.record_id = dict([(k, record and str(record[k]) or None)
|
||||
for k in table._primarykey])
|
||||
else:
|
||||
self.record_id = record_id
|
||||
|
||||
self.field_parent = {}
|
||||
xfields = []
|
||||
self.fields = fields
|
||||
@@ -1126,7 +1152,8 @@ class SQLFORM(FORM):
|
||||
extra_fields = extra_fields or []
|
||||
self.extra_fields = {}
|
||||
for extra_field in extra_fields:
|
||||
self.fields.append(extra_field.name)
|
||||
if not extra_field.name in self.fields:
|
||||
self.fields.append(extra_field.name)
|
||||
self.extra_fields[extra_field.name] = extra_field
|
||||
extra_field.db = table._db
|
||||
extra_field.table = table
|
||||
@@ -1160,6 +1187,14 @@ class SQLFORM(FORM):
|
||||
label = LABEL(label, label and sep, _for=field_id,
|
||||
_id=field_id + SQLFORM.ID_LABEL_SUFFIX)
|
||||
|
||||
cond = readonly or \
|
||||
(not ignore_rw and not field.writable and field.readable)
|
||||
|
||||
if cond:
|
||||
label['_class'] = 'readonly'
|
||||
else:
|
||||
label['_class'] = ''
|
||||
|
||||
row_id = field_id + SQLFORM.ID_ROW_SUFFIX
|
||||
if field.type == 'id':
|
||||
self.custom.dspval.id = nbsp
|
||||
@@ -1188,8 +1223,6 @@ class SQLFORM(FORM):
|
||||
default = field.default
|
||||
if isinstance(default, CALLABLETYPES):
|
||||
default = default()
|
||||
cond = readonly or \
|
||||
(not ignore_rw and not field.writable and field.readable)
|
||||
|
||||
if default is not None and not cond:
|
||||
default = field.formatter(default)
|
||||
@@ -1471,13 +1504,12 @@ class SQLFORM(FORM):
|
||||
hideerror=hideerror,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
self.deleted = \
|
||||
request_vars.get(self.FIELDNAME_REQUEST_DELETE, False)
|
||||
|
||||
self.deleted = request_vars.get(self.FIELDNAME_REQUEST_DELETE, False)
|
||||
|
||||
self.custom.end = CAT(self.hidden_fields(), self.custom.end)
|
||||
|
||||
auch = record_id and self.errors and self.deleted
|
||||
delete_exception = self.record_id and self.errors and self.deleted
|
||||
|
||||
if self.record_changed and self.detect_record_change:
|
||||
message_onchange = \
|
||||
@@ -1489,8 +1521,9 @@ class SQLFORM(FORM):
|
||||
if message_onchange is not None:
|
||||
current.response.flash = message_onchange
|
||||
return ret
|
||||
elif (not ret) and (not auch):
|
||||
# auch is true when user tries to delete a record
|
||||
|
||||
elif (not ret) and (not delete_exception):
|
||||
# delete_exception is true when user tries to delete a record
|
||||
# that does not pass validation, yet it should be deleted
|
||||
for fieldname in self.fields:
|
||||
|
||||
@@ -1520,9 +1553,10 @@ class SQLFORM(FORM):
|
||||
self.accepted = ret
|
||||
return ret
|
||||
|
||||
if record_id and str(record_id) != str(self.record_id):
|
||||
raise SyntaxError('user is tampering with form\'s record_id: '
|
||||
'%s != %s' % (record_id, self.record_id))
|
||||
if self.record_id:
|
||||
if str(record_id) != str(self.record_id):
|
||||
raise SyntaxError('user is tampering with form\'s record_id: '
|
||||
'%s != %s' % (record_id, self.record_id))
|
||||
|
||||
if record_id and dbio and not keyed:
|
||||
self.vars.id = self.record[self.id_field_name]
|
||||
@@ -1686,6 +1720,7 @@ class SQLFORM(FORM):
|
||||
self.id_field_name]).update(**fields)
|
||||
else:
|
||||
self.vars.id = self.table.insert(**fields)
|
||||
|
||||
self.accepted = ret
|
||||
return ret
|
||||
|
||||
@@ -2048,7 +2083,7 @@ class SQLFORM(FORM):
|
||||
## if it's not an integer
|
||||
if cache_count is None or isinstance(cache_count, tuple):
|
||||
if groupby:
|
||||
c = 'count(*)'
|
||||
c = 'count(*) AS count_all'
|
||||
nrows = db.executesql(
|
||||
'select count(*) from (%s) _tmp;' %
|
||||
dbset._select(c, left=left, cacheable=True,
|
||||
@@ -2083,7 +2118,7 @@ class SQLFORM(FORM):
|
||||
elif isinstance(orderby, Field) and orderby is not field_id:
|
||||
# here we're with an ASC order on a field stored as orderby
|
||||
orderby = orderby | field_id
|
||||
elif (isinstance(orderby, Expression) and
|
||||
elif (isinstance(orderby, Expression) and
|
||||
orderby.first and orderby.first is not field_id):
|
||||
# here we're with a DESC order on a field stored as orderby.first
|
||||
orderby = orderby | field_id
|
||||
@@ -2668,7 +2703,7 @@ class SQLFORM(FORM):
|
||||
htmltable = TABLE(COLGROUP(*cols), THEAD(head))
|
||||
tbody = TBODY()
|
||||
numrec = 0
|
||||
repr_cache = {}
|
||||
repr_cache = CacheRepresenter()
|
||||
for row in rows:
|
||||
trcols = []
|
||||
id = row[field_id]
|
||||
@@ -2688,27 +2723,13 @@ class SQLFORM(FORM):
|
||||
maxlength = maxtextlengths.get(str(field), maxtextlength)
|
||||
if field.represent:
|
||||
if field.type.startswith('reference'):
|
||||
if field not in repr_cache:
|
||||
repr_cache[field] = {}
|
||||
try:
|
||||
nvalue = repr_cache[field][value]
|
||||
except KeyError:
|
||||
try:
|
||||
nvalue = field.represent(value, row)
|
||||
except KeyError:
|
||||
try:
|
||||
nvalue = field.represent(
|
||||
value, row[field.tablename])
|
||||
except KeyError:
|
||||
nvalue = None
|
||||
repr_cache[field][value] = nvalue
|
||||
nvalue = repr_cache(field, value, row)
|
||||
else:
|
||||
try:
|
||||
nvalue = field.represent(value, row)
|
||||
except KeyError:
|
||||
try:
|
||||
nvalue = field.represent(
|
||||
value, row[field.tablename])
|
||||
nvalue = field.represent(value, row[field.tablename])
|
||||
except KeyError:
|
||||
nvalue = None
|
||||
value = nvalue
|
||||
|
||||
+4
-4
@@ -25,7 +25,6 @@ regex_stop_range = re.compile('(?<=\-)\d+')
|
||||
|
||||
DEFAULT_CHUNK_SIZE = 64 * 1024
|
||||
|
||||
|
||||
def streamer(stream, chunk_size=DEFAULT_CHUNK_SIZE, bytes=None):
|
||||
offset = 0
|
||||
while bytes is None or offset < bytes:
|
||||
@@ -51,11 +50,12 @@ def stream_file_or_304_or_206(
|
||||
status=200,
|
||||
error_message=None
|
||||
):
|
||||
if error_message is None:
|
||||
error_message = rewrite.THREAD_LOCAL.routes.error_message % 'invalid request'
|
||||
# FIX THIS
|
||||
# if error_message is None:
|
||||
# error_message = rewrite.THREAD_LOCAL.routes.error_message % 'invalid request'
|
||||
try:
|
||||
open = file # this makes no sense but without it GAE cannot open files
|
||||
fp = open(static_file)
|
||||
fp = open(static_file,'rb')
|
||||
except IOError, e:
|
||||
if e[0] == errno.EISDIR:
|
||||
raise HTTP(403, error_message, web2py_error='file is a directory')
|
||||
|
||||
@@ -898,6 +898,9 @@ def render(content="hello world",
|
||||
if not 'NOESCAPE' in context:
|
||||
context['NOESCAPE'] = NOESCAPE
|
||||
|
||||
if isinstance(content, unicode):
|
||||
content = content.encode('utf8')
|
||||
|
||||
# save current response class
|
||||
if context and 'response' in context:
|
||||
old_response_body = context['response'].body
|
||||
|
||||
@@ -3,12 +3,14 @@ import sys
|
||||
from test_http import *
|
||||
from test_cache import *
|
||||
from test_contenttype import *
|
||||
from test_compileapp import *
|
||||
from test_fileutils import *
|
||||
from test_globals import *
|
||||
from test_html import *
|
||||
from test_is_url import *
|
||||
from test_languages import *
|
||||
from test_router import *
|
||||
from test_recfile import *
|
||||
from test_routes import *
|
||||
from test_storage import *
|
||||
from test_serializers import *
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
""" Unit tests for utils.py """
|
||||
|
||||
import unittest
|
||||
from fix_path import fix_sys_path
|
||||
|
||||
fix_sys_path(__file__)
|
||||
|
||||
from compileapp import compile_application, remove_compiled_application
|
||||
from gluon.fileutils import w2p_pack, w2p_unpack
|
||||
import os
|
||||
|
||||
|
||||
class TestPack(unittest.TestCase):
|
||||
""" Tests the compileapp.py module """
|
||||
|
||||
def test_compile(self):
|
||||
#apps = ['welcome', 'admin', 'examples']
|
||||
apps = ['welcome']
|
||||
for appname in apps:
|
||||
appname_path = os.path.join(os.getcwd(), 'applications', appname)
|
||||
compile_application(appname_path)
|
||||
remove_compiled_application(appname_path)
|
||||
test_path = os.path.join(os.getcwd(), "%s.w2p" % appname)
|
||||
unpack_path = os.path.join(os.getcwd(), 'unpack', appname)
|
||||
w2p_pack(test_path, appname_path, compiled=True, filenames=None)
|
||||
w2p_pack(test_path, appname_path, compiled=False, filenames=None)
|
||||
w2p_unpack(test_path, unpack_path)
|
||||
return
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
+69
-1
@@ -4,15 +4,16 @@
|
||||
Unit tests for gluon.dal
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
from fix_path import fix_sys_path
|
||||
|
||||
fix_sys_path(__file__)
|
||||
|
||||
|
||||
from gluon.dal import DAL, Field
|
||||
|
||||
|
||||
def tearDownModule():
|
||||
try:
|
||||
os.unlink('dummy.db')
|
||||
@@ -50,6 +51,73 @@ class TestDefaultValidators(unittest.TestCase):
|
||||
pass
|
||||
"""
|
||||
|
||||
|
||||
def _prepare_exec_for_file(filename):
|
||||
module = []
|
||||
if filename.endswith('.py'):
|
||||
filename = filename[:-3]
|
||||
elif os.path.split(filename)[1] == '__init__.py':
|
||||
filename = os.path.dirname(filename)
|
||||
else:
|
||||
raise 'The file provided (%s) does is not a valid Python file.'
|
||||
filename = os.path.realpath(filename)
|
||||
dirpath = filename
|
||||
while 1:
|
||||
dirpath, extra = os.path.split(dirpath)
|
||||
module.append(extra)
|
||||
if not os.path.isfile(os.path.join(dirpath, '__init__.py')):
|
||||
break
|
||||
sys.path.insert(0, dirpath)
|
||||
return '.'.join(module[::-1])
|
||||
|
||||
|
||||
def load_pydal_tests_module():
|
||||
path = os.path.dirname(os.path.abspath(__file__))
|
||||
if not os.path.isfile(os.path.join(path, 'web2py.py')):
|
||||
i = 0
|
||||
while i < 10:
|
||||
i += 1
|
||||
if os.path.exists(os.path.join(path, 'web2py.py')):
|
||||
break
|
||||
path = os.path.abspath(os.path.join(path, '..'))
|
||||
pydal_test_path = os.path.join(
|
||||
path, "gluon", "packages", "dal", "tests", "__init__.py")
|
||||
mname = _prepare_exec_for_file(pydal_test_path)
|
||||
mod = __import__(mname)
|
||||
return mod
|
||||
|
||||
|
||||
def pydal_suite():
|
||||
mod = load_pydal_tests_module()
|
||||
suite = unittest.TestSuite()
|
||||
tlist = [
|
||||
getattr(mod, el) for el in mod.__dict__.keys() if el.startswith("Test")
|
||||
]
|
||||
for t in tlist:
|
||||
suite.addTest(unittest.makeSuite(t))
|
||||
return suite
|
||||
|
||||
|
||||
class TestDALAdapters(unittest.TestCase):
|
||||
def _run_tests(self):
|
||||
suite = pydal_suite()
|
||||
return unittest.TextTestRunner(verbosity=2).run(suite)
|
||||
|
||||
def test_mysql(self):
|
||||
if os.environ.get('APPVEYOR'):
|
||||
return
|
||||
os.environ["DB"] = "mysql://root:@localhost/pydal"
|
||||
result = self._run_tests()
|
||||
self.assertTrue(result)
|
||||
|
||||
def test_pg8000(self):
|
||||
if os.environ.get('APPVEYOR'):
|
||||
return
|
||||
os.environ["DB"] = "postgres:pg8000://postgres:@localhost/pydal"
|
||||
result = self._run_tests()
|
||||
self.assertTrue(result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
tearDownModule()
|
||||
|
||||
@@ -6,17 +6,43 @@
|
||||
"""
|
||||
|
||||
|
||||
import re
|
||||
import unittest
|
||||
from fix_path import fix_sys_path
|
||||
|
||||
fix_sys_path(__file__)
|
||||
|
||||
from gluon.globals import Response
|
||||
from gluon.globals import Request, Response, Session
|
||||
from gluon import URL
|
||||
|
||||
def setup_clean_session():
|
||||
request = Request(env={})
|
||||
request.application = 'a'
|
||||
request.controller = 'c'
|
||||
request.function = 'f'
|
||||
request.folder = 'applications/admin'
|
||||
response = Response()
|
||||
session = Session()
|
||||
session.connect(request, response)
|
||||
from gluon.globals import current
|
||||
current.request = request
|
||||
current.response = response
|
||||
current.session = session
|
||||
return current
|
||||
|
||||
class testResponse(unittest.TestCase):
|
||||
|
||||
#port from python 2.7, needed for 2.5 and 2.6 tests
|
||||
def assertRegexpMatches(self, text, expected_regexp, msg=None):
|
||||
"""Fail the test unless the text matches the regular expression."""
|
||||
if isinstance(expected_regexp, basestring):
|
||||
expected_regexp = re.compile(expected_regexp)
|
||||
if not expected_regexp.search(text):
|
||||
msg = msg or "Regexp didn't match"
|
||||
msg = '%s: %r not found in %r' % (
|
||||
msg, expected_regexp.pattern, text)
|
||||
raise self.failureException(msg)
|
||||
|
||||
def test_include_files(self):
|
||||
|
||||
def return_includes(response, extensions=None):
|
||||
@@ -120,5 +146,43 @@ class testResponse(unittest.TestCase):
|
||||
content = return_includes(response)
|
||||
self.assertEqual(content, '')
|
||||
|
||||
def test_cookies(self):
|
||||
current = setup_clean_session()
|
||||
cookie = str(current.response.cookies)
|
||||
session_key='%s=%s'%(current.response.session_id_name,current.response.session_id)
|
||||
self.assertRegexpMatches(cookie, r'^Set-Cookie: ')
|
||||
self.assertTrue(session_key in cookie)
|
||||
self.assertTrue('Path=/' in cookie)
|
||||
|
||||
def test_cookies_secure(self):
|
||||
current = setup_clean_session()
|
||||
current.session._fixup_before_save()
|
||||
cookie = str(current.response.cookies)
|
||||
self.assertTrue('secure' not in cookie)
|
||||
|
||||
current = setup_clean_session()
|
||||
current.session.secure()
|
||||
current.session._fixup_before_save()
|
||||
cookie = str(current.response.cookies)
|
||||
self.assertTrue('secure' in cookie)
|
||||
|
||||
def test_cookies_httponly(self):
|
||||
current = setup_clean_session()
|
||||
current.session._fixup_before_save()
|
||||
cookie = str(current.response.cookies)
|
||||
self.assertTrue('httponly' in cookie)
|
||||
|
||||
current = setup_clean_session()
|
||||
current.session.httponly_cookies = True
|
||||
current.session._fixup_before_save()
|
||||
cookie = str(current.response.cookies)
|
||||
self.assertTrue('httponly' in cookie)
|
||||
|
||||
current = setup_clean_session()
|
||||
current.session.httponly_cookies = False
|
||||
current.session._fixup_before_save()
|
||||
cookie = str(current.response.cookies)
|
||||
self.assertTrue('httponly' not in cookie)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -309,7 +309,7 @@ class TestBareHelpers(unittest.TestCase):
|
||||
self.assertEqual(XML('<h1>Hello<a data-hello="world">World</a></h1>', sanitize=True),
|
||||
XML('<h1>HelloWorld</h1>'))
|
||||
#bug check for the sanitizer for closing no-close tags
|
||||
self.assertEqual(XML('<p>Test</p><br/><p>Test</p><br/>', sanitize=True),
|
||||
self.assertEqual(XML('<p>Test</p><br/><p>Test</p><br/>', sanitize=True),
|
||||
XML('<p>Test</p><br /><p>Test</p><br />'))
|
||||
|
||||
def testTAG(self):
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Unit tests for gluon.recfile
|
||||
"""
|
||||
import unittest
|
||||
import os
|
||||
import shutil
|
||||
import uuid
|
||||
from fix_path import fix_sys_path
|
||||
|
||||
fix_sys_path(__file__)
|
||||
|
||||
from gluon import recfile
|
||||
|
||||
|
||||
class TestRecfile(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
os.mkdir('tests')
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree('tests')
|
||||
|
||||
def testgeneration(self):
|
||||
for k in range(20):
|
||||
teststring = 'test%s' % k
|
||||
filename = os.path.join('tests', str(uuid.uuid4()) + '.test')
|
||||
with recfile.open(filename, "w") as g:
|
||||
g.write(teststring)
|
||||
self.assertEqual(recfile.open(filename, "r").read(), teststring)
|
||||
is_there = recfile.exists(filename)
|
||||
self.assertTrue(is_there)
|
||||
recfile.remove(filename)
|
||||
is_there = recfile.exists(filename)
|
||||
self.assertFalse(is_there)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -146,7 +146,7 @@ class TestList(unittest.TestCase):
|
||||
'something')
|
||||
# except if default is especified
|
||||
self.assertEqual(b(0, default=0, otherwise=lambda: 'something'), 0)
|
||||
|
||||
|
||||
def test_listgetitem(self):
|
||||
'''Mantains list behaviour.'''
|
||||
a = List((1, 2, 3))
|
||||
|
||||
+39
-30
@@ -13,7 +13,7 @@ from utils import compare
|
||||
|
||||
import hashlib
|
||||
from hashlib import md5, sha1, sha224, sha256, sha384, sha512
|
||||
from utils import simple_hash, get_digest
|
||||
from utils import simple_hash, get_digest, secure_dumps, secure_loads
|
||||
|
||||
|
||||
class TestUtils(unittest.TestCase):
|
||||
@@ -24,68 +24,77 @@ class TestUtils(unittest.TestCase):
|
||||
|
||||
data = md5_hash("web2py rocks")
|
||||
self.assertEqual(data, '79509f3246a2824dee64635303e99204')
|
||||
|
||||
|
||||
def test_compare(self):
|
||||
""" Tests the compare funciton """
|
||||
|
||||
|
||||
a, b = 'test123', 'test123'
|
||||
compare_result_true = compare(a, b)
|
||||
self.assertTrue(compare_result_true)
|
||||
|
||||
|
||||
a, b = 'test123', 'test456'
|
||||
compare_result_false = compare(a, b)
|
||||
self.assertFalse(compare_result_false)
|
||||
|
||||
|
||||
def test_simple_hash(self):
|
||||
""" Tests the simple_hash function """
|
||||
|
||||
|
||||
# no key, no salt, md5
|
||||
data_md5 = simple_hash('web2py rocks!', key='', salt='', digest_alg='md5')
|
||||
self.assertEqual(data_md5, '37d95defba6c8834cb8cae86ee888568')
|
||||
|
||||
|
||||
# no key, no salt, sha1
|
||||
data_sha1 = simple_hash('web2py rocks!', key='', salt='', digest_alg='sha1')
|
||||
self.assertEqual(data_sha1, '00489a46753d8db260c71542611cdef80652c4b7')
|
||||
|
||||
|
||||
# no key, no salt, sha224
|
||||
data_sha224 = simple_hash('web2py rocks!', key='', salt='', digest_alg='sha224')
|
||||
self.assertEqual(data_sha224, '84d7054271842c2c17983baa2b1447e0289d101140a8c002d49d60da')
|
||||
|
||||
|
||||
# no key, no salt, sha256
|
||||
data_sha256 = simple_hash('web2py rocks!', key='', salt='', digest_alg='sha256')
|
||||
self.assertEqual(data_sha256, '0849f224d8deb267e4598702aaec1bd749e6caec90832469891012a4be24af08')
|
||||
|
||||
|
||||
# no key, no salt, sha384
|
||||
data_sha384 = simple_hash('web2py rocks!', key='', salt='', digest_alg='sha384')
|
||||
self.assertEqual(data_sha384,
|
||||
self.assertEqual(data_sha384,
|
||||
'3cffaf39371adbe84eb10f588d2718207d8e965e9172a27a278321b86977351376ae79f92e91d8c58cad86c491282d5f')
|
||||
|
||||
|
||||
# no key, no salt, sha512
|
||||
data_sha512 = simple_hash('web2py rocks!', key='', salt='', digest_alg='sha512')
|
||||
self.assertEqual(data_sha512, 'fa3237f594743e1d7b6c800bb134b3255cf4a98ab8b01e2ec23256328c9f8059'
|
||||
'64fdef25a038d6cc3fda1b2fb45d66461eeed5c4669e506ec8bdfee71348db7e')
|
||||
|
||||
def test_secure_dumps_and_loads(self):
|
||||
""" Tests secure_dumps and secure_loads"""
|
||||
testobj = {'a': 1, 'b': 2}
|
||||
testkey = 'mysecret'
|
||||
secured = secure_dumps(testobj, testkey)
|
||||
original = secure_loads(secured, testkey)
|
||||
self.assertEqual(testobj, original)
|
||||
self.assertTrue(isinstance(secured, basestring))
|
||||
self.assertTrue(':' in secured)
|
||||
|
||||
large_testobj = [x for x in range(1000)]
|
||||
secured_comp = secure_dumps(large_testobj, testkey, compression_level=9)
|
||||
original_comp = secure_loads(secured_comp, testkey, compression_level=9)
|
||||
self.assertEqual(large_testobj, original_comp)
|
||||
secured = secure_dumps(large_testobj, testkey)
|
||||
self.assertTrue(len(secured_comp) < len(secured))
|
||||
|
||||
class TestPack(unittest.TestCase):
|
||||
""" Tests the compileapp.py module """
|
||||
testhash = 'myhash'
|
||||
secured = secure_dumps(testobj, testkey, testhash)
|
||||
original = secure_loads(secured, testkey, testhash)
|
||||
self.assertEqual(testobj, original)
|
||||
|
||||
def test_compile(self):
|
||||
from compileapp import compile_application, remove_compiled_application
|
||||
from gluon.fileutils import w2p_pack, w2p_unpack
|
||||
import os
|
||||
#apps = ['welcome', 'admin', 'examples']
|
||||
apps = ['welcome']
|
||||
for appname in apps:
|
||||
appname_path = os.path.join(os.getcwd(), 'applications', appname)
|
||||
compile_application(appname_path)
|
||||
remove_compiled_application(appname_path)
|
||||
test_path = os.path.join(os.getcwd(), "%s.w2p" % appname)
|
||||
unpack_path = os.path.join(os.getcwd(), 'unpack', appname)
|
||||
w2p_pack(test_path, appname_path, compiled=True, filenames=None)
|
||||
w2p_pack(test_path, appname_path, compiled=False, filenames=None)
|
||||
w2p_unpack(test_path, unpack_path)
|
||||
return
|
||||
wrong1 = secure_loads(secured, testkey, 'wronghash')
|
||||
self.assertEqual(wrong1, None)
|
||||
wrong2 = secure_loads(secured, 'wrongkey', testhash)
|
||||
self.assertEqual(wrong2, None)
|
||||
wrong3 = secure_loads(secured, 'wrongkey', 'wronghash')
|
||||
self.assertEqual(wrong3, None)
|
||||
wrong4 = secure_loads('abc', 'a', 'b')
|
||||
self.assertEqual(wrong4, None)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
||||
@@ -618,6 +618,9 @@ class TestValidators(unittest.TestCase):
|
||||
self.assertEqual(rtn, (u'hell', None))
|
||||
rtn = IS_MATCH('hell', is_unicode=True)(u'hell')
|
||||
self.assertEqual(rtn, (u'hell', None))
|
||||
# regr test for #1044
|
||||
rtn = IS_MATCH('hello')(u'\xff')
|
||||
self.assertEqual(rtn, (u'\xff', 'Invalid expression'))
|
||||
|
||||
|
||||
def test_IS_EQUAL_TO(self):
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
+746
-283
File diff suppressed because it is too large
Load Diff
+9
-1
@@ -64,6 +64,10 @@ else:
|
||||
except (ImportError, ValueError):
|
||||
HAVE_PBKDF2 = False
|
||||
|
||||
HAVE_COMPARE_DIGEST = False
|
||||
if hasattr(hmac, 'compare_digest'):
|
||||
HAVE_COMPARE_DIGEST = True
|
||||
|
||||
logger = logging.getLogger("web2py")
|
||||
|
||||
|
||||
@@ -77,6 +81,8 @@ def AES_new(key, IV=None):
|
||||
|
||||
def compare(a, b):
|
||||
""" Compares two strings and not vulnerable to timing attacks """
|
||||
if HAVE_COMPARE_DIGEST:
|
||||
return hmac.compare_digest(a, b)
|
||||
if len(a) != len(b):
|
||||
return False
|
||||
result = 0
|
||||
@@ -143,6 +149,7 @@ DIGEST_ALG_BY_SIZE = {
|
||||
512 / 4: 'sha512',
|
||||
}
|
||||
|
||||
|
||||
def get_callable_argspec(fn):
|
||||
if inspect.isfunction(fn) or inspect.ismethod(fn):
|
||||
inspectable = fn
|
||||
@@ -154,6 +161,7 @@ def get_callable_argspec(fn):
|
||||
inspectable = fn
|
||||
return inspect.getargspec(inspectable)
|
||||
|
||||
|
||||
def pad(s, n=32, padchar=' '):
|
||||
return s + (32 - len(s) % 32) * padchar
|
||||
|
||||
@@ -172,7 +180,7 @@ def secure_dumps(data, encryption_key, hash_key=None, compression_level=None):
|
||||
|
||||
|
||||
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
|
||||
if not ':' in data:
|
||||
if ':' not in data:
|
||||
return None
|
||||
if not hash_key:
|
||||
hash_key = sha1(encryption_key).hexdigest()
|
||||
|
||||
+107
-61
@@ -22,7 +22,7 @@ import decimal
|
||||
import unicodedata
|
||||
from cStringIO import StringIO
|
||||
from gluon.utils import simple_hash, web2py_uuid, DIGEST_ALG_BY_SIZE
|
||||
from pydal.objects import FieldVirtual, FieldMethod
|
||||
from pydal.objects import Field, FieldVirtual, FieldMethod
|
||||
|
||||
regex_isint = re.compile('^[+-]?\d+$')
|
||||
|
||||
@@ -201,12 +201,15 @@ class IS_MATCH(Validator):
|
||||
|
||||
def __call__(self, value):
|
||||
if self.is_unicode:
|
||||
if isinstance(value,unicode):
|
||||
match = self.regex.search(value)
|
||||
else:
|
||||
if not isinstance(value, unicode):
|
||||
match = self.regex.search(str(value).decode('utf8'))
|
||||
else:
|
||||
match = self.regex.search(value)
|
||||
else:
|
||||
match = self.regex.search(str(value))
|
||||
if not isinstance(value, unicode):
|
||||
match = self.regex.search(str(value))
|
||||
else:
|
||||
match = self.regex.search(value.encode('utf8'))
|
||||
if match is not None:
|
||||
return (self.extract and match.group() or value, None)
|
||||
return (value, translate(self.error_message))
|
||||
@@ -509,34 +512,44 @@ class IS_IN_DB(Validator):
|
||||
zero='',
|
||||
sort=False,
|
||||
_and=None,
|
||||
left=None
|
||||
left=None,
|
||||
delimiter=None,
|
||||
auto_add=False,
|
||||
):
|
||||
from pydal.objects import Table
|
||||
if isinstance(field, Table):
|
||||
field = field._id
|
||||
|
||||
if hasattr(dbset, 'define_table'):
|
||||
self.dbset = dbset()
|
||||
else:
|
||||
self.dbset = dbset
|
||||
|
||||
if isinstance(field, Table):
|
||||
field = field._id
|
||||
elif isinstance(field, str):
|
||||
items = field.split('.')
|
||||
if len(items)==1: items+=['id']
|
||||
field = self.dbset.db[items[0]][items[1]]
|
||||
|
||||
(ktable, kfield) = str(field).split('.')
|
||||
if not label:
|
||||
label = '%%(%s)s' % kfield
|
||||
if isinstance(label, str):
|
||||
if regex1.match(str(label)):
|
||||
label = '%%(%s)s' % str(label).split('.')[-1]
|
||||
ks = regex2.findall(label)
|
||||
if kfield not in ks:
|
||||
ks += [kfield]
|
||||
fields = ks
|
||||
fieldnames = regex2.findall(label)
|
||||
if kfield not in fieldnames:
|
||||
fieldnames.append(kfield) # kfield must be last
|
||||
elif isinstance(label, Field):
|
||||
fieldnames = [label.name, kfield] # kfield must be last
|
||||
label = '%%(%s)s' % label.name
|
||||
elif callable(label):
|
||||
fieldnames = '*'
|
||||
else:
|
||||
ks = [kfield]
|
||||
fields = 'all'
|
||||
self.fields = fields
|
||||
raise NotImplementedError
|
||||
self.field = field # the lookup field
|
||||
self.fieldnames = fieldnames # fields requires to build the formatting
|
||||
self.label = label
|
||||
self.ktable = ktable
|
||||
self.kfield = kfield
|
||||
self.ks = ks
|
||||
self.error_message = error_message
|
||||
self.theset = None
|
||||
self.orderby = orderby
|
||||
@@ -548,6 +561,8 @@ class IS_IN_DB(Validator):
|
||||
self.sort = sort
|
||||
self._and = _and
|
||||
self.left = left
|
||||
self.delimiter = delimiter
|
||||
self.auto_add = auto_add
|
||||
|
||||
def set_self_id(self, id):
|
||||
if self._and:
|
||||
@@ -555,10 +570,10 @@ class IS_IN_DB(Validator):
|
||||
|
||||
def build_set(self):
|
||||
table = self.dbset.db[self.ktable]
|
||||
if self.fields == 'all':
|
||||
if self.fieldnames == '*':
|
||||
fields = [f for f in table]
|
||||
else:
|
||||
fields = [table[k] for k in self.fields]
|
||||
fields = [table[k] for k in self.fieldnames]
|
||||
ignore = (FieldVirtual, FieldMethod)
|
||||
fields = filter(lambda f: not isinstance(f, ignore), fields)
|
||||
if self.dbset.db._dbname != 'gae':
|
||||
@@ -591,18 +606,42 @@ class IS_IN_DB(Validator):
|
||||
items.insert(0, ('', self.zero))
|
||||
return items
|
||||
|
||||
def maybe_add(self, table, fieldname, value):
|
||||
d = {fieldname: value}
|
||||
record = table(**d)
|
||||
if record:
|
||||
return record.id
|
||||
else:
|
||||
return table.insert(**d)
|
||||
|
||||
def __call__(self, value):
|
||||
table = self.dbset.db[self.ktable]
|
||||
field = table[self.kfield]
|
||||
|
||||
if self.multiple:
|
||||
if self._and:
|
||||
raise NotImplementedError
|
||||
if isinstance(value, list):
|
||||
values = value
|
||||
elif self.delimiter:
|
||||
values = value.split(self.delimiter) # because of autocomplete
|
||||
elif value:
|
||||
values = [value]
|
||||
else:
|
||||
values = []
|
||||
|
||||
if self.field.type in ('id','integer'):
|
||||
new_values = []
|
||||
for value in values:
|
||||
if isinstance(value,(int,long)) or value.isdigit():
|
||||
value = int(value)
|
||||
elif self.auto_add:
|
||||
value = self.maybe_add(table, self.fieldnames[0], value)
|
||||
else:
|
||||
return (values, translate(self.error_message))
|
||||
new_values.append(value)
|
||||
values = new_values
|
||||
|
||||
if isinstance(self.multiple, (tuple, list)) and \
|
||||
not self.multiple[0] <= len(values) < self.multiple[1]:
|
||||
return (values, translate(self.error_message))
|
||||
@@ -621,18 +660,32 @@ class IS_IN_DB(Validator):
|
||||
return (values, None)
|
||||
elif count(values) == len(values):
|
||||
return (values, None)
|
||||
elif self.theset:
|
||||
if str(value) in self.theset:
|
||||
if self._and:
|
||||
return self._and(value)
|
||||
else:
|
||||
return (value, None)
|
||||
else:
|
||||
if self.dbset(field == value).count():
|
||||
if self._and:
|
||||
return self._and(value)
|
||||
if self.field.type in ('id','integer'):
|
||||
if isinstance(value,(int,long)) or value.isdigit():
|
||||
value = int(value)
|
||||
elif self.auto_add:
|
||||
value = self.maybe_add(table, self.fieldnames[0], value)
|
||||
else:
|
||||
return (value, None)
|
||||
return (value, translate(self.error_message))
|
||||
|
||||
try:
|
||||
value = int(value)
|
||||
except TypeError:
|
||||
return (values, translate(self.error_message))
|
||||
|
||||
if self.theset:
|
||||
if str(value) in self.theset:
|
||||
if self._and:
|
||||
return self._and(value)
|
||||
else:
|
||||
return (value, None)
|
||||
else:
|
||||
if self.dbset(field == value).count():
|
||||
if self._and:
|
||||
return self._and(value)
|
||||
else:
|
||||
return (value, None)
|
||||
return (value, translate(self.error_message))
|
||||
|
||||
|
||||
@@ -694,7 +747,7 @@ class IS_NOT_IN_DB(Validator):
|
||||
return (value, translate(self.error_message))
|
||||
else:
|
||||
row = subset.select(table._id, field, limitby=(0, 1), orderby_on_limitby=False).first()
|
||||
if row and str(row.id) != str(id):
|
||||
if row and str(row[table._id]) != str(id):
|
||||
return (value, translate(self.error_message))
|
||||
return (value, None)
|
||||
|
||||
@@ -2165,29 +2218,22 @@ class IS_DATE(Validator):
|
||||
INPUT(_type='text', _name='name', requires=IS_DATE())
|
||||
|
||||
date has to be in the ISO8960 format YYYY-MM-DD
|
||||
timezome must be None or a pytz.timezone("America/Chicago") object
|
||||
"""
|
||||
|
||||
def __init__(self, format='%Y-%m-%d',
|
||||
error_message='Enter date as %(format)s',
|
||||
timezone=None):
|
||||
error_message='Enter date as %(format)s'):
|
||||
self.format = translate(format)
|
||||
self.error_message = str(error_message)
|
||||
self.timezone = timezone
|
||||
self.extremes = {}
|
||||
|
||||
def __call__(self, value):
|
||||
ovalue = value
|
||||
if isinstance(value, datetime.date):
|
||||
if self.timezone is not None:
|
||||
value = value - datetime.timedelta(seconds=self.timezone*3600)
|
||||
return (value, None)
|
||||
try:
|
||||
(y, m, d, hh, mm, ss, t0, t1, t2) = \
|
||||
time.strptime(value, str(self.format))
|
||||
value = datetime.date(y, m, d)
|
||||
if self.timezone is not None:
|
||||
value = self.timezone.localize(value).astimezone(utc)
|
||||
return (value, None)
|
||||
except:
|
||||
self.extremes.update(IS_DATETIME.nice(self.format))
|
||||
@@ -2203,11 +2249,7 @@ class IS_DATE(Validator):
|
||||
format = format.replace('%Y', y)
|
||||
if year < 1900:
|
||||
year = 2000
|
||||
if self.timezone is not None:
|
||||
d = datetime.datetime(year, value.month, value.day)
|
||||
d = d.replace(tzinfo=utc).astimezone(self.timezone)
|
||||
else:
|
||||
d = datetime.date(year, value.month, value.day)
|
||||
d = datetime.date(year, value.month, value.day)
|
||||
return d.strftime(format)
|
||||
|
||||
|
||||
@@ -2258,7 +2300,8 @@ class IS_DATETIME(Validator):
|
||||
time.strptime(value, str(self.format))
|
||||
value = datetime.datetime(y, m, d, hh, mm, ss)
|
||||
if self.timezone is not None:
|
||||
value = self.timezone.localize(value).astimezone(utc)
|
||||
# TODO: https://github.com/web2py/web2py/issues/1094 (temporary solution)
|
||||
value = self.timezone.localize(value).astimezone(utc).replace(tzinfo=None)
|
||||
return (value, None)
|
||||
except:
|
||||
self.extremes.update(IS_DATETIME.nice(self.format))
|
||||
@@ -2307,8 +2350,7 @@ class IS_DATE_IN_RANGE(IS_DATE):
|
||||
minimum=None,
|
||||
maximum=None,
|
||||
format='%Y-%m-%d',
|
||||
error_message=None,
|
||||
timezone=None):
|
||||
error_message=None):
|
||||
self.minimum = minimum
|
||||
self.maximum = maximum
|
||||
if error_message is None:
|
||||
@@ -2320,8 +2362,7 @@ class IS_DATE_IN_RANGE(IS_DATE):
|
||||
error_message = "Enter date in range %(min)s %(max)s"
|
||||
IS_DATE.__init__(self,
|
||||
format=format,
|
||||
error_message=error_message,
|
||||
timezone=timezone)
|
||||
error_message=error_message)
|
||||
self.extremes = dict(min=self.formatter(minimum),
|
||||
max=self.formatter(maximum))
|
||||
|
||||
@@ -2847,9 +2888,11 @@ class CRYPT(object):
|
||||
self.salt = salt
|
||||
|
||||
def __call__(self, value):
|
||||
value = value and value[:self.max_length]
|
||||
if len(value) < self.min_length:
|
||||
v = value and str(value)[:self.max_length]
|
||||
if not v or len(v) < self.min_length:
|
||||
return ('', translate(self.error_message))
|
||||
if isinstance(value, LazyCrypt):
|
||||
return (value, None)
|
||||
return (LazyCrypt(self, value), None)
|
||||
|
||||
# entropy calculator for IS_STRONG
|
||||
@@ -3377,7 +3420,8 @@ class IS_IPV4(Validator):
|
||||
(number == self.localhost)):
|
||||
ok = False
|
||||
if not (self.is_private is None or self.is_private ==
|
||||
(sum([number[0] <= number <= number[1] for number in self.private]) > 0)):
|
||||
(sum([private_number[0] <= number <= private_number[1]
|
||||
for private_number in self.private]) > 0)):
|
||||
ok = False
|
||||
if not (self.is_automatic is None or self.is_automatic ==
|
||||
(self.automatic[0] <= number <= self.automatic[1])):
|
||||
@@ -3482,7 +3526,7 @@ class IS_IPV6(Validator):
|
||||
from gluon.contrib import ipaddr as ipaddress
|
||||
|
||||
try:
|
||||
ip = ipaddress.IPv6Address(value)
|
||||
ip = ipaddress.IPv6Address(value.decode('utf-8'))
|
||||
ok = True
|
||||
except ipaddress.AddressValueError:
|
||||
return (value, translate(self.error_message))
|
||||
@@ -3494,7 +3538,7 @@ class IS_IPV6(Validator):
|
||||
self.subnets = [self.subnets]
|
||||
for network in self.subnets:
|
||||
try:
|
||||
ipnet = ipaddress.IPv6Network(network)
|
||||
ipnet = ipaddress.IPv6Network(network.decode('utf-8'))
|
||||
except (ipaddress.NetmaskValueError, ipaddress.AddressValueError):
|
||||
return (value, translate('invalid subnet provided'))
|
||||
if ip in ipnet:
|
||||
@@ -3703,20 +3747,22 @@ class IS_IPADDRESS(Validator):
|
||||
|
||||
def __call__(self, value):
|
||||
try:
|
||||
import ipaddress
|
||||
from ipaddress import ip_address as IPAddress
|
||||
from ipaddress import IPv6Address, IPv4Address
|
||||
except ImportError:
|
||||
from gluon.contrib import ipaddr as ipaddress
|
||||
from gluon.contrib.ipaddr import (IPAddress, IPv4Address,
|
||||
IPv6Address)
|
||||
|
||||
try:
|
||||
ip = ipaddress.IPAddress(value)
|
||||
except ValueError, e:
|
||||
ip = IPAddress(value.decode('utf-8'))
|
||||
except ValueError:
|
||||
return (value, translate(self.error_message))
|
||||
|
||||
if self.is_ipv4 and isinstance(ip, ipaddress.IPv6Address):
|
||||
if self.is_ipv4 and isinstance(ip, IPv6Address):
|
||||
retval = (value, translate(self.error_message))
|
||||
elif self.is_ipv6 and isinstance(ip, ipaddress.IPv4Address):
|
||||
elif self.is_ipv6 and isinstance(ip, IPv4Address):
|
||||
retval = (value, translate(self.error_message))
|
||||
elif self.is_ipv4 or isinstance(ip, ipaddress.IPv4Address):
|
||||
elif self.is_ipv4 or isinstance(ip, IPv4Address):
|
||||
retval = IS_IPV4(
|
||||
minip=self.minip,
|
||||
maxip=self.maxip,
|
||||
@@ -3726,7 +3772,7 @@ class IS_IPADDRESS(Validator):
|
||||
is_automatic=self.is_automatic,
|
||||
error_message=self.error_message
|
||||
)(value)
|
||||
elif self.is_ipv6 or isinstance(ip, ipaddress.IPv6Address):
|
||||
elif self.is_ipv6 or isinstance(ip, IPv6Address):
|
||||
retval = IS_IPV6(
|
||||
is_private=self.is_private,
|
||||
is_link_local=self.is_link_local,
|
||||
|
||||
+30
-20
@@ -40,8 +40,8 @@ ProgramInfo = '''%s
|
||||
%s
|
||||
%s''' % (ProgramName, ProgramAuthor, ProgramVersion)
|
||||
|
||||
if not sys.version[:3] in ['2.5', '2.6', '2.7']:
|
||||
msg = 'Warning: web2py requires Python 2.5, 2.6 or 2.7 but you are running:\n%s'
|
||||
if not sys.version[:3] in ['2.6', '2.7']:
|
||||
msg = 'Warning: web2py requires Python 2.6 or 2.7 but you are running:\n%s'
|
||||
msg = msg % sys.version
|
||||
sys.stderr.write(msg)
|
||||
|
||||
@@ -56,8 +56,8 @@ def run_system_tests(options):
|
||||
major_version = sys.version_info[0]
|
||||
minor_version = sys.version_info[1]
|
||||
if major_version == 2:
|
||||
if minor_version in (5, 6):
|
||||
sys.stderr.write("Python 2.5 or 2.6\n")
|
||||
if minor_version in (6,):
|
||||
sys.stderr.write('Python 2.6\n')
|
||||
ret = subprocess.call(['unit2', '-v', 'gluon.tests'])
|
||||
elif minor_version in (7,):
|
||||
call_args = [sys.executable, '-m', 'unittest', '-v', 'gluon.tests']
|
||||
@@ -150,7 +150,7 @@ class web2pyDialog(object):
|
||||
self.scheduler_processes = {}
|
||||
self.menu = Tkinter.Menu(self.root)
|
||||
servermenu = Tkinter.Menu(self.menu, tearoff=0)
|
||||
httplog = os.path.join(self.options.folder, 'httpserver.log')
|
||||
httplog = os.path.join(self.options.folder, self.options.log_filename)
|
||||
iconphoto = os.path.join('extras', 'icons', 'web2py.gif')
|
||||
if os.path.exists(iconphoto):
|
||||
img = Tkinter.PhotoImage(file=iconphoto)
|
||||
@@ -225,9 +225,9 @@ class web2pyDialog(object):
|
||||
text=str(ProgramVersion + "\n" + ProgramAuthor),
|
||||
font=('Helvetica', 11), justify=Tkinter.CENTER,
|
||||
foreground='#195866', background=bg_color,
|
||||
height=3).pack( side='top',
|
||||
fill='both',
|
||||
expand='yes')
|
||||
height=3).pack(side='top',
|
||||
fill='both',
|
||||
expand='yes')
|
||||
|
||||
self.bannerarea.after(1000, self.update_canvas)
|
||||
|
||||
@@ -322,11 +322,15 @@ class web2pyDialog(object):
|
||||
self.tb = None
|
||||
|
||||
def update_schedulers(self, start=False):
|
||||
applications_folder = os.path.join(self.options.folder, 'applications')
|
||||
apps = []
|
||||
available_apps = [arq for arq in os.listdir('applications/')]
|
||||
available_apps = [arq for arq in available_apps
|
||||
if os.path.exists(
|
||||
'applications/%s/models/scheduler.py' % arq)]
|
||||
##FIXME - can't start scheduler in the correct dir from Tk
|
||||
if self.options.folder:
|
||||
return
|
||||
available_apps = [
|
||||
arq for arq in os.listdir(applications_folder)
|
||||
if os.path.exists(os.path.join(applications_folder, arq, 'models', 'scheduler.py'))
|
||||
]
|
||||
if start:
|
||||
# the widget takes care of starting the scheduler
|
||||
if self.options.scheduler and self.options.with_scheduler:
|
||||
@@ -414,9 +418,11 @@ class web2pyDialog(object):
|
||||
def connect_pages(self):
|
||||
""" Connects pages """
|
||||
# reset the menu
|
||||
available_apps = [arq for arq in os.listdir('applications/')
|
||||
if os.path.exists(
|
||||
'applications/%s/__init__.py' % arq)]
|
||||
applications_folder = os.path.join(self.options.folder, 'applications')
|
||||
available_apps = [
|
||||
arq for arq in os.listdir(applications_folder)
|
||||
if os.path.exists(os.path.join(applications_folder, arq, '__init__.py'))
|
||||
]
|
||||
self.pagesmenu.delete(0, len(available_apps))
|
||||
for arq in available_apps:
|
||||
url = self.url + arq
|
||||
@@ -552,14 +558,15 @@ class web2pyDialog(object):
|
||||
def update_canvas(self):
|
||||
""" Updates canvas """
|
||||
|
||||
httplog = os.path.join(self.options.folder, self.options.log_filename)
|
||||
try:
|
||||
t1 = os.path.getsize('httpserver.log')
|
||||
t1 = os.path.getsize(httplog)
|
||||
except:
|
||||
self.canvas.after(1000, self.update_canvas)
|
||||
return
|
||||
|
||||
try:
|
||||
fp = open('httpserver.log', 'r')
|
||||
fp = open(httplog, 'r')
|
||||
fp.seek(self.t0)
|
||||
data = fp.read(t1 - self.t0)
|
||||
fp.close()
|
||||
@@ -1051,6 +1058,8 @@ def start_schedulers(options):
|
||||
apps = options.scheduler_groups
|
||||
code = "from gluon import current;current._scheduler.loop()"
|
||||
logging.getLogger().setLevel(options.debuglevel)
|
||||
if options.folder:
|
||||
os.chdir(options.folder)
|
||||
if len(apps) == 1 and not options.with_scheduler:
|
||||
app_, code = get_code_for_scheduler(apps[0], options)
|
||||
if not app_:
|
||||
@@ -1117,11 +1126,12 @@ def start(cron=True):
|
||||
if hasattr(options, key):
|
||||
setattr(options, key, getattr(options2, key))
|
||||
|
||||
logfile0 = os.path.join('extras', 'examples', 'logging.example.conf')
|
||||
if not os.path.exists('logging.conf') and os.path.exists(logfile0):
|
||||
logfile0 = os.path.join('examples', 'logging.example.conf')
|
||||
logfile1 = os.path.join(options.folder, 'logging.conf')
|
||||
if not os.path.exists(logfile1) and os.path.exists(logfile0):
|
||||
import shutil
|
||||
sys.stdout.write("Copying logging.conf.example to logging.conf ... ")
|
||||
shutil.copyfile('logging.example.conf', logfile0)
|
||||
shutil.copyfile(logfile0, logfile1)
|
||||
sys.stdout.write("OK\n")
|
||||
|
||||
# ## if -T run doctests (no cron)
|
||||
|
||||
@@ -74,7 +74,7 @@ class ServiceBase(Base):
|
||||
key = config['https_key']
|
||||
cert = config['https_cert']
|
||||
if key != '' and cert != '':
|
||||
interfaces.append('%s:%s:%s:%s' % (ip, port, cert, key))
|
||||
interfaces.append('%s:%s:%s:%s' % (ip, port, key, cert))
|
||||
ports.append(ports)
|
||||
if len(interfaces) == 0:
|
||||
sys.exit('Configuration error. Must have settings for http and/or https')
|
||||
@@ -92,7 +92,7 @@ class ServiceBase(Base):
|
||||
interfaces = ';'.join(interfaces)
|
||||
args.append('--interfaces=%s' % interfaces)
|
||||
|
||||
if 'log_filename' in config.key():
|
||||
if 'log_filename' in config.keys():
|
||||
log_filename = config['log_filename']
|
||||
args.append('--log_filename=%s' % log_filename)
|
||||
|
||||
|
||||
@@ -195,7 +195,7 @@ NameVirtualHost *:80
|
||||
NameVirtualHost *:443
|
||||
|
||||
<VirtualHost *:80>
|
||||
WSGIDaemonProcess web2py user=apache group=apache processes=1 threads=1
|
||||
WSGIDaemonProcess web2py user=apache group=apache
|
||||
WSGIProcessGroup web2py
|
||||
WSGIScriptAlias / /opt/web-apps/web2py/wsgihandler.py
|
||||
WSGIPassAuthorization On
|
||||
|
||||
@@ -299,7 +299,7 @@ NameVirtualHost *:80
|
||||
NameVirtualHost *:443
|
||||
|
||||
<VirtualHost *:80>
|
||||
WSGIDaemonProcess web2py user=apache group=apache processes=1 threads=1
|
||||
WSGIDaemonProcess web2py user=apache group=apache
|
||||
WSGIProcessGroup web2py
|
||||
WSGIScriptAlias / /opt/web-apps/web2py/wsgihandler.py
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
#!/bin/bash
|
||||
echo "This script will:
|
||||
1) Install modules needed to run web2py on Fedora and CentOS/RHEL
|
||||
2) Install Python 2.6 to /opt and recompile wsgi if not provided
|
||||
@@ -27,7 +28,7 @@ Press ENTER to continue...[ctrl+C to abort]"
|
||||
|
||||
read CONFIRM
|
||||
|
||||
#!/bin/bash
|
||||
|
||||
|
||||
###
|
||||
### Phase 0 - This may get messy. Lets work from a temporary directory
|
||||
@@ -301,7 +302,7 @@ NameVirtualHost *:80
|
||||
NameVirtualHost *:443
|
||||
|
||||
<VirtualHost *:80>
|
||||
WSGIDaemonProcess web2py user=apache group=apache processes=1 threads=1
|
||||
WSGIDaemonProcess web2py user=apache group=apache
|
||||
WSGIProcessGroup web2py
|
||||
WSGIScriptAlias / /opt/web-apps/web2py/wsgihandler.py
|
||||
WSGIPassAuthorization On
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
#!/bin/bash
|
||||
echo "This script will:
|
||||
1) install all modules need to run web2py on Ubuntu 14.04
|
||||
2) install web2py in /home/www-data/
|
||||
@@ -12,7 +13,7 @@ Press a key to continue...[ctrl+C to abort]"
|
||||
|
||||
read CONFIRM
|
||||
|
||||
#!/bin/bash
|
||||
|
||||
# optional
|
||||
# dpkg-reconfigure console-setup
|
||||
# dpkg-reconfigure timezoneconf
|
||||
@@ -84,13 +85,31 @@ openssl x509 -noout -fingerprint -text < /etc/apache2/ssl/self_signed.cert > /et
|
||||
echo "rewriting your apache config file to use mod_wsgi"
|
||||
echo "================================================="
|
||||
echo '
|
||||
WSGIDaemonProcess web2py user=www-data group=www-data processes=1 threads=1
|
||||
WSGIDaemonProcess web2py user=www-data group=www-data
|
||||
|
||||
<VirtualHost *:80>
|
||||
|
||||
RewriteEngine On
|
||||
RewriteCond %{HTTPS} !=on
|
||||
RewriteRule ^/?(.*) https://%{SERVER_NAME}/$1 [R,L]
|
||||
WSGIProcessGroup web2py
|
||||
WSGIScriptAlias / /home/www-data/web2py/wsgihandler.py
|
||||
WSGIPassAuthorization On
|
||||
|
||||
<Directory /home/www-data/web2py>
|
||||
AllowOverride None
|
||||
Require all denied
|
||||
<Files wsgihandler.py>
|
||||
Require all granted
|
||||
</Files>
|
||||
</Directory>
|
||||
|
||||
AliasMatch ^/([^/]+)/static/(?:_[\d]+.[\d]+.[\d]+/)?(.*) \
|
||||
/home/www-data/web2py/applications/$1/static/$2
|
||||
|
||||
<Directory /home/www-data/web2py/applications/*/static/>
|
||||
Options -Indexes
|
||||
ExpiresActive On
|
||||
ExpiresDefault "access plus 1 hour"
|
||||
Require all granted
|
||||
</Directory>
|
||||
|
||||
CustomLog /var/log/apache2/access.log common
|
||||
ErrorLog /var/log/apache2/error.log
|
||||
|
||||
@@ -3,6 +3,8 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
from multiprocessing import freeze_support
|
||||
# import gluon.import_all ##### This should be uncommented for py2exe.py
|
||||
|
||||
if hasattr(sys, 'frozen'):
|
||||
path = os.path.dirname(os.path.abspath(sys.executable)) # for py2exe
|
||||
@@ -14,17 +16,14 @@ os.chdir(path)
|
||||
|
||||
sys.path = [path] + [p for p in sys.path if not p == path]
|
||||
|
||||
# import gluon.import_all ##### This should be uncommented for py2exe.py
|
||||
# important that this import is after the os.chdir
|
||||
|
||||
import gluon.widget
|
||||
|
||||
# Start Web2py and Web2py cron service!
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
from multiprocessing import freeze_support
|
||||
freeze_support()
|
||||
except:
|
||||
sys.stderr.write('Sorry, -K only supported for python 2.6-2.7\n')
|
||||
if os.environ.has_key("COVERAGE_PROCESS_START"):
|
||||
freeze_support()
|
||||
if 'COVERAGE_PROCESS_START' in os.environ:
|
||||
try:
|
||||
import coverage
|
||||
coverage.process_startup()
|
||||
|
||||
Reference in New Issue
Block a user