Compare commits
222 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 5bc5d0496e | |||
| fe34d78578 | |||
| 35840bc572 | |||
| f840cdae5f | |||
| b36c38cc88 | |||
| 5958704509 | |||
| 0aa58c5f93 | |||
| c6cc06f6c0 | |||
| bda69b0e88 | |||
| 0cfbab6206 | |||
| 385bcf6988 | |||
| aaf1dd614a | |||
| 1c2358671d | |||
| 181546e49d | |||
| 57c5fb64f6 | |||
| 15bf3e2ede | |||
| b872cced33 | |||
| 2af5e02c5f | |||
| 1c281cc163 | |||
| e9de0766bc | |||
| 9e555ed4b5 | |||
| a78dce6778 | |||
| 01a0a4eb67 | |||
| c5c5b5708e | |||
| 6be1f624b9 | |||
| daf382c4fb | |||
| f76a780d50 | |||
| 15c3ac1cb9 | |||
| 3d4de72b9c | |||
| 8c1ca50205 | |||
| eb8cc3fc76 | |||
| 83b94b8207 | |||
| f396094daf | |||
| 16c0e1a4b8 | |||
| 1815864a67 | |||
| 261490c082 | |||
| ef5913a519 | |||
| 997b877766 | |||
| 132dfbcb19 | |||
| 3e1a918707 | |||
| e6de16b111 | |||
| 354e63d0fe | |||
| d7bc489e71 | |||
| 1a85953325 | |||
| 3b655c4b71 | |||
| 675b7e356f | |||
| 1db3758980 | |||
| c0b32eaeec | |||
| 80261f52ed | |||
| 29661ad881 | |||
| 81fbc2ea2f | |||
| 3da506eec7 | |||
| cbe37bf602 | |||
| 75a32a1cde | |||
| 528c27f852 | |||
| c5f699ebad | |||
| 3c87c84578 | |||
| e92a581c73 | |||
| 28d07ef471 | |||
| 687f9d0fd9 | |||
| 3051135774 | |||
| ce025a6b8e | |||
| 207f53fd6f | |||
| 39af574e7f | |||
| 9825bbc926 | |||
| 9132343820 | |||
| 9b490340e5 | |||
| 952890d9cc | |||
| 8d72074209 | |||
| d93810697f | |||
| 23ee6bd2cf | |||
| c0536d3b74 | |||
| 0024307e6c | |||
| 886f84778c | |||
| 5ea654ed06 | |||
| de55a729dc | |||
| 199e719838 | |||
| 53e9e3b3e0 | |||
| 0049f9e0c2 | |||
| fd6c36e5f0 | |||
| 3e1037a73a | |||
| a97ec075da | |||
| 4839df37e8 | |||
| 47165ed3b7 | |||
| 654cb650fb | |||
| aaa17250d9 | |||
| e9a89eff82 | |||
| a01dbbab49 | |||
| 5474c68994 | |||
| bda101d43f | |||
| 058930d42b | |||
| 2a8c04c69f | |||
| f64098af14 | |||
| b6993f7cc4 | |||
| 6228de8e10 | |||
| dff6bfb5b9 | |||
| a1524d4da4 | |||
| 3b9a5ee3b5 | |||
| 556609f5a2 | |||
| dafe900629 | |||
| 7a6bdf7cbd | |||
| a82d3f88b6 | |||
| 64e90a7250 | |||
| e36a1657fc | |||
| 58284e3674 | |||
| 5a83c3e6b7 | |||
| a22d5a4685 | |||
| 22506a6b03 | |||
| db9eeee1c3 | |||
| 5449f04148 | |||
| 9f405b2ab7 | |||
| e315db5cd8 | |||
| a6b50dcdcd | |||
| 67ba09af37 | |||
| 894babaed3 | |||
| c071bc964b | |||
| 63d8785918 | |||
| 4a4f22b654 | |||
| b906177efc | |||
| 7f1f6ae35f | |||
| f299205869 | |||
| 41deff244c | |||
| 7aa51fcbb0 | |||
| b9fe941dcc | |||
| b7b94ca6b5 | |||
| fc19a4dd39 | |||
| ac53ef12e3 | |||
| a3a3936d3a | |||
| 8eef404e29 | |||
| 50540b2f97 | |||
| 8ec68e393a | |||
| 98f245655b | |||
| fc38f460eb | |||
| 643748db02 | |||
| 274634a71a | |||
| 2b8add6778 | |||
| ba374dea2c | |||
| 85a0e8f1b0 | |||
| 794979abe1 | |||
| 36010cb86e | |||
| f10b1b93a9 | |||
| b2401a5923 | |||
| 9d4b2e66c4 | |||
| 9076971d75 | |||
| 64ae27862a | |||
| bd05dc68ea | |||
| ab14cc626b | |||
| a2347f54d6 | |||
| 601e928438 | |||
| 9091a5af25 | |||
| 8804a9ed77 | |||
| 99ddeb65fe | |||
| ca1092efd3 | |||
| f988f381f2 | |||
| ed25027499 | |||
| 4892bbe0bd | |||
| 39dec30f52 | |||
| 1f3030c75a | |||
| 4211c38ac5 | |||
| 0955bc0967 | |||
| ad5b9da4f4 | |||
| 1c7153c985 | |||
| 4448f01e47 | |||
| 297961739b | |||
| 7aa703dc5e | |||
| eeb06ce14f | |||
| 547ec7200e | |||
| 6cc55abc42 | |||
| d33091d76b | |||
| e730b11b78 | |||
| 989a635dbb | |||
| 7e0e7eb6c8 | |||
| b43ef65eb1 | |||
| b616ee6a32 | |||
| f255da79f2 | |||
| 45a689a812 | |||
| 4df82d3a6e | |||
| 15fe54bdca | |||
| b2bc1835c3 | |||
| 617abda1cc | |||
| 50662b6acc | |||
| c9494e2757 | |||
| 4e110c691f | |||
| 4cf878c9f7 | |||
| b36ab988cc | |||
| 97e1d1cd9b | |||
| 16da2edc6d | |||
| 88113637ae | |||
| 1e35262e67 | |||
| 05689aa526 | |||
| ae5069d9b1 | |||
| 7c8d91d4c5 | |||
| 47d9d47cff | |||
| ed4febf9db | |||
| 78764072fe | |||
| eaf358765a | |||
| 7b6f2bf896 | |||
| 11082987ea | |||
| db68a2a10e | |||
| b2d5775f82 | |||
| e40937bd8b | |||
| db01261c35 | |||
| 6b38fb769b | |||
| 8251aebdc5 | |||
| 6e9eeb50bc | |||
| 502327e531 | |||
| 5c07c511fa | |||
| 0721988b65 | |||
| f17493b52c | |||
| 4c45de7efd | |||
| c3f6fc8db8 | |||
| 4d42442c31 | |||
| 34a417cfa0 | |||
| bb199ad533 | |||
| be07572572 | |||
| 947dcbc226 | |||
| 8bbd22eba8 | |||
| 50f16744a7 | |||
| b20b81b8f5 | |||
| 7fde332392 | |||
| 625e4849ef | |||
| 5e5e649c28 |
@@ -49,6 +49,8 @@ applications/*/errors/*
|
||||
applications/*/cache/*
|
||||
applications/*/uploads/*
|
||||
applications/*/*.py[oc]
|
||||
applications/*/static/temp
|
||||
applications/*/progress.log
|
||||
applications/examples/static/epydoc
|
||||
applications/examples/static/sphinx
|
||||
applications/admin/cron/cron.master
|
||||
|
||||
@@ -22,6 +22,8 @@ before_script:
|
||||
- if [[ $TRAVIS_PYTHON_VERSION == '2.5' ]]; then pip install pysqlite; fi
|
||||
- if [[ $DB == mysql* ]]; then mysql -e 'create database test_w2p;'; fi
|
||||
- if [[ $DB == postgres* ]]; then psql -c 'create database test_w2p;' -U postgres; fi
|
||||
- if [[ $DB == postgres* ]]; then psql -c 'create extension postgis;' -U postgres -d test_w2p; fi
|
||||
|
||||
|
||||
# Install last sdk for app engine (update only whenever a new release is available)
|
||||
- if [[ $DB == google* ]]; then wget http://googleappengine.googlecode.com/files/google_appengine_1.8.9.zip -nv; fi
|
||||
|
||||
@@ -1,3 +1,21 @@
|
||||
## 2.9.12
|
||||
|
||||
- Tornado HTTPS support, thanks Diego
|
||||
- Modular DAL, thanks Giovanni
|
||||
- Added coverage support, thanks Niphlod
|
||||
- More tests, thanks Niphlod and Paolo Valleri
|
||||
- Added support for show_if in readonly sqlform, thanks Paolo
|
||||
- Improved scheduler, thanks Niphlod
|
||||
- Email timeout support
|
||||
- Made web2py's custom_import work with circular imports, thanks Jack Kuan
|
||||
- Added Portuguese, Catalan, and Burmese translations
|
||||
- Allow map_hyphen to work for application names, thanks Tim Nyborg
|
||||
- New module appconfig.py, thanks Niphlod
|
||||
- Added geospatial support to Teradata adaptor, thanks Andrew Willimott
|
||||
- Many bug fixes
|
||||
|
||||
|
||||
|
||||
## 2.9.6 - 2.9.10
|
||||
|
||||
- fixed support of GAE + SQL
|
||||
|
||||
@@ -24,13 +24,20 @@ epydoc:
|
||||
cp applications/examples/static/title.png applications/examples/static/epydoc
|
||||
tests:
|
||||
python web2py.py --run_system_tests
|
||||
coverage:
|
||||
coverage erase --rcfile=gluon/tests/coverage.ini
|
||||
export COVERAGE_PROCESS_START=gluon/tests/coverage.ini
|
||||
python web2py.py --run_system_tests --with_coverage
|
||||
coverage combine --rcfile=gluon/tests/coverage.ini
|
||||
sleep 1
|
||||
coverage html --rcfile=gluon/tests/coverage.ini
|
||||
update:
|
||||
wget -O gluon/contrib/feedparser.py http://feedparser.googlecode.com/svn/trunk/feedparser/feedparser.py
|
||||
wget -O gluon/contrib/simplejsonrpc.py http://rad2py.googlecode.com/hg/ide2py/simplejsonrpc.py
|
||||
echo "remember that pymysql was tweaked"
|
||||
src:
|
||||
### Use semantic versioning
|
||||
echo 'Version 2.9.11-stable+timestamp.'`date +%Y.%m.%d.%H.%M.%S` > VERSION
|
||||
echo 'Version 2.9.12-stable+timestamp.'`date +%Y.%m.%d.%H.%M.%S` > VERSION
|
||||
### rm -f all junk files
|
||||
make clean
|
||||
### clean up baisc apps
|
||||
@@ -54,7 +61,7 @@ src:
|
||||
### build web2py_src.zip
|
||||
echo '' > NEWINSTALL
|
||||
mv web2py_src.zip web2py_src_old.zip | echo 'no old'
|
||||
cd ..; zip -r web2py/web2py_src.zip web2py/web2py.py web2py/anyserver.py web2py/gluon/*.py web2py/gluon/contrib/* web2py/extras/* web2py/handlers/* web2py/examples/* web2py/README.markdown web2py/LICENSE web2py/CHANGELOG web2py/NEWINSTALL web2py/VERSION web2py/MANIFEST.in web2py/scripts/*.sh web2py/scripts/*.py web2py/applications/admin web2py/applications/examples/ web2py/applications/welcome web2py/applications/__init__.py web2py/site-packages/__init__.py web2py/gluon/tests/*.sh web2py/gluon/tests/*.py
|
||||
cd ..; zip -r web2py/web2py_src.zip web2py/web2py.py web2py/anyserver.py web2py/gluon/*.py web2py/gluon/dal/* web2py/gluon/contrib/* web2py/extras/* web2py/handlers/* web2py/examples/* web2py/README.markdown web2py/LICENSE web2py/CHANGELOG web2py/NEWINSTALL web2py/VERSION web2py/MANIFEST.in web2py/scripts/*.sh web2py/scripts/*.py web2py/applications/admin web2py/applications/examples/ web2py/applications/welcome web2py/applications/__init__.py web2py/site-packages/__init__.py web2py/gluon/tests/*.sh web2py/gluon/tests/*.py
|
||||
|
||||
mdp:
|
||||
make src
|
||||
|
||||
+3
-3
@@ -15,13 +15,13 @@ Then edit ./app.yaml and replace "yourappname" with yourappname.
|
||||
|
||||
## Documentation (readthedocs.org)
|
||||
|
||||
[](http://web2py.rtfd.org/)
|
||||
[](http://web2py.rtfd.org/)
|
||||
|
||||
## Tests
|
||||
|
||||
[](https://travis-ci.org/web2py/web2py)
|
||||
[](https://travis-ci.org/web2py/web2py)
|
||||
|
||||
[](https://coveralls.io/r/web2py/web2py)
|
||||
[](https://coveralls.io/r/web2py/web2py)
|
||||
|
||||
## Installation Instructions
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
Version 2.9.11-stable+timestamp.2014.09.15.18.31.17
|
||||
Version 2.9.12-stable+timestamp.2015.01.17.00.07.04
|
||||
|
||||
@@ -461,34 +461,24 @@ def ccache():
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
folder = os.path.join(request.folder,'cache')
|
||||
if not os.path.exists(folder):
|
||||
os.mkdir(folder)
|
||||
locker = open(os.path.join(folder, 'cache.lock'), 'a')
|
||||
portalocker.lock(locker, portalocker.LOCK_EX)
|
||||
disk_storage = shelve.open(
|
||||
os.path.join(folder, 'cache.shelve'))
|
||||
try:
|
||||
for key, value in disk_storage.items():
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
disk['bytes'] += hp.iso(value[1]).size
|
||||
disk['objects'] += hp.iso(value[1]).count
|
||||
disk['entries'] += 1
|
||||
if value[0] < disk['oldest']:
|
||||
disk['oldest'] = value[0]
|
||||
disk['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
finally:
|
||||
portalocker.unlock(locker)
|
||||
locker.close()
|
||||
disk_storage.close()
|
||||
|
||||
for key in cache.disk.storage:
|
||||
value = cache.disk.storage[key]
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
disk['bytes'] += hp.iso(value[1]).size
|
||||
disk['objects'] += hp.iso(value[1]).count
|
||||
disk['entries'] += 1
|
||||
if value[0] < disk['oldest']:
|
||||
disk['oldest'] = value[0]
|
||||
disk['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
|
||||
total['entries'] = ram['entries'] + disk['entries']
|
||||
total['bytes'] = ram['bytes'] + disk['bytes']
|
||||
@@ -667,3 +657,42 @@ def manage():
|
||||
kwargs.update(**smartgrid_args.get(table._tablename, {}))
|
||||
grid = SQLFORM.smartgrid(table, args=request.args[:2], formname=formname, **kwargs)
|
||||
return grid
|
||||
|
||||
def hooks():
|
||||
import functools
|
||||
import inspect
|
||||
list_op=['_%s_%s' %(h,m) for h in ['before', 'after'] for m in ['insert','update','delete']]
|
||||
tables=[]
|
||||
with_build_it=False
|
||||
for db_str in sorted(databases):
|
||||
db = databases[db_str]
|
||||
for t in db.tables:
|
||||
method_hooks=[]
|
||||
for op in list_op:
|
||||
functions = []
|
||||
for f in getattr(db[t], op):
|
||||
if hasattr(f, '__call__'):
|
||||
if isinstance(f, (functools.partial)):
|
||||
f = f.func
|
||||
filename = inspect.getsourcefile(f)
|
||||
details = {'funcname':f.__name__,
|
||||
'filename':filename[len(request.folder):] if request.folder in filename else None,
|
||||
'lineno': inspect.getsourcelines(f)[1]}
|
||||
if details['filename']: # Built in functions as delete_uploaded_files are not editable
|
||||
details['url'] = URL(a='admin',c='default',f='edit', args=[request['application'], details['filename']],vars={'lineno':details['lineno']})
|
||||
if details['filename'] or with_build_it:
|
||||
functions.append(details)
|
||||
if len(functions):
|
||||
method_hooks.append({'name':op, 'functions':functions})
|
||||
if len(method_hooks):
|
||||
tables.append({'name':"%s.%s" % (db_str,t), 'slug': IS_SLUG()("%s.%s" % (db_str,t))[0], 'method_hooks':method_hooks})
|
||||
# Render
|
||||
ul_main = UL(_class='nav nav-list')
|
||||
for t in tables:
|
||||
ul_main.append(A(t['name'], _onclick="collapse('a_%s')" % t['slug']))
|
||||
ul_t = UL(_class='nav nav-list', _id="a_%s" % t['slug'], _style='display:none')
|
||||
for op in t['method_hooks']:
|
||||
ul_t.append(LI (op['name']))
|
||||
ul_t.append(UL([LI(A(f['funcname'], _class="editor_filelink", _href=f['url']if 'url' in f else None, **{'_data-lineno':f['lineno']-1})) for f in op['functions']]))
|
||||
ul_main.append(ul_t)
|
||||
return ul_main
|
||||
|
||||
@@ -72,8 +72,7 @@ def interact():
|
||||
f_globals = {}
|
||||
for name, value in env['globals'].items():
|
||||
if name not in gluon.html.__all__ and \
|
||||
name not in gluon.validators.__all__ and \
|
||||
name not in gluon.dal.__all__:
|
||||
name not in gluon.validators.__all__:
|
||||
f_globals[name] = pydoc.text.repr(value)
|
||||
else:
|
||||
f_locals = {}
|
||||
|
||||
@@ -589,7 +589,7 @@ def edit():
|
||||
if 'settings' in request.vars:
|
||||
if request.post_vars: #save new preferences
|
||||
post_vars = request.post_vars.items()
|
||||
# Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings
|
||||
# Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings
|
||||
post_vars+= [(opt, 'false') for opt in preferences if opt not in request.post_vars ]
|
||||
if config.save(post_vars):
|
||||
response.headers["web2py-component-flash"] = T('Preferences saved correctly')
|
||||
@@ -775,12 +775,12 @@ def edit():
|
||||
view_link=view_link,
|
||||
editviewlinks=editviewlinks,
|
||||
id=IS_SLUG()(filename)[0],
|
||||
force= True if (request.vars.restore or
|
||||
force= True if (request.vars.restore or
|
||||
request.vars.revert) else False)
|
||||
plain_html = response.render('default/edit_js.html', file_details)
|
||||
file_details['plain_html'] = plain_html
|
||||
if is_mobile:
|
||||
return response.render('default.mobile/edit.html',
|
||||
return response.render('default.mobile/edit.html',
|
||||
file_details, editor_settings=preferences)
|
||||
else:
|
||||
return response.json(file_details)
|
||||
@@ -1278,7 +1278,7 @@ def create_file():
|
||||
path = abspath(request.vars.location)
|
||||
else:
|
||||
if request.vars.dir:
|
||||
request.vars.location += request.vars.dir + '/'
|
||||
request.vars.location += request.vars.dir + '/'
|
||||
app = get_app(name=request.vars.location.split('/')[0])
|
||||
path = apath(request.vars.location, r=request)
|
||||
filename = re.sub('[^\w./-]+', '_', request.vars.filename)
|
||||
@@ -1387,7 +1387,7 @@ def create_file():
|
||||
from gluon import *\n""")[1:]
|
||||
|
||||
elif (path[-8:] == '/static/') or (path[-9:] == '/private/'):
|
||||
if (request.vars.plugin and
|
||||
if (request.vars.plugin and
|
||||
not filename.startswith('plugin_%s/' % request.vars.plugin)):
|
||||
filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
|
||||
text = ''
|
||||
@@ -1434,37 +1434,37 @@ def create_file():
|
||||
""" % URL('edit', args=[app,request.vars.dir,filename])
|
||||
return ''
|
||||
else:
|
||||
redirect(request.vars.sender + anchor)
|
||||
redirect(request.vars.sender + anchor)
|
||||
|
||||
|
||||
def listfiles(app, dir, regexp='.*\.py$'):
|
||||
files = sorted(
|
||||
files = sorted(
|
||||
listdir(apath('%(app)s/%(dir)s/' % {'app':app, 'dir':dir}, r=request), regexp))
|
||||
files = [x.replace('\\', '/') for x in files if not x.endswith('.bak')]
|
||||
return files
|
||||
|
||||
files = [x.replace('\\', '/') for x in files if not x.endswith('.bak')]
|
||||
return files
|
||||
|
||||
def editfile(path,file,vars={}, app = None):
|
||||
args=(path,file) if 'app' in vars else (app,path,file)
|
||||
url = URL('edit', args=args, vars=vars)
|
||||
return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;')
|
||||
|
||||
args=(path,file) if 'app' in vars else (app,path,file)
|
||||
url = URL('edit', args=args, vars=vars)
|
||||
return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;')
|
||||
|
||||
def files_menu():
|
||||
app = request.vars.app or 'welcome'
|
||||
dirs=[{'name':'models', 'reg':'.*\.py$'},
|
||||
app = request.vars.app or 'welcome'
|
||||
dirs=[{'name':'models', 'reg':'.*\.py$'},
|
||||
{'name':'controllers', 'reg':'.*\.py$'},
|
||||
{'name':'views', 'reg':'[\w/\-]+(\.\w+)+$'},
|
||||
{'name':'modules', 'reg':'.*\.py$'},
|
||||
{'name':'static', 'reg': '[^\.#].*'},
|
||||
{'name':'private', 'reg':'.*\.py$'}]
|
||||
result_files = []
|
||||
for dir in dirs:
|
||||
result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"),
|
||||
LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.','__')), app), _style="overflow:hidden", _id=dir['name']+"__"+f.replace('.','__'))
|
||||
for f in listfiles(app, dir['name'], regexp=dir['reg'])],
|
||||
_class="nav nav-list small-font"),
|
||||
_id=dir['name'] + '_files', _style="display: none;")))
|
||||
return dict(result_files = result_files)
|
||||
|
||||
result_files = []
|
||||
for dir in dirs:
|
||||
result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"),
|
||||
LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.','__')), app), _style="overflow:hidden", _id=dir['name']+"__"+f.replace('.','__'))
|
||||
for f in listfiles(app, dir['name'], regexp=dir['reg'])],
|
||||
_class="nav nav-list small-font"),
|
||||
_id=dir['name'] + '_files', _style="display: none;")))
|
||||
return dict(result_files = result_files)
|
||||
|
||||
def upload_file():
|
||||
""" File uploading handler """
|
||||
if request.vars and not request.vars.token == session.token:
|
||||
@@ -1941,4 +1941,3 @@ def install_plugin():
|
||||
T('unable to install plugin "%s"', filename)
|
||||
redirect(URL(f="plugins", args=[app,]))
|
||||
return dict(form=form, app=app, plugin=plugin, source=source)
|
||||
|
||||
|
||||
+480
-480
@@ -1,480 +1,480 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
{
|
||||
'!langcode!': 'cs-cz',
|
||||
'!langname!': 'čeština',
|
||||
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': 'Kolonka "Upravit" je nepovinný výraz, například "pole1=\'nováhodnota\'". Výsledky databázového JOINu nemůžete mazat ani upravovat.',
|
||||
'"User Exception" debug mode. An error ticket could be issued!': '"User Exception" debug mode. An error ticket could be issued!',
|
||||
'%%{Row} in Table': '%%{řádek} v tabulce',
|
||||
'%%{Row} selected': 'označených %%{řádek}',
|
||||
'%s %%{row} deleted': '%s smazaných %%{záznam}',
|
||||
'%s %%{row} updated': '%s upravených %%{záznam}',
|
||||
'%s selected': '%s označených',
|
||||
'%Y-%m-%d': '%d.%m.%Y',
|
||||
'%Y-%m-%d %H:%M:%S': '%d.%m.%Y %H:%M:%S',
|
||||
'(requires internet access)': '(vyžaduje připojení k internetu)',
|
||||
'(requires internet access, experimental)': '(requires internet access, experimental)',
|
||||
'(something like "it-it")': '(například "cs-cs")',
|
||||
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(soubor **gluon/contrib/plural_rules/%s.py** nenalezen)',
|
||||
'@markmin\x01Searching: **%s** %%{file}': 'Hledání: **%s** %%{soubor}',
|
||||
'About': 'O programu',
|
||||
'About application': 'O aplikaci',
|
||||
'Access Control': 'Řízení přístupu',
|
||||
'Add breakpoint': 'Přidat bod přerušení',
|
||||
'Additional code for your application': 'Další kód pro Vaši aplikaci',
|
||||
'Admin design page': 'Admin design page',
|
||||
'Admin language': 'jazyk rozhraní',
|
||||
'Administrative interface': 'pro administrátorské rozhraní klikněte sem',
|
||||
'Administrative Interface': 'Administrátorské rozhraní',
|
||||
'administrative interface': 'rozhraní pro správu',
|
||||
'Administrator Password:': 'Administrátorské heslo:',
|
||||
'Ajax Recipes': 'Recepty s ajaxem',
|
||||
'An error occured, please %s the page': 'An error occured, please %s the page',
|
||||
'and rename it:': 'a přejmenovat na:',
|
||||
'appadmin': 'appadmin',
|
||||
'appadmin is disabled because insecure channel': 'appadmin je zakázaná bez zabezpečeného spojení',
|
||||
'Application': 'Application',
|
||||
'application "%s" uninstalled': 'application "%s" odinstalována',
|
||||
'application compiled': 'aplikace zkompilována',
|
||||
'Application name:': 'Název aplikace:',
|
||||
'are not used': 'nepoužita',
|
||||
'are not used yet': 'ještě nepoužita',
|
||||
'Are you sure you want to delete this object?': 'Opravdu chcete odstranit tento objekt?',
|
||||
'Are you sure you want to uninstall application "%s"?': 'Opravdu chcete odinstalovat aplikaci "%s"?',
|
||||
'arguments': 'arguments',
|
||||
'at char %s': 'at char %s',
|
||||
'at line %s': 'at line %s',
|
||||
'ATTENTION:': 'ATTENTION:',
|
||||
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.',
|
||||
'Available Databases and Tables': 'Dostupné databáze a tabulky',
|
||||
'back': 'zpět',
|
||||
'Back to wizard': 'Back to wizard',
|
||||
'Basics': 'Basics',
|
||||
'Begin': 'Začít',
|
||||
'breakpoint': 'bod přerušení',
|
||||
'Breakpoints': 'Body přerušení',
|
||||
'breakpoints': 'body přerušení',
|
||||
'Buy this book': 'Koupit web2py knihu',
|
||||
'Cache': 'Cache',
|
||||
'cache': 'cache',
|
||||
'Cache Keys': 'Klíče cache',
|
||||
'cache, errors and sessions cleaned': 'cache, chyby a relace byly pročištěny',
|
||||
'can be a git repo': 'může to být git repo',
|
||||
'Cancel': 'Storno',
|
||||
'Cannot be empty': 'Nemůže být prázdné',
|
||||
'Change Admin Password': 'Změnit heslo pro správu',
|
||||
'Change admin password': 'Změnit heslo pro správu aplikací',
|
||||
'Change password': 'Změna hesla',
|
||||
'check all': 'vše označit',
|
||||
'Check for upgrades': 'Zkusit aktualizovat',
|
||||
'Check to delete': 'Označit ke smazání',
|
||||
'Check to delete:': 'Označit ke smazání:',
|
||||
'Checking for upgrades...': 'Zjišťuji, zda jsou k dispozici aktualizace...',
|
||||
'Clean': 'Pročistit',
|
||||
'Clear CACHE?': 'Vymazat CACHE?',
|
||||
'Clear DISK': 'Vymazat DISK',
|
||||
'Clear RAM': 'Vymazat RAM',
|
||||
'Click row to expand traceback': 'Pro rozbalení stopy, klikněte na řádek',
|
||||
'Click row to view a ticket': 'Pro zobrazení chyby (ticketu), klikněte na řádku...',
|
||||
'Client IP': 'IP adresa klienta',
|
||||
'code': 'code',
|
||||
'Code listing': 'Code listing',
|
||||
'collapse/expand all': 'vše sbalit/rozbalit',
|
||||
'Community': 'Komunita',
|
||||
'Compile': 'Zkompilovat',
|
||||
'compiled application removed': 'zkompilovaná aplikace smazána',
|
||||
'Components and Plugins': 'Komponenty a zásuvné moduly',
|
||||
'Condition': 'Podmínka',
|
||||
'continue': 'continue',
|
||||
'Controller': 'Kontrolér (Controller)',
|
||||
'Controllers': 'Kontroléry',
|
||||
'controllers': 'kontroléry',
|
||||
'Copyright': 'Copyright',
|
||||
'Count': 'Počet',
|
||||
'Create': 'Vytvořit',
|
||||
'create file with filename:': 'vytvořit soubor s názvem:',
|
||||
'created by': 'vytvořil',
|
||||
'Created By': 'Vytvořeno - kým',
|
||||
'Created On': 'Vytvořeno - kdy',
|
||||
'crontab': 'crontab',
|
||||
'Current request': 'Aktuální požadavek',
|
||||
'Current response': 'Aktuální odpověď',
|
||||
'Current session': 'Aktuální relace',
|
||||
'currently running': 'právě běží',
|
||||
'currently saved or': 'uloženo nebo',
|
||||
'customize me!': 'upravte mě!',
|
||||
'data uploaded': 'data nahrána',
|
||||
'Database': 'Rozhraní databáze',
|
||||
'Database %s select': 'databáze %s výběr',
|
||||
'Database administration': 'Database administration',
|
||||
'database administration': 'správa databáze',
|
||||
'Date and Time': 'Datum a čas',
|
||||
'day': 'den',
|
||||
'db': 'db',
|
||||
'DB Model': 'Databázový model',
|
||||
'Debug': 'Ladění',
|
||||
'defines tables': 'defines tables',
|
||||
'Delete': 'Smazat',
|
||||
'delete': 'smazat',
|
||||
'delete all checked': 'smazat vše označené',
|
||||
'delete plugin': 'delete plugin',
|
||||
'Delete this file (you will be asked to confirm deletion)': 'Smazat tento soubor (budete požádán o potvrzení mazání)',
|
||||
'Delete:': 'Smazat:',
|
||||
'deleted after first hit': 'smazat po prvním dosažení',
|
||||
'Demo': 'Demo',
|
||||
'Deploy': 'Nahrát',
|
||||
'Deploy on Google App Engine': 'Nahrát na Google App Engine',
|
||||
'Deploy to OpenShift': 'Nahrát na OpenShift',
|
||||
'Deployment Recipes': 'Postupy pro deployment',
|
||||
'Description': 'Popis',
|
||||
'design': 'návrh',
|
||||
'Detailed traceback description': 'Podrobný výpis prostředí',
|
||||
'details': 'podrobnosti',
|
||||
'direction: ltr': 'směr: ltr',
|
||||
'Disable': 'Zablokovat',
|
||||
'DISK': 'DISK',
|
||||
'Disk Cache Keys': 'Klíče diskové cache',
|
||||
'Disk Cleared': 'Disk smazán',
|
||||
'docs': 'dokumentace',
|
||||
'Documentation': 'Dokumentace',
|
||||
"Don't know what to do?": 'Nevíte kudy kam?',
|
||||
'done!': 'hotovo!',
|
||||
'Download': 'Stáhnout',
|
||||
'download layouts': 'stáhnout moduly rozvržení stránky',
|
||||
'download plugins': 'stáhnout zásuvné moduly',
|
||||
'E-mail': 'E-mail',
|
||||
'Edit': 'Upravit',
|
||||
'edit all': 'edit all',
|
||||
'Edit application': 'Správa aplikace',
|
||||
'edit controller': 'edit controller',
|
||||
'Edit current record': 'Upravit aktuální záznam',
|
||||
'Edit Profile': 'Upravit profil',
|
||||
'edit views:': 'upravit pohled:',
|
||||
'Editing file "%s"': 'Úprava souboru "%s"',
|
||||
'Editing Language file': 'Úprava jazykového souboru',
|
||||
'Editing Plural Forms File': 'Editing Plural Forms File',
|
||||
'Email and SMS': 'Email a SMS',
|
||||
'Enable': 'Odblokovat',
|
||||
'enter a number between %(min)g and %(max)g': 'zadejte číslo mezi %(min)g a %(max)g',
|
||||
'enter an integer between %(min)g and %(max)g': 'zadejte celé číslo mezi %(min)g a %(max)g',
|
||||
'Error': 'Chyba',
|
||||
'Error logs for "%(app)s"': 'Seznam výskytu chyb pro aplikaci "%(app)s"',
|
||||
'Error snapshot': 'Snapshot chyby',
|
||||
'Error ticket': 'Ticket chyby',
|
||||
'Errors': 'Chyby',
|
||||
'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s',
|
||||
'Exception %s': 'Exception %s',
|
||||
'Exception instance attributes': 'Prvky instance výjimky',
|
||||
'Expand Abbreviation': 'Expand Abbreviation',
|
||||
'export as csv file': 'exportovat do .csv souboru',
|
||||
'exposes': 'vystavuje',
|
||||
'exposes:': 'vystavuje funkce:',
|
||||
'extends': 'rozšiřuje',
|
||||
'failed to compile file because:': 'soubor se nepodařilo zkompilovat, protože:',
|
||||
'FAQ': 'Často kladené dotazy',
|
||||
'File': 'Soubor',
|
||||
'file': 'soubor',
|
||||
'file "%(filename)s" created': 'file "%(filename)s" created',
|
||||
'file saved on %(time)s': 'soubor uložen %(time)s',
|
||||
'file saved on %s': 'soubor uložen %s',
|
||||
'Filename': 'Název souboru',
|
||||
'filter': 'filtr',
|
||||
'Find Next': 'Najít další',
|
||||
'Find Previous': 'Najít předchozí',
|
||||
'First name': 'Křestní jméno',
|
||||
'Forgot username?': 'Zapomněl jste svoje přihlašovací jméno?',
|
||||
'forgot username?': 'zapomněl jste svoje přihlašovací jméno?',
|
||||
'Forms and Validators': 'Formuláře a validátory',
|
||||
'Frames': 'Frames',
|
||||
'Free Applications': 'Aplikace zdarma',
|
||||
'Functions with no doctests will result in [passed] tests.': 'Functions with no doctests will result in [passed] tests.',
|
||||
'Generate': 'Vytvořit',
|
||||
'Get from URL:': 'Stáhnout z internetu:',
|
||||
'Git Pull': 'Git Pull',
|
||||
'Git Push': 'Git Push',
|
||||
'Globals##debug': 'Globální proměnné',
|
||||
'go!': 'OK!',
|
||||
'Goto': 'Goto',
|
||||
'graph model': 'graph model',
|
||||
'Group %(group_id)s created': 'Skupina %(group_id)s vytvořena',
|
||||
'Group ID': 'ID skupiny',
|
||||
'Groups': 'Skupiny',
|
||||
'Hello World': 'Ahoj světe',
|
||||
'Help': 'Nápověda',
|
||||
'Hide/Show Translated strings': 'Skrýt/Zobrazit přeložené texty',
|
||||
'Hits': 'Kolikrát dosaženo',
|
||||
'Home': 'Domovská stránka',
|
||||
'honored only if the expression evaluates to true': 'brát v potaz jen když se tato podmínka vyhodnotí kladně',
|
||||
'How did you get here?': 'Jak jste se sem vlastně dostal?',
|
||||
'If start the upgrade, be patient, it may take a while to download': 'If start the upgrade, be patient, it may take a while to download',
|
||||
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
|
||||
'import': 'import',
|
||||
'Import/Export': 'Import/Export',
|
||||
'includes': 'zahrnuje',
|
||||
'Index': 'Index',
|
||||
'insert new': 'vložit nový záznam ',
|
||||
'insert new %s': 'vložit nový záznam %s',
|
||||
'inspect attributes': 'inspect attributes',
|
||||
'Install': 'Instalovat',
|
||||
'Installed applications': 'Nainstalované aplikace',
|
||||
'Interaction at %s line %s': 'Interakce v %s, na řádce %s',
|
||||
'Interactive console': 'Interaktivní příkazová řádka',
|
||||
'Internal State': 'Vnitřní stav',
|
||||
'Introduction': 'Úvod',
|
||||
'Invalid email': 'Neplatný email',
|
||||
'Invalid password': 'Nesprávné heslo',
|
||||
'invalid password.': 'neplatné heslo',
|
||||
'Invalid Query': 'Neplatný dotaz',
|
||||
'invalid request': 'Neplatný požadavek',
|
||||
'Is Active': 'Je aktivní',
|
||||
'It is %s %%{day} today.': 'Dnes je to %s %%{den}.',
|
||||
'Key': 'Klíč',
|
||||
'Key bindings': 'Vazby klíčů',
|
||||
'Key bindings for ZenCoding Plugin': 'Key bindings for ZenCoding Plugin',
|
||||
'languages': 'jazyky',
|
||||
'Languages': 'Jazyky',
|
||||
'Last name': 'Příjmení',
|
||||
'Last saved on:': 'Naposledy uloženo:',
|
||||
'Layout': 'Rozvržení stránky (layout)',
|
||||
'Layout Plugins': 'Moduly rozvržení stránky (Layout Plugins)',
|
||||
'Layouts': 'Rozvržení stránek',
|
||||
'License for': 'Licence pro',
|
||||
'Line number': 'Číslo řádku',
|
||||
'LineNo': 'Č.řádku',
|
||||
'Live Chat': 'Online pokec',
|
||||
'loading...': 'nahrávám...',
|
||||
'locals': 'locals',
|
||||
'Locals##debug': 'Lokální proměnné',
|
||||
'Logged in': 'Přihlášení proběhlo úspěšně',
|
||||
'Logged out': 'Odhlášení proběhlo úspěšně',
|
||||
'Login': 'Přihlásit se',
|
||||
'login': 'přihlásit se',
|
||||
'Login to the Administrative Interface': 'Přihlásit se do Správce aplikací',
|
||||
'logout': 'odhlásit se',
|
||||
'Logout': 'Odhlásit se',
|
||||
'Lost Password': 'Zapomněl jste heslo',
|
||||
'Lost password?': 'Zapomněl jste heslo?',
|
||||
'lost password?': 'zapomněl jste heslo?',
|
||||
'Manage': 'Manage',
|
||||
'Manage Cache': 'Manage Cache',
|
||||
'Menu Model': 'Model rozbalovací nabídky',
|
||||
'Models': 'Modely',
|
||||
'models': 'modely',
|
||||
'Modified By': 'Změněno - kým',
|
||||
'Modified On': 'Změněno - kdy',
|
||||
'Modules': 'Moduly',
|
||||
'modules': 'moduly',
|
||||
'My Sites': 'Správa aplikací',
|
||||
'Name': 'Jméno',
|
||||
'new application "%s" created': 'nová aplikace "%s" vytvořena',
|
||||
'New Application Wizard': 'Nový průvodce aplikací',
|
||||
'New application wizard': 'Nový průvodce aplikací',
|
||||
'New password': 'Nové heslo',
|
||||
'New Record': 'Nový záznam',
|
||||
'new record inserted': 'nový záznam byl založen',
|
||||
'New simple application': 'Vytvořit primitivní aplikaci',
|
||||
'next': 'next',
|
||||
'next 100 rows': 'dalších 100 řádků',
|
||||
'No databases in this application': 'V této aplikaci nejsou žádné databáze',
|
||||
'No Interaction yet': 'Ještě žádná interakce nenastala',
|
||||
'No ticket_storage.txt found under /private folder': 'Soubor ticket_storage.txt v adresáři /private nenalezen',
|
||||
'Object or table name': 'Objekt či tabulka',
|
||||
'Old password': 'Původní heslo',
|
||||
'online designer': 'online návrhář',
|
||||
'Online examples': 'Příklady online',
|
||||
'Open new app in new window': 'Open new app in new window',
|
||||
'or alternatively': 'or alternatively',
|
||||
'Or Get from URL:': 'Or Get from URL:',
|
||||
'or import from csv file': 'nebo importovat z .csv souboru',
|
||||
'Origin': 'Původ',
|
||||
'Original/Translation': 'Originál/Překlad',
|
||||
'Other Plugins': 'Ostatní moduly',
|
||||
'Other Recipes': 'Ostatní zásuvné moduly',
|
||||
'Overview': 'Přehled',
|
||||
'Overwrite installed app': 'Přepsat instalovanou aplikaci',
|
||||
'Pack all': 'Zabalit',
|
||||
'Pack compiled': 'Zabalit zkompilované',
|
||||
'pack plugin': 'pack plugin',
|
||||
'password': 'heslo',
|
||||
'Password': 'Heslo',
|
||||
"Password fields don't match": 'Hesla se neshodují',
|
||||
'Peeking at file': 'Peeking at file',
|
||||
'Please': 'Prosím',
|
||||
'Plugin "%s" in application': 'Plugin "%s" in application',
|
||||
'plugins': 'zásuvné moduly',
|
||||
'Plugins': 'Zásuvné moduly',
|
||||
'Plural Form #%s': 'Plural Form #%s',
|
||||
'Plural-Forms:': 'Množná čísla:',
|
||||
'Powered by': 'Poháněno',
|
||||
'Preface': 'Předmluva',
|
||||
'previous 100 rows': 'předchozích 100 řádků',
|
||||
'Private files': 'Soukromé soubory',
|
||||
'private files': 'soukromé soubory',
|
||||
'profile': 'profil',
|
||||
'Project Progress': 'Vývoj projektu',
|
||||
'Python': 'Python',
|
||||
'Query:': 'Dotaz:',
|
||||
'Quick Examples': 'Krátké příklady',
|
||||
'RAM': 'RAM',
|
||||
'RAM Cache Keys': 'Klíče RAM Cache',
|
||||
'Ram Cleared': 'RAM smazána',
|
||||
'Readme': 'Nápověda',
|
||||
'Recipes': 'Postupy jak na to',
|
||||
'Record': 'Záznam',
|
||||
'record does not exist': 'záznam neexistuje',
|
||||
'Record ID': 'ID záznamu',
|
||||
'Record id': 'id záznamu',
|
||||
'refresh': 'obnovte',
|
||||
'register': 'registrovat',
|
||||
'Register': 'Zaregistrovat se',
|
||||
'Registration identifier': 'Registrační identifikátor',
|
||||
'Registration key': 'Registrační klíč',
|
||||
'reload': 'reload',
|
||||
'Reload routes': 'Znovu nahrát cesty',
|
||||
'Remember me (for 30 days)': 'Zapamatovat na 30 dní',
|
||||
'Remove compiled': 'Odstranit zkompilované',
|
||||
'Removed Breakpoint on %s at line %s': 'Bod přerušení smazán - soubor %s na řádce %s',
|
||||
'Replace': 'Zaměnit',
|
||||
'Replace All': 'Zaměnit vše',
|
||||
'request': 'request',
|
||||
'Reset Password key': 'Reset registračního klíče',
|
||||
'response': 'response',
|
||||
'restart': 'restart',
|
||||
'restore': 'obnovit',
|
||||
'Retrieve username': 'Získat přihlašovací jméno',
|
||||
'return': 'return',
|
||||
'revert': 'vrátit se k původnímu',
|
||||
'Role': 'Role',
|
||||
'Rows in Table': 'Záznamy v tabulce',
|
||||
'Rows selected': 'Záznamů zobrazeno',
|
||||
'rules are not defined': 'pravidla nejsou definována',
|
||||
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Spustí testy v tomto souboru (ke spuštění všech testů, použijte tlačítko 'test')",
|
||||
'Running on %s': 'Běží na %s',
|
||||
'Save': 'Uložit',
|
||||
'Save file:': 'Save file:',
|
||||
'Save via Ajax': 'Uložit pomocí Ajaxu',
|
||||
'Saved file hash:': 'hash uloženého souboru:',
|
||||
'Semantic': 'Modul semantic',
|
||||
'Services': 'Služby',
|
||||
'session': 'session',
|
||||
'session expired': 'session expired',
|
||||
'Set Breakpoint on %s at line %s: %s': 'Bod přerušení nastaven v souboru %s na řádce %s: %s',
|
||||
'shell': 'příkazová řádka',
|
||||
'Singular Form': 'Singular Form',
|
||||
'Site': 'Správa aplikací',
|
||||
'Size of cache:': 'Velikost cache:',
|
||||
'skip to generate': 'skip to generate',
|
||||
'Sorry, could not find mercurial installed': 'Bohužel mercurial není nainstalován.',
|
||||
'Start a new app': 'Vytvořit novou aplikaci',
|
||||
'Start searching': 'Začít hledání',
|
||||
'Start wizard': 'Spustit průvodce',
|
||||
'state': 'stav',
|
||||
'Static': 'Static',
|
||||
'static': 'statické soubory',
|
||||
'Static files': 'Statické soubory',
|
||||
'Statistics': 'Statistika',
|
||||
'Step': 'Step',
|
||||
'step': 'step',
|
||||
'stop': 'stop',
|
||||
'Stylesheet': 'CSS styly',
|
||||
'submit': 'odeslat',
|
||||
'Submit': 'Odeslat',
|
||||
'successful': 'úspěšně',
|
||||
'Support': 'Podpora',
|
||||
'Sure you want to delete this object?': 'Opravdu chcete smazat tento objekt?',
|
||||
'Table': 'tabulka',
|
||||
'Table name': 'Název tabulky',
|
||||
'Temporary': 'Dočasný',
|
||||
'test': 'test',
|
||||
'Testing application': 'Testing application',
|
||||
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Dotaz" je podmínka, například "db.tabulka1.pole1==\'hodnota\'". Podmínka "db.tabulka1.pole1==db.tabulka2.pole2" pak vytvoří SQL JOIN.',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'Logika aplikace: každá URL je mapována na funkci vystavovanou kontrolérem.',
|
||||
'The Core': 'Jádro (The Core)',
|
||||
'The data representation, define database tables and sets': 'Reprezentace dat: definovat tabulky databáze a záznamy',
|
||||
'The output of the file is a dictionary that was rendered by the view %s': 'Výstup ze souboru je slovník, který se zobrazil v pohledu %s.',
|
||||
'The presentations layer, views are also known as templates': 'Prezentační vrstva: pohledy či templaty (šablony)',
|
||||
'The Views': 'Pohledy (The Views)',
|
||||
'There are no controllers': 'There are no controllers',
|
||||
'There are no modules': 'There are no modules',
|
||||
'There are no plugins': 'Žádné moduly nejsou instalovány.',
|
||||
'There are no private files': 'Žádné soukromé soubory neexistují.',
|
||||
'There are no static files': 'There are no static files',
|
||||
'There are no translators, only default language is supported': 'There are no translators, only default language is supported',
|
||||
'There are no views': 'There are no views',
|
||||
'These files are not served, they are only available from within your app': 'Tyto soubory jsou klientům nepřístupné. K dispozici jsou pouze v rámci aplikace.',
|
||||
'These files are served without processing, your images go here': 'Tyto soubory jsou servírovány bez přídavné logiky, sem patří např. obrázky.',
|
||||
'This App': 'Tato aplikace',
|
||||
'This is a copy of the scaffolding application': 'Toto je kopie aplikace skelet.',
|
||||
'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk',
|
||||
'This is the %(filename)s template': 'This is the %(filename)s template',
|
||||
'this page to see if a breakpoint was hit and debug interaction is required.': 'tuto stránku, abyste uviděli, zda se dosáhlo bodu přerušení.',
|
||||
'Ticket': 'Ticket',
|
||||
'Ticket ID': 'Ticket ID',
|
||||
'Time in Cache (h:m:s)': 'Čas v Cache (h:m:s)',
|
||||
'Timestamp': 'Časové razítko',
|
||||
'to previous version.': 'k předchozí verzi.',
|
||||
'To create a plugin, name a file/folder plugin_[name]': 'Zásuvný modul vytvoříte tak, že pojmenujete soubor/adresář plugin_[jméno modulu]',
|
||||
'To emulate a breakpoint programatically, write:': 'K nastavení bodu přerušení v kódu programu, napište:',
|
||||
'to use the debugger!': ', abyste mohli ladící program používat!',
|
||||
'toggle breakpoint': 'vyp./zap. bod přerušení',
|
||||
'Toggle Fullscreen': 'Na celou obrazovku a zpět',
|
||||
'too short': 'Příliš krátké',
|
||||
'Traceback': 'Traceback',
|
||||
'Translation strings for the application': 'Překlad textů pro aplikaci',
|
||||
'try something like': 'try something like',
|
||||
'Try the mobile interface': 'Zkuste rozhraní pro mobilní zařízení',
|
||||
'try view': 'try view',
|
||||
'Twitter': 'Twitter',
|
||||
'Type python statement in here and hit Return (Enter) to execute it.': 'Type python statement in here and hit Return (Enter) to execute it.',
|
||||
'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.',
|
||||
'Unable to check for upgrades': 'Unable to check for upgrades',
|
||||
'unable to parse csv file': 'csv soubor nedá sa zpracovat',
|
||||
'uncheck all': 'vše odznačit',
|
||||
'Uninstall': 'Odinstalovat',
|
||||
'update': 'aktualizovat',
|
||||
'update all languages': 'aktualizovat všechny jazyky',
|
||||
'Update:': 'Upravit:',
|
||||
'Upgrade': 'Upgrade',
|
||||
'upgrade now': 'upgrade now',
|
||||
'upgrade now to %s': 'upgrade now to %s',
|
||||
'upload': 'nahrát',
|
||||
'Upload': 'Upload',
|
||||
'Upload a package:': 'Nahrát balík:',
|
||||
'Upload and install packed application': 'Nahrát a instalovat zabalenou aplikaci',
|
||||
'upload file:': 'nahrát soubor:',
|
||||
'upload plugin file:': 'nahrát soubor modulu:',
|
||||
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Použijte (...)&(...) pro AND, (...)|(...) pro OR a ~(...) pro NOT pro sestavení složitějších dotazů.',
|
||||
'User %(id)s Logged-in': 'Uživatel %(id)s přihlášen',
|
||||
'User %(id)s Logged-out': 'Uživatel %(id)s odhlášen',
|
||||
'User %(id)s Password changed': 'Uživatel %(id)s změnil heslo',
|
||||
'User %(id)s Profile updated': 'Uživatel %(id)s upravil profil',
|
||||
'User %(id)s Registered': 'Uživatel %(id)s se zaregistroval',
|
||||
'User %(id)s Username retrieved': 'Uživatel %(id)s si nachal zaslat přihlašovací jméno',
|
||||
'User ID': 'ID uživatele',
|
||||
'Username': 'Přihlašovací jméno',
|
||||
'variables': 'variables',
|
||||
'Verify Password': 'Zopakujte heslo',
|
||||
'Version': 'Verze',
|
||||
'Version %s.%s.%s (%s) %s': 'Verze %s.%s.%s (%s) %s',
|
||||
'Versioning': 'Verzování',
|
||||
'Videos': 'Videa',
|
||||
'View': 'Pohled (View)',
|
||||
'Views': 'Pohledy',
|
||||
'views': 'pohledy',
|
||||
'Web Framework': 'Web Framework',
|
||||
'web2py is up to date': 'Máte aktuální verzi web2py.',
|
||||
'web2py online debugger': 'Ladící online web2py program',
|
||||
'web2py Recent Tweets': 'Štěbetání na Twitteru o web2py',
|
||||
'web2py upgrade': 'web2py upgrade',
|
||||
'web2py upgraded; please restart it': 'web2py upgraded; please restart it',
|
||||
'Welcome': 'Vítejte',
|
||||
'Welcome to web2py': 'Vitejte ve web2py',
|
||||
'Welcome to web2py!': 'Vítejte ve web2py!',
|
||||
'Which called the function %s located in the file %s': 'která zavolala funkci %s v souboru (kontroléru) %s.',
|
||||
'You are successfully running web2py': 'Úspěšně jste spustili web2py.',
|
||||
'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'Nastavovat a mazat body přerušení je též možno v rámci editování zdrojového souboru přes tlačítko Vyp./Zap. bod přerušení',
|
||||
'You can modify this application and adapt it to your needs': 'Tuto aplikaci si můžete upravit a přizpůsobit ji svým potřebám.',
|
||||
'You need to set up and reach a': 'Je třeba nejprve nastavit a dojít až na',
|
||||
'You visited the url %s': 'Navštívili jste stránku %s,',
|
||||
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Aplikace bude blokována než se klikne na jedno z tlačítek (další, krok, pokračovat, atd.)',
|
||||
'You can inspect variables using the console bellow': 'Níže pomocí příkazové řádky si můžete prohlédnout proměnné',
|
||||
}
|
||||
# -*- coding: utf-8 -*-
|
||||
{
|
||||
'!langcode!': 'cs-cz',
|
||||
'!langname!': 'čeština',
|
||||
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': 'Kolonka "Upravit" je nepovinný výraz, například "pole1=\'nováhodnota\'". Výsledky databázového JOINu nemůžete mazat ani upravovat.',
|
||||
'"User Exception" debug mode. An error ticket could be issued!': '"User Exception" debug mode. An error ticket could be issued!',
|
||||
'%%{Row} in Table': '%%{řádek} v tabulce',
|
||||
'%%{Row} selected': 'označených %%{řádek}',
|
||||
'%s %%{row} deleted': '%s smazaných %%{záznam}',
|
||||
'%s %%{row} updated': '%s upravených %%{záznam}',
|
||||
'%s selected': '%s označených',
|
||||
'%Y-%m-%d': '%d.%m.%Y',
|
||||
'%Y-%m-%d %H:%M:%S': '%d.%m.%Y %H:%M:%S',
|
||||
'(requires internet access)': '(vyžaduje připojení k internetu)',
|
||||
'(requires internet access, experimental)': '(requires internet access, experimental)',
|
||||
'(something like "it-it")': '(například "cs-cs")',
|
||||
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(soubor **gluon/contrib/plural_rules/%s.py** nenalezen)',
|
||||
'@markmin\x01Searching: **%s** %%{file}': 'Hledání: **%s** %%{soubor}',
|
||||
'About': 'O programu',
|
||||
'About application': 'O aplikaci',
|
||||
'Access Control': 'Řízení přístupu',
|
||||
'Add breakpoint': 'Přidat bod přerušení',
|
||||
'Additional code for your application': 'Další kód pro Vaši aplikaci',
|
||||
'Admin design page': 'Admin design page',
|
||||
'Admin language': 'jazyk rozhraní',
|
||||
'Administrative interface': 'pro administrátorské rozhraní klikněte sem',
|
||||
'Administrative Interface': 'Administrátorské rozhraní',
|
||||
'administrative interface': 'rozhraní pro správu',
|
||||
'Administrator Password:': 'Administrátorské heslo:',
|
||||
'Ajax Recipes': 'Recepty s ajaxem',
|
||||
'An error occured, please %s the page': 'An error occured, please %s the page',
|
||||
'and rename it:': 'a přejmenovat na:',
|
||||
'appadmin': 'appadmin',
|
||||
'appadmin is disabled because insecure channel': 'appadmin je zakázaná bez zabezpečeného spojení',
|
||||
'Application': 'Application',
|
||||
'application "%s" uninstalled': 'application "%s" odinstalována',
|
||||
'application compiled': 'aplikace zkompilována',
|
||||
'Application name:': 'Název aplikace:',
|
||||
'are not used': 'nepoužita',
|
||||
'are not used yet': 'ještě nepoužita',
|
||||
'Are you sure you want to delete this object?': 'Opravdu chcete odstranit tento objekt?',
|
||||
'Are you sure you want to uninstall application "%s"?': 'Opravdu chcete odinstalovat aplikaci "%s"?',
|
||||
'arguments': 'arguments',
|
||||
'at char %s': 'at char %s',
|
||||
'at line %s': 'at line %s',
|
||||
'ATTENTION:': 'ATTENTION:',
|
||||
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.',
|
||||
'Available Databases and Tables': 'Dostupné databáze a tabulky',
|
||||
'back': 'zpět',
|
||||
'Back to wizard': 'Back to wizard',
|
||||
'Basics': 'Basics',
|
||||
'Begin': 'Začít',
|
||||
'breakpoint': 'bod přerušení',
|
||||
'Breakpoints': 'Body přerušení',
|
||||
'breakpoints': 'body přerušení',
|
||||
'Buy this book': 'Koupit web2py knihu',
|
||||
'Cache': 'Cache',
|
||||
'cache': 'cache',
|
||||
'Cache Keys': 'Klíče cache',
|
||||
'cache, errors and sessions cleaned': 'cache, chyby a relace byly pročištěny',
|
||||
'can be a git repo': 'může to být git repo',
|
||||
'Cancel': 'Storno',
|
||||
'Cannot be empty': 'Nemůže být prázdné',
|
||||
'Change Admin Password': 'Změnit heslo pro správu',
|
||||
'Change admin password': 'Změnit heslo pro správu aplikací',
|
||||
'Change password': 'Změna hesla',
|
||||
'check all': 'vše označit',
|
||||
'Check for upgrades': 'Zkusit aktualizovat',
|
||||
'Check to delete': 'Označit ke smazání',
|
||||
'Check to delete:': 'Označit ke smazání:',
|
||||
'Checking for upgrades...': 'Zjišťuji, zda jsou k dispozici aktualizace...',
|
||||
'Clean': 'Pročistit',
|
||||
'Clear CACHE?': 'Vymazat CACHE?',
|
||||
'Clear DISK': 'Vymazat DISK',
|
||||
'Clear RAM': 'Vymazat RAM',
|
||||
'Click row to expand traceback': 'Pro rozbalení stopy, klikněte na řádek',
|
||||
'Click row to view a ticket': 'Pro zobrazení chyby (ticketu), klikněte na řádku...',
|
||||
'Client IP': 'IP adresa klienta',
|
||||
'code': 'code',
|
||||
'Code listing': 'Code listing',
|
||||
'collapse/expand all': 'vše sbalit/rozbalit',
|
||||
'Community': 'Komunita',
|
||||
'Compile': 'Zkompilovat',
|
||||
'compiled application removed': 'zkompilovaná aplikace smazána',
|
||||
'Components and Plugins': 'Komponenty a zásuvné moduly',
|
||||
'Condition': 'Podmínka',
|
||||
'continue': 'continue',
|
||||
'Controller': 'Kontrolér (Controller)',
|
||||
'Controllers': 'Kontroléry',
|
||||
'controllers': 'kontroléry',
|
||||
'Copyright': 'Copyright',
|
||||
'Count': 'Počet',
|
||||
'Create': 'Vytvořit',
|
||||
'create file with filename:': 'vytvořit soubor s názvem:',
|
||||
'created by': 'vytvořil',
|
||||
'Created By': 'Vytvořeno - kým',
|
||||
'Created On': 'Vytvořeno - kdy',
|
||||
'crontab': 'crontab',
|
||||
'Current request': 'Aktuální požadavek',
|
||||
'Current response': 'Aktuální odpověď',
|
||||
'Current session': 'Aktuální relace',
|
||||
'currently running': 'právě běží',
|
||||
'currently saved or': 'uloženo nebo',
|
||||
'customize me!': 'upravte mě!',
|
||||
'data uploaded': 'data nahrána',
|
||||
'Database': 'Rozhraní databáze',
|
||||
'Database %s select': 'databáze %s výběr',
|
||||
'Database administration': 'Database administration',
|
||||
'database administration': 'správa databáze',
|
||||
'Date and Time': 'Datum a čas',
|
||||
'day': 'den',
|
||||
'db': 'db',
|
||||
'DB Model': 'Databázový model',
|
||||
'Debug': 'Ladění',
|
||||
'defines tables': 'defines tables',
|
||||
'Delete': 'Smazat',
|
||||
'delete': 'smazat',
|
||||
'delete all checked': 'smazat vše označené',
|
||||
'delete plugin': 'delete plugin',
|
||||
'Delete this file (you will be asked to confirm deletion)': 'Smazat tento soubor (budete požádán o potvrzení mazání)',
|
||||
'Delete:': 'Smazat:',
|
||||
'deleted after first hit': 'smazat po prvním dosažení',
|
||||
'Demo': 'Demo',
|
||||
'Deploy': 'Nahrát',
|
||||
'Deploy on Google App Engine': 'Nahrát na Google App Engine',
|
||||
'Deploy to OpenShift': 'Nahrát na OpenShift',
|
||||
'Deployment Recipes': 'Postupy pro deployment',
|
||||
'Description': 'Popis',
|
||||
'design': 'návrh',
|
||||
'Detailed traceback description': 'Podrobný výpis prostředí',
|
||||
'details': 'podrobnosti',
|
||||
'direction: ltr': 'směr: ltr',
|
||||
'Disable': 'Zablokovat',
|
||||
'DISK': 'DISK',
|
||||
'Disk Cache Keys': 'Klíče diskové cache',
|
||||
'Disk Cleared': 'Disk smazán',
|
||||
'docs': 'dokumentace',
|
||||
'Documentation': 'Dokumentace',
|
||||
"Don't know what to do?": 'Nevíte kudy kam?',
|
||||
'done!': 'hotovo!',
|
||||
'Download': 'Stáhnout',
|
||||
'download layouts': 'stáhnout moduly rozvržení stránky',
|
||||
'download plugins': 'stáhnout zásuvné moduly',
|
||||
'E-mail': 'E-mail',
|
||||
'Edit': 'Upravit',
|
||||
'edit all': 'edit all',
|
||||
'Edit application': 'Správa aplikace',
|
||||
'edit controller': 'edit controller',
|
||||
'Edit current record': 'Upravit aktuální záznam',
|
||||
'Edit Profile': 'Upravit profil',
|
||||
'edit views:': 'upravit pohled:',
|
||||
'Editing file "%s"': 'Úprava souboru "%s"',
|
||||
'Editing Language file': 'Úprava jazykového souboru',
|
||||
'Editing Plural Forms File': 'Editing Plural Forms File',
|
||||
'Email and SMS': 'Email a SMS',
|
||||
'Enable': 'Odblokovat',
|
||||
'enter a number between %(min)g and %(max)g': 'zadejte číslo mezi %(min)g a %(max)g',
|
||||
'enter an integer between %(min)g and %(max)g': 'zadejte celé číslo mezi %(min)g a %(max)g',
|
||||
'Error': 'Chyba',
|
||||
'Error logs for "%(app)s"': 'Seznam výskytu chyb pro aplikaci "%(app)s"',
|
||||
'Error snapshot': 'Snapshot chyby',
|
||||
'Error ticket': 'Ticket chyby',
|
||||
'Errors': 'Chyby',
|
||||
'Exception %(extype)s: %(exvalue)s': 'Exception %(extype)s: %(exvalue)s',
|
||||
'Exception %s': 'Exception %s',
|
||||
'Exception instance attributes': 'Prvky instance výjimky',
|
||||
'Expand Abbreviation': 'Expand Abbreviation',
|
||||
'export as csv file': 'exportovat do .csv souboru',
|
||||
'exposes': 'vystavuje',
|
||||
'exposes:': 'vystavuje funkce:',
|
||||
'extends': 'rozšiřuje',
|
||||
'failed to compile file because:': 'soubor se nepodařilo zkompilovat, protože:',
|
||||
'FAQ': 'Často kladené dotazy',
|
||||
'File': 'Soubor',
|
||||
'file': 'soubor',
|
||||
'file "%(filename)s" created': 'file "%(filename)s" created',
|
||||
'file saved on %(time)s': 'soubor uložen %(time)s',
|
||||
'file saved on %s': 'soubor uložen %s',
|
||||
'Filename': 'Název souboru',
|
||||
'filter': 'filtr',
|
||||
'Find Next': 'Najít další',
|
||||
'Find Previous': 'Najít předchozí',
|
||||
'First name': 'Křestní jméno',
|
||||
'Forgot username?': 'Zapomněl jste svoje přihlašovací jméno?',
|
||||
'forgot username?': 'zapomněl jste svoje přihlašovací jméno?',
|
||||
'Forms and Validators': 'Formuláře a validátory',
|
||||
'Frames': 'Frames',
|
||||
'Free Applications': 'Aplikace zdarma',
|
||||
'Functions with no doctests will result in [passed] tests.': 'Functions with no doctests will result in [passed] tests.',
|
||||
'Generate': 'Vytvořit',
|
||||
'Get from URL:': 'Stáhnout z internetu:',
|
||||
'Git Pull': 'Git Pull',
|
||||
'Git Push': 'Git Push',
|
||||
'Globals##debug': 'Globální proměnné',
|
||||
'go!': 'OK!',
|
||||
'Goto': 'Goto',
|
||||
'graph model': 'graph model',
|
||||
'Group %(group_id)s created': 'Skupina %(group_id)s vytvořena',
|
||||
'Group ID': 'ID skupiny',
|
||||
'Groups': 'Skupiny',
|
||||
'Hello World': 'Ahoj světe',
|
||||
'Help': 'Nápověda',
|
||||
'Hide/Show Translated strings': 'Skrýt/Zobrazit přeložené texty',
|
||||
'Hits': 'Kolikrát dosaženo',
|
||||
'Home': 'Domovská stránka',
|
||||
'honored only if the expression evaluates to true': 'brát v potaz jen když se tato podmínka vyhodnotí kladně',
|
||||
'How did you get here?': 'Jak jste se sem vlastně dostal?',
|
||||
'If start the upgrade, be patient, it may take a while to download': 'If start the upgrade, be patient, it may take a while to download',
|
||||
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.',
|
||||
'import': 'import',
|
||||
'Import/Export': 'Import/Export',
|
||||
'includes': 'zahrnuje',
|
||||
'Index': 'Index',
|
||||
'insert new': 'vložit nový záznam ',
|
||||
'insert new %s': 'vložit nový záznam %s',
|
||||
'inspect attributes': 'inspect attributes',
|
||||
'Install': 'Instalovat',
|
||||
'Installed applications': 'Nainstalované aplikace',
|
||||
'Interaction at %s line %s': 'Interakce v %s, na řádce %s',
|
||||
'Interactive console': 'Interaktivní příkazová řádka',
|
||||
'Internal State': 'Vnitřní stav',
|
||||
'Introduction': 'Úvod',
|
||||
'Invalid email': 'Neplatný email',
|
||||
'Invalid password': 'Nesprávné heslo',
|
||||
'invalid password.': 'neplatné heslo',
|
||||
'Invalid Query': 'Neplatný dotaz',
|
||||
'invalid request': 'Neplatný požadavek',
|
||||
'Is Active': 'Je aktivní',
|
||||
'It is %s %%{day} today.': 'Dnes je to %s %%{den}.',
|
||||
'Key': 'Klíč',
|
||||
'Key bindings': 'Vazby klíčů',
|
||||
'Key bindings for ZenCoding Plugin': 'Key bindings for ZenCoding Plugin',
|
||||
'languages': 'jazyky',
|
||||
'Languages': 'Jazyky',
|
||||
'Last name': 'Příjmení',
|
||||
'Last saved on:': 'Naposledy uloženo:',
|
||||
'Layout': 'Rozvržení stránky (layout)',
|
||||
'Layout Plugins': 'Moduly rozvržení stránky (Layout Plugins)',
|
||||
'Layouts': 'Rozvržení stránek',
|
||||
'License for': 'Licence pro',
|
||||
'Line number': 'Číslo řádku',
|
||||
'LineNo': 'Č.řádku',
|
||||
'Live Chat': 'Online pokec',
|
||||
'loading...': 'nahrávám...',
|
||||
'locals': 'locals',
|
||||
'Locals##debug': 'Lokální proměnné',
|
||||
'Logged in': 'Přihlášení proběhlo úspěšně',
|
||||
'Logged out': 'Odhlášení proběhlo úspěšně',
|
||||
'Login': 'Přihlásit se',
|
||||
'login': 'přihlásit se',
|
||||
'Login to the Administrative Interface': 'Přihlásit se do Správce aplikací',
|
||||
'logout': 'odhlásit se',
|
||||
'Logout': 'Odhlásit se',
|
||||
'Lost Password': 'Zapomněl jste heslo',
|
||||
'Lost password?': 'Zapomněl jste heslo?',
|
||||
'lost password?': 'zapomněl jste heslo?',
|
||||
'Manage': 'Manage',
|
||||
'Manage Cache': 'Manage Cache',
|
||||
'Menu Model': 'Model rozbalovací nabídky',
|
||||
'Models': 'Modely',
|
||||
'models': 'modely',
|
||||
'Modified By': 'Změněno - kým',
|
||||
'Modified On': 'Změněno - kdy',
|
||||
'Modules': 'Moduly',
|
||||
'modules': 'moduly',
|
||||
'My Sites': 'Správa aplikací',
|
||||
'Name': 'Jméno',
|
||||
'new application "%s" created': 'nová aplikace "%s" vytvořena',
|
||||
'New Application Wizard': 'Nový průvodce aplikací',
|
||||
'New application wizard': 'Nový průvodce aplikací',
|
||||
'New password': 'Nové heslo',
|
||||
'New Record': 'Nový záznam',
|
||||
'new record inserted': 'nový záznam byl založen',
|
||||
'New simple application': 'Vytvořit primitivní aplikaci',
|
||||
'next': 'next',
|
||||
'next 100 rows': 'dalších 100 řádků',
|
||||
'No databases in this application': 'V této aplikaci nejsou žádné databáze',
|
||||
'No Interaction yet': 'Ještě žádná interakce nenastala',
|
||||
'No ticket_storage.txt found under /private folder': 'Soubor ticket_storage.txt v adresáři /private nenalezen',
|
||||
'Object or table name': 'Objekt či tabulka',
|
||||
'Old password': 'Původní heslo',
|
||||
'online designer': 'online návrhář',
|
||||
'Online examples': 'Příklady online',
|
||||
'Open new app in new window': 'Open new app in new window',
|
||||
'or alternatively': 'or alternatively',
|
||||
'Or Get from URL:': 'Or Get from URL:',
|
||||
'or import from csv file': 'nebo importovat z .csv souboru',
|
||||
'Origin': 'Původ',
|
||||
'Original/Translation': 'Originál/Překlad',
|
||||
'Other Plugins': 'Ostatní moduly',
|
||||
'Other Recipes': 'Ostatní zásuvné moduly',
|
||||
'Overview': 'Přehled',
|
||||
'Overwrite installed app': 'Přepsat instalovanou aplikaci',
|
||||
'Pack all': 'Zabalit',
|
||||
'Pack compiled': 'Zabalit zkompilované',
|
||||
'pack plugin': 'pack plugin',
|
||||
'password': 'heslo',
|
||||
'Password': 'Heslo',
|
||||
"Password fields don't match": 'Hesla se neshodují',
|
||||
'Peeking at file': 'Peeking at file',
|
||||
'Please': 'Prosím',
|
||||
'Plugin "%s" in application': 'Plugin "%s" in application',
|
||||
'plugins': 'zásuvné moduly',
|
||||
'Plugins': 'Zásuvné moduly',
|
||||
'Plural Form #%s': 'Plural Form #%s',
|
||||
'Plural-Forms:': 'Množná čísla:',
|
||||
'Powered by': 'Poháněno',
|
||||
'Preface': 'Předmluva',
|
||||
'previous 100 rows': 'předchozích 100 řádků',
|
||||
'Private files': 'Soukromé soubory',
|
||||
'private files': 'soukromé soubory',
|
||||
'profile': 'profil',
|
||||
'Project Progress': 'Vývoj projektu',
|
||||
'Python': 'Python',
|
||||
'Query:': 'Dotaz:',
|
||||
'Quick Examples': 'Krátké příklady',
|
||||
'RAM': 'RAM',
|
||||
'RAM Cache Keys': 'Klíče RAM Cache',
|
||||
'Ram Cleared': 'RAM smazána',
|
||||
'Readme': 'Nápověda',
|
||||
'Recipes': 'Postupy jak na to',
|
||||
'Record': 'Záznam',
|
||||
'record does not exist': 'záznam neexistuje',
|
||||
'Record ID': 'ID záznamu',
|
||||
'Record id': 'id záznamu',
|
||||
'refresh': 'obnovte',
|
||||
'register': 'registrovat',
|
||||
'Register': 'Zaregistrovat se',
|
||||
'Registration identifier': 'Registrační identifikátor',
|
||||
'Registration key': 'Registrační klíč',
|
||||
'reload': 'reload',
|
||||
'Reload routes': 'Znovu nahrát cesty',
|
||||
'Remember me (for 30 days)': 'Zapamatovat na 30 dní',
|
||||
'Remove compiled': 'Odstranit zkompilované',
|
||||
'Removed Breakpoint on %s at line %s': 'Bod přerušení smazán - soubor %s na řádce %s',
|
||||
'Replace': 'Zaměnit',
|
||||
'Replace All': 'Zaměnit vše',
|
||||
'request': 'request',
|
||||
'Reset Password key': 'Reset registračního klíče',
|
||||
'response': 'response',
|
||||
'restart': 'restart',
|
||||
'restore': 'obnovit',
|
||||
'Retrieve username': 'Získat přihlašovací jméno',
|
||||
'return': 'return',
|
||||
'revert': 'vrátit se k původnímu',
|
||||
'Role': 'Role',
|
||||
'Rows in Table': 'Záznamy v tabulce',
|
||||
'Rows selected': 'Záznamů zobrazeno',
|
||||
'rules are not defined': 'pravidla nejsou definována',
|
||||
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Spustí testy v tomto souboru (ke spuštění všech testů, použijte tlačítko 'test')",
|
||||
'Running on %s': 'Běží na %s',
|
||||
'Save': 'Uložit',
|
||||
'Save file:': 'Save file:',
|
||||
'Save via Ajax': 'Uložit pomocí Ajaxu',
|
||||
'Saved file hash:': 'hash uloženého souboru:',
|
||||
'Semantic': 'Modul semantic',
|
||||
'Services': 'Služby',
|
||||
'session': 'session',
|
||||
'session expired': 'session expired',
|
||||
'Set Breakpoint on %s at line %s: %s': 'Bod přerušení nastaven v souboru %s na řádce %s: %s',
|
||||
'shell': 'příkazová řádka',
|
||||
'Singular Form': 'Singular Form',
|
||||
'Site': 'Správa aplikací',
|
||||
'Size of cache:': 'Velikost cache:',
|
||||
'skip to generate': 'skip to generate',
|
||||
'Sorry, could not find mercurial installed': 'Bohužel mercurial není nainstalován.',
|
||||
'Start a new app': 'Vytvořit novou aplikaci',
|
||||
'Start searching': 'Začít hledání',
|
||||
'Start wizard': 'Spustit průvodce',
|
||||
'state': 'stav',
|
||||
'Static': 'Static',
|
||||
'static': 'statické soubory',
|
||||
'Static files': 'Statické soubory',
|
||||
'Statistics': 'Statistika',
|
||||
'Step': 'Step',
|
||||
'step': 'step',
|
||||
'stop': 'stop',
|
||||
'Stylesheet': 'CSS styly',
|
||||
'submit': 'odeslat',
|
||||
'Submit': 'Odeslat',
|
||||
'successful': 'úspěšně',
|
||||
'Support': 'Podpora',
|
||||
'Sure you want to delete this object?': 'Opravdu chcete smazat tento objekt?',
|
||||
'Table': 'tabulka',
|
||||
'Table name': 'Název tabulky',
|
||||
'Temporary': 'Dočasný',
|
||||
'test': 'test',
|
||||
'Testing application': 'Testing application',
|
||||
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Dotaz" je podmínka, například "db.tabulka1.pole1==\'hodnota\'". Podmínka "db.tabulka1.pole1==db.tabulka2.pole2" pak vytvoří SQL JOIN.',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'Logika aplikace: každá URL je mapována na funkci vystavovanou kontrolérem.',
|
||||
'The Core': 'Jádro (The Core)',
|
||||
'The data representation, define database tables and sets': 'Reprezentace dat: definovat tabulky databáze a záznamy',
|
||||
'The output of the file is a dictionary that was rendered by the view %s': 'Výstup ze souboru je slovník, který se zobrazil v pohledu %s.',
|
||||
'The presentations layer, views are also known as templates': 'Prezentační vrstva: pohledy či templaty (šablony)',
|
||||
'The Views': 'Pohledy (The Views)',
|
||||
'There are no controllers': 'There are no controllers',
|
||||
'There are no modules': 'There are no modules',
|
||||
'There are no plugins': 'Žádné moduly nejsou instalovány.',
|
||||
'There are no private files': 'Žádné soukromé soubory neexistují.',
|
||||
'There are no static files': 'There are no static files',
|
||||
'There are no translators, only default language is supported': 'There are no translators, only default language is supported',
|
||||
'There are no views': 'There are no views',
|
||||
'These files are not served, they are only available from within your app': 'Tyto soubory jsou klientům nepřístupné. K dispozici jsou pouze v rámci aplikace.',
|
||||
'These files are served without processing, your images go here': 'Tyto soubory jsou servírovány bez přídavné logiky, sem patří např. obrázky.',
|
||||
'This App': 'Tato aplikace',
|
||||
'This is a copy of the scaffolding application': 'Toto je kopie aplikace skelet.',
|
||||
'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk',
|
||||
'This is the %(filename)s template': 'This is the %(filename)s template',
|
||||
'this page to see if a breakpoint was hit and debug interaction is required.': 'tuto stránku, abyste uviděli, zda se dosáhlo bodu přerušení.',
|
||||
'Ticket': 'Ticket',
|
||||
'Ticket ID': 'Ticket ID',
|
||||
'Time in Cache (h:m:s)': 'Čas v Cache (h:m:s)',
|
||||
'Timestamp': 'Časové razítko',
|
||||
'to previous version.': 'k předchozí verzi.',
|
||||
'To create a plugin, name a file/folder plugin_[name]': 'Zásuvný modul vytvoříte tak, že pojmenujete soubor/adresář plugin_[jméno modulu]',
|
||||
'To emulate a breakpoint programatically, write:': 'K nastavení bodu přerušení v kódu programu, napište:',
|
||||
'to use the debugger!': ', abyste mohli ladící program používat!',
|
||||
'toggle breakpoint': 'vyp./zap. bod přerušení',
|
||||
'Toggle Fullscreen': 'Na celou obrazovku a zpět',
|
||||
'too short': 'Příliš krátké',
|
||||
'Traceback': 'Traceback',
|
||||
'Translation strings for the application': 'Překlad textů pro aplikaci',
|
||||
'try something like': 'try something like',
|
||||
'Try the mobile interface': 'Zkuste rozhraní pro mobilní zařízení',
|
||||
'try view': 'try view',
|
||||
'Twitter': 'Twitter',
|
||||
'Type python statement in here and hit Return (Enter) to execute it.': 'Type python statement in here and hit Return (Enter) to execute it.',
|
||||
'Type some Python code in here and hit Return (Enter) to execute it.': 'Type some Python code in here and hit Return (Enter) to execute it.',
|
||||
'Unable to check for upgrades': 'Unable to check for upgrades',
|
||||
'unable to parse csv file': 'csv soubor nedá sa zpracovat',
|
||||
'uncheck all': 'vše odznačit',
|
||||
'Uninstall': 'Odinstalovat',
|
||||
'update': 'aktualizovat',
|
||||
'update all languages': 'aktualizovat všechny jazyky',
|
||||
'Update:': 'Upravit:',
|
||||
'Upgrade': 'Upgrade',
|
||||
'upgrade now': 'upgrade now',
|
||||
'upgrade now to %s': 'upgrade now to %s',
|
||||
'upload': 'nahrát',
|
||||
'Upload': 'Upload',
|
||||
'Upload a package:': 'Nahrát balík:',
|
||||
'Upload and install packed application': 'Nahrát a instalovat zabalenou aplikaci',
|
||||
'upload file:': 'nahrát soubor:',
|
||||
'upload plugin file:': 'nahrát soubor modulu:',
|
||||
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Použijte (...)&(...) pro AND, (...)|(...) pro OR a ~(...) pro NOT pro sestavení složitějších dotazů.',
|
||||
'User %(id)s Logged-in': 'Uživatel %(id)s přihlášen',
|
||||
'User %(id)s Logged-out': 'Uživatel %(id)s odhlášen',
|
||||
'User %(id)s Password changed': 'Uživatel %(id)s změnil heslo',
|
||||
'User %(id)s Profile updated': 'Uživatel %(id)s upravil profil',
|
||||
'User %(id)s Registered': 'Uživatel %(id)s se zaregistroval',
|
||||
'User %(id)s Username retrieved': 'Uživatel %(id)s si nachal zaslat přihlašovací jméno',
|
||||
'User ID': 'ID uživatele',
|
||||
'Username': 'Přihlašovací jméno',
|
||||
'variables': 'variables',
|
||||
'Verify Password': 'Zopakujte heslo',
|
||||
'Version': 'Verze',
|
||||
'Version %s.%s.%s (%s) %s': 'Verze %s.%s.%s (%s) %s',
|
||||
'Versioning': 'Verzování',
|
||||
'Videos': 'Videa',
|
||||
'View': 'Pohled (View)',
|
||||
'Views': 'Pohledy',
|
||||
'views': 'pohledy',
|
||||
'Web Framework': 'Web Framework',
|
||||
'web2py is up to date': 'Máte aktuální verzi web2py.',
|
||||
'web2py online debugger': 'Ladící online web2py program',
|
||||
'web2py Recent Tweets': 'Štěbetání na Twitteru o web2py',
|
||||
'web2py upgrade': 'web2py upgrade',
|
||||
'web2py upgraded; please restart it': 'web2py upgraded; please restart it',
|
||||
'Welcome': 'Vítejte',
|
||||
'Welcome to web2py': 'Vitejte ve web2py',
|
||||
'Welcome to web2py!': 'Vítejte ve web2py!',
|
||||
'Which called the function %s located in the file %s': 'která zavolala funkci %s v souboru (kontroléru) %s.',
|
||||
'You are successfully running web2py': 'Úspěšně jste spustili web2py.',
|
||||
'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'Nastavovat a mazat body přerušení je též možno v rámci editování zdrojového souboru přes tlačítko Vyp./Zap. bod přerušení',
|
||||
'You can modify this application and adapt it to your needs': 'Tuto aplikaci si můžete upravit a přizpůsobit ji svým potřebám.',
|
||||
'You need to set up and reach a': 'Je třeba nejprve nastavit a dojít až na',
|
||||
'You visited the url %s': 'Navštívili jste stránku %s,',
|
||||
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Aplikace bude blokována než se klikne na jedno z tlačítek (další, krok, pokračovat, atd.)',
|
||||
'You can inspect variables using the console bellow': 'Níže pomocí příkazové řádky si můžete prohlédnout proměnné',
|
||||
}
|
||||
|
||||
@@ -113,7 +113,9 @@
|
||||
'docs': 'docs',
|
||||
'done!': 'fatto!',
|
||||
'download layouts': 'download layouts',
|
||||
'Download layouts from repository': 'Download layouts from repository',
|
||||
'download plugins': 'download plugins',
|
||||
'Download plugins from repository': 'Download plugins from repository',
|
||||
'EDIT': 'MODIFICA',
|
||||
'Edit': 'modifica',
|
||||
'Edit application': 'Modifica applicazione',
|
||||
@@ -323,7 +325,7 @@
|
||||
'unable to uninstall "%s"': 'impossibile disinstallare "%s"',
|
||||
'unable to upgrade because "%s"': 'impossibile aggiornare perché "%s"',
|
||||
'uncheck all': 'smarca tutti',
|
||||
'Uninstall': 'disinstalla',
|
||||
'Uninstall': 'Disinstalla',
|
||||
'update': 'aggiorna',
|
||||
'update all languages': 'aggiorna tutti i linguaggi',
|
||||
'Update:': 'Aggiorna:',
|
||||
@@ -346,7 +348,7 @@
|
||||
'Versioning': 'Versioning',
|
||||
'View': 'Vista',
|
||||
'view': 'vista',
|
||||
'Views': 'viste',
|
||||
'Views': 'Viste',
|
||||
'views': 'viste',
|
||||
'Web Framework': 'Web Framework',
|
||||
'web2py is up to date': 'web2py è aggiornato',
|
||||
|
||||
@@ -2,46 +2,85 @@
|
||||
{
|
||||
'!langcode!': 'ja-jp',
|
||||
'!langname!': '日本語',
|
||||
'%Y-%m-%d': '%Y-%m-%d',
|
||||
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
|
||||
'%s %%{row} deleted': '%s rows deleted',
|
||||
'%s %%{row} updated': '%s rows updated',
|
||||
'%Y-%m-%d': '%Y-%m-%d',
|
||||
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
|
||||
'(requires internet access)': '(インターネットアクセスが必要)',
|
||||
'(something like "it-it")': '(例: "it-it")',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
|
||||
'@markmin\x01Searching: **%s** %%{file}': '検索中: **%s** ファイル',
|
||||
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': '注意: 安全(HTTPS)な接続でログインするかlocalhostで実行されている必要があります。',
|
||||
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': '注意: テストはスレッドセーフではないので複数のテストを同時に実行しないでください。',
|
||||
'ATTENTION: you cannot edit the running application!': '注意: 実行中のアプリケーションは編集できません!',
|
||||
'Abort': '中断',
|
||||
'About': 'About',
|
||||
'About application': 'アプリケーションについて',
|
||||
'Additional code for your application': 'アプリケーションに必要な追加記述',
|
||||
'Admin language': '管理画面の言語',
|
||||
'administrative interface': '管理画面',
|
||||
'Administrator Password:': '管理者パスワード:',
|
||||
'and rename it:': 'ファイル名を変更:',
|
||||
'appadmin': 'アプリ管理画面',
|
||||
'application "%s" uninstalled': '"%s"アプリケーションが削除されました',
|
||||
'application compiled': 'アプリケーションがコンパイルされました',
|
||||
'Application name:': 'アプリケーション名:',
|
||||
'are not used': 'are not used',
|
||||
'are not used yet': 'are not used yet',
|
||||
'Are you sure you want to delete plugin "%s"?': '"%s"プラグインを削除してもよろしいですか?',
|
||||
'Are you sure you want to delete this object?': 'このオブジェクトを削除してもよろしいですか?',
|
||||
'Are you sure you want to uninstall application "%s"?': '"%s"アプリケーションを削除してもよろしいですか?',
|
||||
'arguments': '引数',
|
||||
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': '注意: 安全(HTTPS)な接続でログインするかlocalhostで実行されている必要があります。',
|
||||
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': '注意: テストはスレッドセーフではないので複数のテストを同時に実行しないでください。',
|
||||
'ATTENTION: you cannot edit the running application!': '注意: 実行中のアプリケーションは編集できません!',
|
||||
'Available databases and tables': '利用可能なデータベースとテーブル一覧',
|
||||
'back': '戻る',
|
||||
'Basics': '基本情報',
|
||||
'Begin': '開始',
|
||||
'cache': 'cache',
|
||||
'cannot upload file "%(filename)s"': '"%(filename)s"ファイルをアップロードできません',
|
||||
'Change admin password': '管理者パスワード変更',
|
||||
'check all': '全てを選択',
|
||||
'Check for upgrades': '更新チェック',
|
||||
'Checking for upgrades...': '更新を確認中...',
|
||||
'Clean': '一時データ削除',
|
||||
'Click row to expand traceback': '列をクリックしてトレースバックを展開',
|
||||
'code': 'コード',
|
||||
'collapse/expand all': '全て開閉する',
|
||||
'Compile': 'コンパイル',
|
||||
'compiled application removed': 'コンパイル済みのアプリケーションが削除されました',
|
||||
'Controllers': 'コントローラ',
|
||||
'controllers': 'コントローラ',
|
||||
'Count': '回数',
|
||||
'Create': '作成',
|
||||
'create file with filename:': 'ファイル名:',
|
||||
'Create/Upload': 'Create/Upload',
|
||||
'created by': '作成者',
|
||||
'crontab': 'crontab',
|
||||
'currently running': '現在実行中',
|
||||
'currently saved or': '現在保存されているデータ または',
|
||||
'database administration': 'データベース管理',
|
||||
'db': 'db',
|
||||
'Debug': 'Debug',
|
||||
'defines tables': 'テーブル定義',
|
||||
'Delete': '削除',
|
||||
'delete all checked': '選択したデータを全て削除',
|
||||
'delete plugin': 'プラグイン削除',
|
||||
'Delete this file (you will be asked to confirm deletion)': 'ファイルの削除(確認画面が出ます)',
|
||||
'Deploy': 'デプロイ',
|
||||
'Deploy on Google App Engine': 'Google App Engineにデプロイ',
|
||||
'design': 'デザイン',
|
||||
'Detailed traceback description': '詳細なトレースバック内容',
|
||||
'details': '詳細',
|
||||
'direction: ltr': 'direction: ltr',
|
||||
'Disable': '無効',
|
||||
'docs': 'ドキュメント',
|
||||
'download layouts': 'レイアウトのダウンロード',
|
||||
'Download layouts from repository': 'Download layouts from repository',
|
||||
'download plugins': 'プラグインのダウンロード',
|
||||
'Download plugins from repository': 'Download plugins from repository',
|
||||
'Edit': '編集',
|
||||
'edit all': '全て編集',
|
||||
'Edit application': 'アプリケーションを編集',
|
||||
'edit views:': 'ビューの編集:',
|
||||
'Editing file "%s"': '"%s"ファイルを編集中',
|
||||
'Enable': '有効',
|
||||
'Error': 'エラー',
|
||||
@@ -50,46 +89,83 @@
|
||||
'Error ticket': 'エラーチケット',
|
||||
'Errors': 'エラー',
|
||||
'Exception instance attributes': '例外インスタンス引数',
|
||||
'exposes': '公開',
|
||||
'exposes:': '公開:',
|
||||
'extends': '継承',
|
||||
'File': 'ファイル',
|
||||
'filter': 'フィルタ',
|
||||
'Frames': 'フレーム',
|
||||
'Functions with no doctests will result in [passed] tests.': 'doctestsのない関数は自動的にテストをパスします。',
|
||||
'Generate': 'アプリ生成',
|
||||
'Get from URL:': 'URLから取得:',
|
||||
'go!': '実行!',
|
||||
'graph model': 'graph model',
|
||||
'Help': 'ヘルプ',
|
||||
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'もし上記のレポートにチケット番号が含まれる場合は、doctestを実行する前に、コントローラの実行で問題があったことを示します。これはインデントの問題やその関数の外部で問題があった場合に起きるが一般的です。\n緑色のタイトルは全てのテスト(もし定義されていれば)をパスしたことを示します。その場合、テスト結果は表示されません。',
|
||||
'includes': 'インクルード',
|
||||
'index': 'index',
|
||||
'inspect attributes': '引数の検査',
|
||||
'Install': 'インストール',
|
||||
'Installed applications': 'アプリケーション一覧',
|
||||
'languages': '言語',
|
||||
'Languages': '言語',
|
||||
'Last saved on:': '最終保存日時:',
|
||||
'License for': 'License for',
|
||||
'loading...': 'ロードしています...',
|
||||
'locals': 'ローカル',
|
||||
'Login': 'ログイン',
|
||||
'Login to the Administrative Interface': '管理画面へログイン',
|
||||
'Logout': 'ログアウト',
|
||||
'models': 'モデル',
|
||||
'Models': 'モデル',
|
||||
'Modules': 'モジュール',
|
||||
'NO': 'いいえ',
|
||||
'modules': 'モジュール',
|
||||
'New Application Wizard': '新規アプリケーション作成ウィザード',
|
||||
'New application wizard': '新規アプリケーション作成ウィザード',
|
||||
'new plugin installed': '新しいプラグインがインストールされました',
|
||||
'New simple application': '新規アプリケーション',
|
||||
'NO': 'いいえ',
|
||||
'No databases in this application': 'このアプリケーションにはデータベースが存在しません',
|
||||
'no package selected': 'no package selected',
|
||||
'online designer': 'オンラインデザイナー',
|
||||
'or alternatively': 'or alternatively',
|
||||
'Overwrite installed app': 'アプリケーションを上書き',
|
||||
'Pack all': 'パッケージ化',
|
||||
'Pack compiled': 'コンパイルデータのパッケージ化',
|
||||
'pack plugin': 'プラグインのパッケージ化',
|
||||
'Peeking at file': 'ファイルを参照',
|
||||
'plugin "%(plugin)s" deleted': '"%(plugin)s"プラグインは削除されました',
|
||||
'Plugin "%s" in application': '"%s"プラグイン',
|
||||
'Plugins': 'プラグイン',
|
||||
'plugins': 'プラグイン',
|
||||
'Plural-Forms:': 'Plural-Forms:',
|
||||
'Powered by': 'Powered by',
|
||||
'Private files': 'Private files',
|
||||
'private files': 'private files',
|
||||
'Reload routes': 'ルーティング再読み込み',
|
||||
'Remove compiled': 'コンパイルデータの削除',
|
||||
'request': 'リクエスト',
|
||||
'response': 'レスポンス',
|
||||
'restart': '最初からやり直し',
|
||||
'restore': '復元',
|
||||
'revert': '一つ前に戻す',
|
||||
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "このファイルのテストを実行(全てのファイルに対して実行する場合は、'テスト'というボタンを使用できます)",
|
||||
'Save': '保存',
|
||||
'Saved file hash:': '保存されたファイルハッシュ:',
|
||||
'session': 'セッション',
|
||||
'session expired': 'セッションの有効期限が切れました',
|
||||
'shell': 'shell',
|
||||
'Site': 'サイト',
|
||||
'skip to generate': 'スキップしてアプリ生成画面へ移動',
|
||||
'Sorry, could not find mercurial installed': 'インストールされているmercurialが見つかりません',
|
||||
'Start a new app': '新規アプリの作成',
|
||||
'Start wizard': 'ウィザードの開始',
|
||||
'state': 'state',
|
||||
'static': '静的ファイル',
|
||||
'Static': 'Static',
|
||||
'Static files': '静的ファイル',
|
||||
'Step': 'ステップ',
|
||||
'test': 'テスト',
|
||||
'Testing application': 'アプリケーションをテスト中',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'アプリケーションロジック、それぞれのURLパスはコントローラで公開されている各関数にマッピングされています',
|
||||
'The data representation, define database tables and sets': 'データの表示方法, テーブルとセットの定義',
|
||||
@@ -97,93 +173,34 @@
|
||||
'There are no controllers': 'コントローラがありません',
|
||||
'There are no modules': 'モジュールがありません',
|
||||
'There are no plugins': 'プラグインはありません',
|
||||
'There are no private files': 'There are no private files',
|
||||
'There are no translators, only default language is supported': '翻訳がないためデフォルト言語のみをサポートします',
|
||||
'There are no views': 'ビューがありません',
|
||||
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
|
||||
'These files are served without processing, your images go here': 'これらのファイルは直接参照されます, ここに画像が入ります',
|
||||
'Ticket ID': 'チケットID',
|
||||
'to previous version.': '前のバージョンへ戻す。',
|
||||
'To create a plugin, name a file/folder plugin_[name]': 'ファイル名/フォルダ名 plugin_[名称]としてプラグインを作成してください',
|
||||
'Traceback': 'トレースバック',
|
||||
'Translation strings for the application': 'アプリケーションの翻訳文字列',
|
||||
'Unable to download because:': '以下の理由でダウンロードできません:',
|
||||
'Uninstall': 'アプリ削除',
|
||||
'Upload a package:': 'パッケージをアップロード:',
|
||||
'Upload and install packed application': 'パッケージのアップロードとインストール',
|
||||
'Version': 'バージョン',
|
||||
'Versioning': 'バージョン管理',
|
||||
'Views': 'ビュー',
|
||||
'Web Framework': 'Web Framework',
|
||||
'YES': 'はい',
|
||||
'administrative interface': '管理画面',
|
||||
'and rename it:': 'ファイル名を変更:',
|
||||
'appadmin': 'アプリ管理画面',
|
||||
'application "%s" uninstalled': '"%s"アプリケーションが削除されました',
|
||||
'application compiled': 'アプリケーションがコンパイルされました',
|
||||
'arguments': '引数',
|
||||
'back': '戻る',
|
||||
'cache': 'cache',
|
||||
'cannot upload file "%(filename)s"': '"%(filename)s"ファイルをアップロードできません',
|
||||
'check all': '全てを選択',
|
||||
'code': 'コード',
|
||||
'collapse/expand all': '全て開閉する',
|
||||
'compiled application removed': 'コンパイル済みのアプリケーションが削除されました',
|
||||
'controllers': 'コントローラ',
|
||||
'create file with filename:': 'ファイル名:',
|
||||
'created by': '作成者',
|
||||
'crontab': 'crontab',
|
||||
'currently running': '現在実行中',
|
||||
'currently saved or': '現在保存されているデータ または',
|
||||
'database administration': 'データベース管理',
|
||||
'db': 'db',
|
||||
'defines tables': 'テーブル定義',
|
||||
'delete all checked': '選択したデータを全て削除',
|
||||
'delete plugin': 'プラグイン削除',
|
||||
'design': 'デザイン',
|
||||
'details': '詳細',
|
||||
'direction: ltr': 'direction: ltr',
|
||||
'docs': 'ドキュメント',
|
||||
'download layouts': 'レイアウトのダウンロード',
|
||||
'download plugins': 'プラグインのダウンロード',
|
||||
'edit all': '全て編集',
|
||||
'edit views:': 'ビューの編集:',
|
||||
'exposes': '公開',
|
||||
'exposes:': '公開:',
|
||||
'extends': '継承',
|
||||
'filter': 'フィルタ',
|
||||
'go!': '実行!',
|
||||
'includes': 'インクルード',
|
||||
'index': 'index',
|
||||
'inspect attributes': '引数の検査',
|
||||
'languages': '言語',
|
||||
'loading...': 'ロードしています...',
|
||||
'locals': 'ローカル',
|
||||
'models': 'モデル',
|
||||
'modules': 'モジュール',
|
||||
'new plugin installed': '新しいプラグインがインストールされました',
|
||||
'online designer': 'オンラインデザイナー',
|
||||
'pack plugin': 'プラグインのパッケージ化',
|
||||
'plugin "%(plugin)s" deleted': '"%(plugin)s"プラグインは削除されました',
|
||||
'plugins': 'プラグイン',
|
||||
'request': 'リクエスト',
|
||||
'response': 'レスポンス',
|
||||
'restart': '最初からやり直し',
|
||||
'restore': '復元',
|
||||
'revert': '一つ前に戻す',
|
||||
'session': 'セッション',
|
||||
'session expired': 'セッションの有効期限が切れました',
|
||||
'shell': 'shell',
|
||||
'skip to generate': 'スキップしてアプリ生成画面へ移動',
|
||||
'state': 'state',
|
||||
'static': '静的ファイル',
|
||||
'test': 'テスト',
|
||||
'to previous version.': '前のバージョンへ戻す。',
|
||||
'uncheck all': '全ての選択を解除',
|
||||
'Uninstall': 'アプリ削除',
|
||||
'update all languages': '全ての言語を更新',
|
||||
'upload': 'アップロード',
|
||||
'Upload': 'Upload',
|
||||
'Upload a package:': 'パッケージをアップロード:',
|
||||
'Upload and install packed application': 'パッケージのアップロードとインストール',
|
||||
'upload file:': 'ファイルをアップロード:',
|
||||
'upload plugin file:': 'プラグインファイルをアップロード:',
|
||||
'user': 'ユーザー',
|
||||
'variables': '変数',
|
||||
'Version': 'バージョン',
|
||||
'Versioning': 'バージョン管理',
|
||||
'Views': 'ビュー',
|
||||
'views': 'ビュー',
|
||||
'web2py Recent Tweets': '最近のweb2pyTweets',
|
||||
'Web Framework': 'Web Framework',
|
||||
'web2py is up to date': 'web2pyは最新です',
|
||||
'web2py Recent Tweets': '最近のweb2pyTweets',
|
||||
'YES': 'はい',
|
||||
}
|
||||
|
||||
@@ -0,0 +1,278 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
{
|
||||
'!langcode!': 'my-mm',
|
||||
'!langname!': 'မြန်မာ',
|
||||
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
|
||||
'%s %%{row} deleted': '%s %%{row} ဖျက်ပြီးပြီ',
|
||||
'%s %%{row} updated': '%s %%{row} ပြင်ပြီးပြီ',
|
||||
'%s selected': '%s ခု ရွေးထားသည်',
|
||||
'%Y-%m-%d': '%Y-%m-%d',
|
||||
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
|
||||
'(requires internet access, experimental)': '(requires internet access, experimental)',
|
||||
'(something like "it-it")': '(something like "it-it")',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
|
||||
'About': 'အကြောင်း',
|
||||
'Access Control': 'အသုံးပြု ခြင်းဆိုင်ရာ ထိန်းချုပ်ရန်',
|
||||
'Additional code for your application': 'Additional code for your application',
|
||||
'Admin language': 'Admin language',
|
||||
'administrative interface': 'administrative interface',
|
||||
'Administrative Interface': 'စီမံခန့်ခွဲရာ အင်တာဖေ့စ်',
|
||||
'Administrator Password:': 'Administrator Password:',
|
||||
'Ajax Recipes': 'Ajax Recipes',
|
||||
'and rename it:': 'and rename it:',
|
||||
'appadmin is disabled because insecure channel': 'စိတ်မချရသော လမ်းကြောင်းမှ ဝင်ရောက်သဖြင့် appadmin ကို အသုံးပြု၍ မရပါ',
|
||||
'Application name:': 'Application name:',
|
||||
'are not used': 'အသုံးမပြုပါ',
|
||||
'are not used yet': 'အသုံးမပြုသေးပါ',
|
||||
'Are you sure you want to delete this object?': 'သင် ဒီအရာ ဖျက်ရန် သေချာပါသလား။',
|
||||
'Available Databases and Tables': 'အသုံးပြုနိုင်သော ဒေတာဘေစ့်များနှင့် ဇယားများ',
|
||||
'Buy this book': 'ဒီစာအုပ်ကို ဝယ်ပါ',
|
||||
'cache': 'cache',
|
||||
'Cache': 'Cache',
|
||||
'Cache Keys': 'Cache Keys',
|
||||
'can be a git repo': 'can be a git repo',
|
||||
'Cannot be empty': 'အလွတ် မဖြစ်ရပါ',
|
||||
'Change admin password': 'Change admin password',
|
||||
'Check to delete': 'ဖျက်ရန် စစ်ဆေးပါ',
|
||||
'Checking for upgrades...': 'အဆင့်မြှင့်တင်မှုများအတွက် စစ်ဆေးနေသည် ...',
|
||||
'Clean': 'ရှင်းလင်းရန်',
|
||||
'Clear CACHE?': 'CACHE ကို ရှင်းလင်းမည်မှာ ဟုတ်ပါသလား။',
|
||||
'Clear DISK': 'DISK ကို ရှင်းလင်းမည်။',
|
||||
'Clear RAM': 'RAM ကို ရှင်းလင်းမည်။',
|
||||
'Client IP': 'Client IP',
|
||||
'collapse/expand all': 'collapse/expand all',
|
||||
'Community': 'အသိုင်းအဝိုင်း',
|
||||
'Compile': 'Compile',
|
||||
'Components and Plugins': 'Components and Plugins',
|
||||
'Controller': 'ကွန်ထရိုလာ',
|
||||
'Controllers': 'ကွန်ထရိုလာများ',
|
||||
'controllers': 'controllers',
|
||||
'Copyright': 'မူပိုင်ခွင့်',
|
||||
'Create': 'ဖန်တီးရန်',
|
||||
'create file with filename:': 'create file with filename:',
|
||||
'Create/Upload': 'Create/Upload',
|
||||
'created by': 'ဖန်းတီးသူ',
|
||||
'Created By': 'ပြုလုပ်ဖန်တီးသူ',
|
||||
'Created On': 'ပြုလုပ်ဖန်တီးသည့်အချိန်',
|
||||
'crontab': 'crontab',
|
||||
'Current request': 'Current request',
|
||||
'Current response': 'Current response',
|
||||
'Current session': 'Current session',
|
||||
'currently running': 'လက်ရှိတွင် လုပ်ဆောင်နေသည်',
|
||||
'data uploaded': 'data uploaded',
|
||||
'Database': 'ဒေတာဘေစ့်',
|
||||
'Database %s select': 'Database %s select',
|
||||
'database administration': 'ဒေတာဘေ့(စ်) စီမံခန့်ခွဲခြင်း',
|
||||
'Database Administration (appadmin)': 'ဒေတာဘေစ့် စီမံခန့်ခွဲခြင်း (appadmin)',
|
||||
'db': 'db',
|
||||
'DB Model': 'DB Model',
|
||||
'Debug': 'အမှားရှာရန်',
|
||||
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
|
||||
'Delete:': 'Delete:',
|
||||
'Demo': 'အစမ်း၊ သရုပ်ပြမှုများ',
|
||||
'Deploy': 'Deploy',
|
||||
'Deploy on Google App Engine': 'Deploy on Google App Engine',
|
||||
'Deploy to OpenShift': 'Deploy to OpenShift',
|
||||
'Deployment Recipes': 'Deployment Recipes',
|
||||
'Description': 'ဖော်ပြချက်',
|
||||
'design': 'design',
|
||||
'direction: ltr': 'direction: ltr',
|
||||
'Disable': 'ပိတ်ရန်',
|
||||
'DISK': 'DISK',
|
||||
'Disk Cache Keys': 'Disk Cache Keys',
|
||||
'Disk Cleared': 'Disk ရှင်းလင်းပြီးပြီ',
|
||||
'Documentation': 'စာရွက်စာတမ်း အထောက်အကူများ',
|
||||
"Don't know what to do?": 'ဘာလုပ်ရမည်မသိ ဖြစ်နေပါသလား။',
|
||||
'done!': 'လုပ်ငန်း ဆောင်ရွက်ပြီးပြီ!',
|
||||
'Download': 'Download',
|
||||
'Download layouts from repository': 'Download layouts from repository',
|
||||
'Download plugins from repository': 'Download plugins from repository',
|
||||
'E-mail': 'အီးမေးလ်',
|
||||
'Edit': 'ပြင်ဆင်ရန်',
|
||||
'Edit application': 'Application ကို ပြင်ရန်',
|
||||
'Edit current record': 'လက်ရှိ မှတ်တမ်းကို ပြင်ရန်',
|
||||
'Email and SMS': 'အီးမေးလ်နှင့် SMS',
|
||||
'Enable': 'ဖွင့်ရန်',
|
||||
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
|
||||
'Errors': 'အမှားများ',
|
||||
'export as csv file': ' csv file အနေနဲ့ ထုတ်ပေးရန်',
|
||||
'exposes': 'exposes',
|
||||
'extends': 'extends',
|
||||
'FAQ': 'ဖြစ်လေ့ရှိသော ပြဿနာများ',
|
||||
'filter': 'filter',
|
||||
'First name': 'အမည်၏ ပထမဆုံး စာလုံး',
|
||||
'Forms and Validators': 'Forms and Validators',
|
||||
'Free Applications': 'အခမဲ့ Applications',
|
||||
'graph model': 'graph model',
|
||||
'Graph Model': 'Graph Model',
|
||||
'Group ID': 'Group ID',
|
||||
'Groups': 'အဖွဲ့များ',
|
||||
'Hello World': 'မင်္ဂလာပါ ကမ္ဘာကြီး။',
|
||||
'Help': 'အကူအညီ',
|
||||
'Home': 'မူလသို့',
|
||||
'How did you get here?': 'သင် ဘယ်လို ရောက်လာခဲ့သလဲ။',
|
||||
'import': 'သွင်းယူရန်',
|
||||
'Import/Export': 'သွင်းယူရန်/ထုတ်ယူရန်',
|
||||
'includes': 'includes',
|
||||
'Install': 'Install',
|
||||
'Installed applications': 'ထည့်သွင်းပြီး application များ',
|
||||
'Internal State': 'Internal State',
|
||||
'Introduction': 'မိတ်ဆက်',
|
||||
'Invalid email': 'အီးမေးလ် ဖြည့်သွင်းမှုမှားနေသည်',
|
||||
'Invalid Query': 'Invalid Query',
|
||||
'invalid request': 'invalid request',
|
||||
'Is Active': 'Is Active',
|
||||
'Key': 'Key',
|
||||
'Language': 'ဘာသာစကား',
|
||||
'languages': 'ဘာသာစကားများ',
|
||||
'Languages': 'ဘာသာစကားများ',
|
||||
'Last name': 'မျိုးနွယ်အမည်',
|
||||
'Layout': 'အပြင်အဆင်',
|
||||
'Layout Plugins': 'Layout Plugins',
|
||||
'Layouts': 'အပြင်အဆင်များ',
|
||||
'Live Chat': 'တိုက်ရိုက် ဆက်သွယ် ပြောကြားရန်',
|
||||
'Login': 'ဝင်ရောက်အသုံးပြုရန်',
|
||||
'Login to the Administrative Interface': 'Login to the Administrative Interface',
|
||||
'Logout': 'ထွက်ရန်',
|
||||
'Lost Password': 'စကားဝှက် မသိတော့ပါ',
|
||||
'Lost password?': 'စကားဝှက် မသိတော့ဘူးလား။',
|
||||
'Manage': 'စီမံခန့်ခွဲရန်',
|
||||
'Manage %(action)s': '%(action)s ကို စီမံရန်',
|
||||
'Manage Access Control': 'အသုံးပြုခြင်းဆိုင်ရာ ထိန်းချုပ်မှု စီမံခန့်ခွဲရန်',
|
||||
'Manage Cache': 'Manage Cache',
|
||||
'Memberships': 'အသင်းဝင်များ',
|
||||
'Menu Model': 'Menu Model',
|
||||
'models': 'models',
|
||||
'Models': 'Models',
|
||||
'Modified By': 'ပြင်ဆင်မွမ်းမံသူ',
|
||||
'Modified On': 'ပြင်ဆင်မွမ်းမံသည့် အချိန်',
|
||||
'Modules': 'Modules',
|
||||
'modules': 'modules',
|
||||
'My Sites': 'ကျွန်ုပ်၏ Site များ',
|
||||
'Name': 'အမည်',
|
||||
'New application wizard': 'New application wizard',
|
||||
'New Record': 'မှတ်တမ်း အသစ်',
|
||||
'new record inserted': 'မှတ်တမ်း အသစ် ဖြည့်သွင်းပြီးပြီ',
|
||||
'New simple application': 'ရိုးရိုး application အသစ်',
|
||||
'next %s rows': 'နောက်အတန်း %s တန်း',
|
||||
'No databases in this application': 'ဒီ application တွင် မည်သည့် ဒေတာဘေစ့်မှ မရှိပါ',
|
||||
'no package selected': 'no package selected',
|
||||
'Object or table name': 'Object or table name',
|
||||
'Online examples': 'အွန်လိုင်း နမူနာများ',
|
||||
'or alternatively': 'or alternatively',
|
||||
'Or Get from URL:': 'Or Get from URL:',
|
||||
'or import from csv file': 'or import from csv file',
|
||||
'Origin': 'မူလ အစ',
|
||||
'Other Plugins': 'အခြား Plugins',
|
||||
'Other Recipes': 'အခြား Recipes',
|
||||
'Overview': 'အပေါ်ယံရှုမြင်ခြင်း',
|
||||
'Overwrite installed app': 'Overwrite installed app',
|
||||
'Pack all': 'အားလုံးကို ထုပ်ပိုးရန်',
|
||||
'Pack custom': 'ရွေးချယ်ထုပ်ပိုးရန်',
|
||||
'Password': 'စကားဝှက်',
|
||||
"Password fields don't match": 'စကားဝှက်များ ကိုက်ညီမှု မရှိပါ',
|
||||
'Permission': 'ခွင့်ပြုချက်',
|
||||
'Permissions': 'ခွင့်ပြုချက်များ',
|
||||
'please input your password again': 'ကျေးဇူးပြု၍ စကားဝှက်ကို ထပ်မံ ဖြည့်သွင်းပေးပါ',
|
||||
'Plugins': 'Plugins',
|
||||
'plugins': 'plugins',
|
||||
'Plural-Forms:': 'Plural-Forms:',
|
||||
'Powered by': 'အားဖြည့်စွမ်းအားပေးသူ',
|
||||
'Preface': 'နိဒါန်း',
|
||||
'previous %s rows': 'previous %s rows',
|
||||
'Private files': 'Private files',
|
||||
'private files': 'private files',
|
||||
'pygraphviz library not found': 'pygraphviz library ကို မတွေ့ပါ',
|
||||
'Python': 'Python',
|
||||
'Query:': 'Query:',
|
||||
'Quick Examples': 'အမြန် အသုံးပြုနိုင်သော နမူနာများ',
|
||||
'RAM': 'RAM',
|
||||
'RAM Cache Keys': 'RAM Cache Keys',
|
||||
'Ram Cleared': 'Ram ရှင်းလင်းပြီးပြီ',
|
||||
'Recipes': 'Recipes',
|
||||
'Record': 'မှတ်တမ်း',
|
||||
'record does not exist': 'မှတ်တမ်း မရှိပါ',
|
||||
'Record ID': 'Record ID',
|
||||
'Record id': 'Record id',
|
||||
'Register': 'မှတ်ပုံတင်ရန်',
|
||||
'Registration identifier': 'Registration identifier',
|
||||
'Registration key': 'Registration key',
|
||||
'Reload routes': 'Reload routes',
|
||||
'Remember me (for 30 days)': 'Remember me (for 30 days)',
|
||||
'Request reset password': 'စကားဝှက် အသစ် တောင်းဆိုရန်',
|
||||
'Reset Password key': 'Reset Password key',
|
||||
'Role': 'Role',
|
||||
'Roles': 'Roles',
|
||||
'Rows in Table': 'Rows in Table',
|
||||
'Rows selected': 'ရွေးထားသော အတန်းများ',
|
||||
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
|
||||
'Running on %s': 'Running on %s',
|
||||
'Save model as...': 'Save model as...',
|
||||
'Semantic': 'Semantic',
|
||||
'Services': 'Services',
|
||||
'shell': 'shell',
|
||||
'Site': 'Site',
|
||||
'Size of cache:': 'Size of cache:',
|
||||
'Start wizard': 'Start wizard',
|
||||
'state': 'state',
|
||||
'static': 'static',
|
||||
'Static': 'Static',
|
||||
'Statistics': 'ကိန်းဂဏန်း အချက်အလက်များ',
|
||||
'Stylesheet': 'Stylesheet',
|
||||
'submit': 'ပြုလုပ်ပါ',
|
||||
'Submit': 'Submit',
|
||||
'Support': 'အထောက်အပံ့',
|
||||
'Table': 'ဇယား',
|
||||
'test': 'test',
|
||||
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
|
||||
'The Core': 'The Core',
|
||||
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
|
||||
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
|
||||
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
|
||||
'The Views': 'The Views',
|
||||
'There are no plugins': 'There are no plugins',
|
||||
'There are no private files': 'There are no private files',
|
||||
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
|
||||
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
|
||||
'This App': 'ဒီ App',
|
||||
'This email already has an account': 'ဒီအီးမေးလ်တွင် အကောင့် ရှိပြီး ဖြစ်ပါသည်',
|
||||
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
|
||||
'Timestamp': 'Timestamp',
|
||||
'To create a plugin, name a file/folder plugin_[name]': 'To create a plugin, name a file/folder plugin_[name]',
|
||||
'Traceback': 'Traceback',
|
||||
'Translation strings for the application': 'Translation strings for the application',
|
||||
'Try the mobile interface': 'Try the mobile interface',
|
||||
'Twitter': 'Twitter',
|
||||
'unable to parse csv file': 'unable to parse csv file',
|
||||
'Uninstall': 'Uninstall',
|
||||
'update all languages': 'update all languages',
|
||||
'Update:': 'Update:',
|
||||
'Upload': 'Upload',
|
||||
'Upload a package:': 'Upload a package:',
|
||||
'Upload and install packed application': 'Upload and install packed application',
|
||||
'upload file:': 'upload file:',
|
||||
'upload plugin file:': 'upload plugin file:',
|
||||
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
|
||||
'User': 'အသုံးပြုသူ',
|
||||
'User ID': 'User ID',
|
||||
'Users': 'အသုံးပြုသူများ',
|
||||
'Verify Password': 'စကားဝှက်ကို အတည်ပြုပါ',
|
||||
'Version': 'Version',
|
||||
'Versioning': 'Versioning',
|
||||
'Videos': 'ဗွီဒီယိုများ',
|
||||
'View': 'ဗျူး',
|
||||
'views': 'views',
|
||||
'Views': 'ဗျူးများ',
|
||||
'Web Framework': 'Web Framework',
|
||||
'Welcome': 'ကြိုဆိုပါ၏',
|
||||
'Welcome to web2py!': 'web2py မှ ကြိုဆိုပါသည်။',
|
||||
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
|
||||
'Working...': 'ဆောင်ရွက်နေပါသည် ။ ။ ။',
|
||||
'You are successfully running web2py': 'သင်သည် web2py ကို အောင်မြင်စွာ လည်ပတ်မောင်းနှင်စေပါသည်။',
|
||||
'You can modify this application and adapt it to your needs': 'သင် ဒီ application ကို ပြုပြင်မွမ်းမံနိုင်ပါသည်။ ထို့အပြင် သင့်လိုအပ်ချက်များနှင့် ကိုက်ညီစေရန် ပြုလုပ်နိုင်ပါသည်။',
|
||||
'You visited the url %s': 'သင် လည်ပတ်ခဲ့သော URL %s',
|
||||
'စကားဝှက် အသစ် တောင်းဆိုရန်': 'စကားဝှက် အသစ် တောင်းဆိုရန်',
|
||||
'မှတ်ပုံတင်ရန်': 'မှတ်ပုံတင်ရန်',
|
||||
'ဝင်ရောက်အသုံးပြုရန်': 'ဝင်ရောက်အသုံးပြုရန်',
|
||||
}
|
||||
@@ -7,8 +7,10 @@
|
||||
'%s %%{row} updated': '%s registros atualizados',
|
||||
'%Y-%m-%d': '%d/%m/%Y',
|
||||
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
|
||||
'(requires internet access)': '(requer acesso a internet)',
|
||||
'(requires internet access)': '(requer acesso à internet)',
|
||||
'(requires internet access, experimental)': '(requer acesso à internet, experimental)',
|
||||
'(something like "it-it")': '(algo como "it-it")',
|
||||
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(file **gluon/contrib/plural_rules/%s.py** is not found)',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
|
||||
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
|
||||
'A new version of web2py is available': 'Está disponível uma nova versão do web2py',
|
||||
@@ -16,7 +18,7 @@
|
||||
'About': 'sobre',
|
||||
'About application': 'Sobre a aplicação',
|
||||
'additional code for your application': 'código adicional para sua aplicação',
|
||||
'Additional code for your application': 'Additional code for your application',
|
||||
'Additional code for your application': 'Código adicional para a sua aplicação',
|
||||
'admin disabled because no admin password': ' admin desabilitado por falta de senha definida',
|
||||
'admin disabled because not supported on google app engine': 'admin dehabilitado, não é soportado no GAE',
|
||||
'admin disabled because unable to access password file': 'admin desabilitado, não foi possível ler o arquivo de senha',
|
||||
@@ -33,6 +35,8 @@
|
||||
'application compiled': 'aplicação compilada',
|
||||
'application is compiled and cannot be designed': 'A aplicação está compilada e não pode ser modificada',
|
||||
'Application name:': 'Nome da aplicação:',
|
||||
'are not used': 'não usadas',
|
||||
'are not used yet': 'ainda não usadas',
|
||||
'Are you sure you want to delete file "%s"?': 'Tem certeza que deseja apagar o arquivo "%s"?',
|
||||
'Are you sure you want to delete plugin "%s"?': 'Tem certeza que deseja apagar o plugin "%s"?',
|
||||
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
|
||||
@@ -43,17 +47,20 @@
|
||||
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENÇÃO o login requer uma conexão segura (HTTPS) ou executar de localhost.',
|
||||
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENÇÃO OS TESTES NÃO THREAD SAFE, NÃO EFETUE MÚLTIPLOS TESTES AO MESMO TEMPO.',
|
||||
'ATTENTION: you cannot edit the running application!': 'ATENÇÃO: Não pode modificar a aplicação em execução!',
|
||||
'Autocomplete Python Code': 'Autocompletar Código Python',
|
||||
'Available databases and tables': 'Bancos de dados e tabelas disponíveis',
|
||||
'back': 'voltar',
|
||||
'browse': 'buscar',
|
||||
'cache': 'cache',
|
||||
'cache, errors and sessions cleaned': 'cache, erros e sessões eliminadas',
|
||||
'can be a git repo': 'can be a git repo',
|
||||
'Cannot be empty': 'Não pode ser vazio',
|
||||
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Não é possível compilar: Existem erros em sua aplicação. Depure, corrija os errros e tente novamente',
|
||||
'Cannot compile: there are errors in your app:': 'Não é possível compilar: Existem erros em sua aplicação',
|
||||
'cannot create file': 'Não é possível criar o arquivo',
|
||||
'cannot upload file "%(filename)s"': 'não é possível fazer upload do arquivo "%(filename)s"',
|
||||
'Change admin password': 'mudar senha de administrador',
|
||||
'change editor settings': 'mudar definições do editor',
|
||||
'Change Password': 'Trocar Senha',
|
||||
'check all': 'marcar todos',
|
||||
'Check for upgrades': 'checar por atualizações',
|
||||
@@ -67,7 +74,7 @@
|
||||
'click to open': 'clique para abrir',
|
||||
'Client IP': 'IP do cliente',
|
||||
'code': 'código',
|
||||
'collapse/expand all': 'collapse/expand all',
|
||||
'collapse/expand all': 'colapsar/expandir tudo',
|
||||
'commit (mercurial)': 'commit (mercurial)',
|
||||
'Compile': 'compilar',
|
||||
'compiled application removed': 'aplicação compilada removida',
|
||||
@@ -79,6 +86,7 @@
|
||||
'Create new application using the Wizard': 'Criar nova aplicação utilizando o assistente',
|
||||
'create new application:': 'nome da nova aplicação:',
|
||||
'Create new simple application': 'Crie uma nova aplicação',
|
||||
'Create/Upload': 'Create/Upload',
|
||||
'created by': 'criado por',
|
||||
'crontab': 'crontab',
|
||||
'Current request': 'Requisição atual',
|
||||
@@ -99,18 +107,24 @@
|
||||
'delete': 'apagar',
|
||||
'delete all checked': 'apagar marcados',
|
||||
'delete plugin': 'apagar plugin',
|
||||
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
|
||||
'Delete:': 'Apague:',
|
||||
'Deploy': 'publicar',
|
||||
'Deploy on Google App Engine': 'Publicar no Google App Engine',
|
||||
'Deploy to OpenShift': 'Deploy to OpenShift',
|
||||
'Description': 'Descrição',
|
||||
'DESIGN': 'Projeto',
|
||||
'design': 'modificar',
|
||||
'DESIGN': 'Projeto',
|
||||
'Design for': 'Projeto de',
|
||||
'Detailed traceback description': 'Detailed traceback description',
|
||||
'direction: ltr': 'direção: ltr',
|
||||
'Disable': 'Disable',
|
||||
'docs': 'docs',
|
||||
'done!': 'feito!',
|
||||
'download layouts': 'download layouts',
|
||||
'Download layouts from repository': 'Download layouts from repository',
|
||||
'download plugins': 'download plugins',
|
||||
'Download plugins from repository': 'Download plugins from repository',
|
||||
'E-mail': 'E-mail',
|
||||
'EDIT': 'EDITAR',
|
||||
'Edit': 'editar',
|
||||
@@ -119,6 +133,7 @@
|
||||
'Edit current record': 'Editar o registro atual',
|
||||
'Edit Profile': 'Editar Perfil',
|
||||
'edit views:': 'editar visões:',
|
||||
'Editing %s': 'A Editar %s',
|
||||
'Editing file': 'Editando arquivo',
|
||||
'Editing file "%s"': 'Editando arquivo "%s"',
|
||||
'Editing Language file': 'Editando arquivo de linguagem',
|
||||
@@ -129,6 +144,8 @@
|
||||
'Error ticket': 'Error ticket',
|
||||
'Errors': 'erros',
|
||||
'Exception instance attributes': 'Atributos da instancia de excessão',
|
||||
'Exit Fullscreen': 'Sair de Ecrã Inteiro',
|
||||
'Expand Abbreviation (html files only)': 'Expandir Abreviação (só para ficheiros html)',
|
||||
'export as csv file': 'exportar como arquivo CSV',
|
||||
'exposes': 'expõe',
|
||||
'extends': 'estende',
|
||||
@@ -144,20 +161,24 @@
|
||||
'file does not exist': 'arquivo não existe',
|
||||
'file saved on %(time)s': 'arquivo salvo em %(time)s',
|
||||
'file saved on %s': 'arquivo salvo em %s',
|
||||
'filter': 'filter',
|
||||
'filter': 'filtro',
|
||||
'Find Next': 'Localizar Seguinte',
|
||||
'Find Previous': 'Localizar Anterior',
|
||||
'First name': 'Nome',
|
||||
'Frames': 'Frames',
|
||||
'Functions with no doctests will result in [passed] tests.': 'Funções sem doctests resultarão em testes [aceitos].',
|
||||
'graph model': 'graph model',
|
||||
'Group ID': 'ID do Grupo',
|
||||
'Hello World': 'Olá Mundo',
|
||||
'Help': 'ajuda',
|
||||
'Hide/Show Translated strings': '',
|
||||
'htmledit': 'htmledit',
|
||||
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'Se o relatório acima contém um número de ticket, isso indica uma falha no controlador em execução, antes de tantar executar os doctests. Isto acontece geralmente por erro de endentação ou erro fora do código da função.\nO titulo em verde indica que os testes (se definidos) passaram. Neste caso os testes não são mostrados.',
|
||||
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'Se o relatório acima contém um número de ticket, isso indica uma falha no controlador em execução, antes de tantar executar os doctests. Isto acontece geralmente por erro de endentação ou erro fora do código da função.\r\nO titulo em verde indica que os testes (se definidos) passaram. Neste caso os testes não são mostrados.',
|
||||
'Import/Export': 'Importar/Exportar',
|
||||
'includes': 'inclui',
|
||||
'insert new': 'inserir novo',
|
||||
'insert new %s': 'inserir novo %s',
|
||||
'inspect attributes': 'inspect attributes',
|
||||
'inspect attributes': 'inspecionar atributos',
|
||||
'Install': 'instalar',
|
||||
'Installed applications': 'Aplicações instaladas',
|
||||
'internal error': 'erro interno',
|
||||
@@ -168,6 +189,7 @@
|
||||
'Invalid Query': 'Consulta inválida',
|
||||
'invalid request': 'solicitação inválida',
|
||||
'invalid ticket': 'ticket inválido',
|
||||
'Keyboard shortcuts': 'Atalhos de teclado',
|
||||
'language file "%(filename)s" created/updated': 'arquivo de linguagem "%(filename)s" criado/atualizado',
|
||||
'Language files (static strings) updated': 'Arquivos de linguagem (textos estáticos) atualizados',
|
||||
'languages': 'linguagens',
|
||||
@@ -178,11 +200,12 @@
|
||||
'License for': 'Licença para',
|
||||
'loading...': 'carregando...',
|
||||
'locals': 'locals',
|
||||
'login': 'inicio de sessão',
|
||||
'Login': 'Entrar',
|
||||
'login': 'inicio de sessão',
|
||||
'Login to the Administrative Interface': 'Entrar na interface adminitrativa',
|
||||
'Logout': 'finalizar sessão',
|
||||
'Lost Password': 'Senha perdida',
|
||||
'Manage': 'Manage',
|
||||
'manage': 'gerenciar',
|
||||
'merge': 'juntar',
|
||||
'Models': 'Modelos',
|
||||
@@ -200,7 +223,10 @@
|
||||
'NO': 'NÃO',
|
||||
'No databases in this application': 'Não existem bancos de dados nesta aplicação',
|
||||
'no match': 'não encontrado',
|
||||
'no package selected': 'no package selected',
|
||||
'no package selected': 'nenhum pacote selecionado',
|
||||
'online designer': 'online designer',
|
||||
'or alternatively': 'or alternatively',
|
||||
'Or Get from URL:': 'Ou Obtenha do URL:',
|
||||
'or import from csv file': 'ou importar de um arquivo CSV',
|
||||
'or provide app url:': 'ou forneça a url de uma aplicação:',
|
||||
'or provide application url:': 'ou forneça a url de uma aplicação:',
|
||||
@@ -209,6 +235,7 @@
|
||||
'Overwrite installed app': 'sobrescrever aplicação instalada',
|
||||
'Pack all': 'criar pacote',
|
||||
'Pack compiled': 'criar pacote compilado',
|
||||
'Pack custom': 'Pack custom',
|
||||
'pack plugin': 'empacotar plugin',
|
||||
'PAM authenticated user, cannot change password here': 'usuario autenticado por PAM, não pode alterar a senha por aqui',
|
||||
'Password': 'Senha',
|
||||
@@ -218,16 +245,23 @@
|
||||
'Plugin "%s" in application': 'Plugin "%s" na aplicação',
|
||||
'plugins': 'plugins',
|
||||
'Plugins': 'Plugins',
|
||||
'Plural-Forms:': 'Plural-Forms:',
|
||||
'Powered by': 'Este site utiliza',
|
||||
'previous 100 rows': '100 registros anteriores',
|
||||
'Private files': 'Private files',
|
||||
'private files': 'private files',
|
||||
'Query:': 'Consulta:',
|
||||
'Rapid Search': 'Rapid Search',
|
||||
'record': 'registro',
|
||||
'record does not exist': 'o registro não existe',
|
||||
'record id': 'id do registro',
|
||||
'Record ID': 'ID do Registro',
|
||||
'Register': 'Registrar-se',
|
||||
'Registration key': 'Chave de registro',
|
||||
'Reload routes': 'Reload routes',
|
||||
'Remove compiled': 'eliminar compilados',
|
||||
'Replace': 'Substituir',
|
||||
'Replace All': 'Substituir Tudo',
|
||||
'request': 'request',
|
||||
'Resolve Conflict file': 'Arquivo de resolução de conflito',
|
||||
'response': 'response',
|
||||
@@ -236,7 +270,14 @@
|
||||
'Role': 'Papel',
|
||||
'Rows in table': 'Registros na tabela',
|
||||
'Rows selected': 'Registros selecionados',
|
||||
'rules are not defined': 'rules are not defined',
|
||||
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
|
||||
'Running on %s': 'A correr em %s',
|
||||
'Save': 'Save',
|
||||
'save': 'salvar',
|
||||
'Save file:': 'Gravar ficheiro:',
|
||||
'Save file: %s': 'Gravar ficheiro: %s',
|
||||
'Save via Ajax': 'Gravar via Ajax',
|
||||
'Saved file hash:': 'Hash do arquivo salvo:',
|
||||
'selected': 'selecionado(s)',
|
||||
'session': 'session',
|
||||
@@ -244,10 +285,13 @@
|
||||
'shell': 'Terminal',
|
||||
'Site': 'site',
|
||||
'some files could not be removed': 'alguns arquicos não puderam ser removidos',
|
||||
'Start searching': 'Start searching',
|
||||
'Start wizard': 'iniciar assistente',
|
||||
'state': 'estado',
|
||||
'Static': 'Static',
|
||||
'static': 'estáticos',
|
||||
'Static files': 'Arquivos estáticos',
|
||||
'Submit': 'Submit',
|
||||
'submit': 'enviar',
|
||||
'Sure you want to delete this object?': 'Tem certeza que deseja apaagr este objeto?',
|
||||
'table': 'tabela',
|
||||
@@ -255,21 +299,23 @@
|
||||
'test': 'testar',
|
||||
'Testing application': 'Testando a aplicação',
|
||||
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'A "consulta" é uma condição como "db.tabela.campo1==\'valor\'". Algo como "db.tabela1.campo1==db.tabela2.campo2" resulta em um JOIN SQL.',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
|
||||
'the application logic, each URL path is mapped in one exposed function in the controller': 'A lógica da aplicação, cada URL é mapeada para uma função exposta pelo controlador',
|
||||
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
|
||||
'the data representation, define database tables and sets': 'A representação dos dadps, define tabelas e estruturas de dados',
|
||||
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
|
||||
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
|
||||
'the presentations layer, views are also known as templates': 'A camada de apresentação, As visões também são chamadas de templates',
|
||||
'There are no controllers': 'Não existem controllers',
|
||||
'There are no models': 'Não existem modelos',
|
||||
'There are no modules': 'Não existem módulos',
|
||||
'There are no plugins': 'There are no plugins',
|
||||
'There are no private files': '',
|
||||
'There are no static files': 'Não existem arquicos estáticos',
|
||||
'There are no translators, only default language is supported': 'Não há traduções, somente a linguagem padrão é suportada',
|
||||
'There are no views': 'Não existem visões',
|
||||
'these files are served without processing, your images go here': 'Estes arquivos são servidos sem processamento, suas imagens ficam aqui',
|
||||
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
|
||||
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
|
||||
'these files are served without processing, your images go here': 'Estes arquivos são servidos sem processamento, suas imagens ficam aqui',
|
||||
'This is the %(filename)s template': 'Este é o template %(filename)s',
|
||||
'Ticket': 'Ticket',
|
||||
'Ticket ID': 'Ticket ID',
|
||||
@@ -277,11 +323,15 @@
|
||||
'TM': 'MR',
|
||||
'to previous version.': 'para a versão anterior.',
|
||||
'To create a plugin, name a file/folder plugin_[name]': 'Para criar um plugin, nomeio um arquivo/pasta como plugin_[nome]',
|
||||
'toggle breakpoint': 'toggle breakpoint',
|
||||
'Toggle comment': 'Toggle comment',
|
||||
'Toggle Fullscreen': 'Toggle Fullscreen',
|
||||
'Traceback': 'Traceback',
|
||||
'translation strings for the application': 'textos traduzidos para a aplicação',
|
||||
'Translation strings for the application': 'Translation strings for the application',
|
||||
'try': 'tente',
|
||||
'try something like': 'tente algo como',
|
||||
'Try the mobile interface': 'Try the mobile interface',
|
||||
'Unable to check for upgrades': 'Não é possível checar as atualizações',
|
||||
'unable to create application "%s"': 'não é possível criar a aplicação "%s"',
|
||||
'unable to delete file "%(filename)s"': 'não é possível criar o arquico "%(filename)s"',
|
||||
@@ -300,8 +350,10 @@
|
||||
'Update:': 'Atualizar:',
|
||||
'upgrade web2py now': 'atualize o web2py agora',
|
||||
'upload': 'upload',
|
||||
'Upload': 'Upload',
|
||||
'Upload & install packed application': 'Faça upload e instale uma aplicação empacotada',
|
||||
'Upload a package:': 'Faça upload de um pacote:',
|
||||
'Upload and install packed application': 'Upload and install packed application',
|
||||
'upload application:': 'Fazer upload de uma aplicação:',
|
||||
'Upload existing application': 'Faça upload de uma aplicação existente',
|
||||
'upload file:': 'Enviar arquivo:',
|
||||
|
||||
@@ -47,5 +47,3 @@ if 'adminLanguage' in request.cookies and not (request.cookies['adminLanguage']
|
||||
#set static_version
|
||||
from gluon.settings import global_settings
|
||||
response.static_version = global_settings.web2py_version.split('-')[0]
|
||||
|
||||
|
||||
|
||||
@@ -34,4 +34,3 @@ else:
|
||||
URL(_a, 'default', f='logout')))
|
||||
response.menu.append((T('Debug'), False,
|
||||
URL(_a, 'debug', 'interact')))
|
||||
|
||||
|
||||
@@ -7,11 +7,10 @@ def stateWidget(field, value, data={'on-label':'Enabled', 'off-label':'Disabled'
|
||||
except:
|
||||
fieldName = field
|
||||
|
||||
div = DIV(INPUT( _type='checkbox', _name='%s' % fieldName, _checked= 'checked' if value == 'true' else None, _value='true'),
|
||||
_class='make-bootstrap-switch',
|
||||
div = DIV(INPUT( _type='checkbox', _name='%s' % fieldName, _checked= 'checked' if value == 'true' else None, _value='true'),
|
||||
_class='make-bootstrap-switch',
|
||||
data=data)
|
||||
script = SCRIPT("""
|
||||
jQuery(".make-bootstrap-switch input[name='%s']").parent().bootstrapSwitch();
|
||||
""" % fieldName)
|
||||
return DIV(div, script)
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -12,35 +12,45 @@
|
||||
{{if request.function=='index':}}
|
||||
<h2>{{=T("Available Databases and Tables")}}</h2>
|
||||
{{if not databases:}}{{=T("No databases in this application")}}{{pass}}
|
||||
<table>
|
||||
{{for db in sorted(databases):}}
|
||||
{{for table in databases[db].tables:}}
|
||||
{{qry='%s.%s.id>0'%(db,table)}}
|
||||
{{tbl=databases[db][table]}}
|
||||
{{if hasattr(tbl,'_primarykey'):}}
|
||||
{{if tbl._primarykey:}}
|
||||
{{firstkey=tbl[tbl._primarykey[0]]}}
|
||||
{{if firstkey.type in ['string','text']:}}
|
||||
{{qry='%s.%s.%s!=""'%(db,table,firstkey.name)}}
|
||||
{{else:}}
|
||||
{{qry='%s.%s.%s>0'%(db,table,firstkey.name)}}
|
||||
{{pass}}
|
||||
{{else:}}
|
||||
{{qry=''}}
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
<tr>
|
||||
<th style="font-size: 1.75em;">
|
||||
{{=A("%s.%s" % (db,table),_href=URL('select',args=[db],vars=dict(query=qry)))}}
|
||||
</th>
|
||||
<td>
|
||||
{{=A(str(T('New Record')),_href=URL('insert',args=[db,table]),_class="btn")}}
|
||||
</td>
|
||||
</tr>
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
</table>
|
||||
|
||||
<ul class="nav nav-tabs" id="myTab">
|
||||
<li class="active" ><a href="#alltables" data-toggle="tab">Tables</a></li>
|
||||
<li><a href="#hooks" data-toggle="tab">Hooks</a></li>
|
||||
</ul>
|
||||
<div class="tab-content">
|
||||
<div class="tab-pane active" id="alltables">
|
||||
<table>
|
||||
{{for db in sorted(databases):}}
|
||||
{{for table in databases[db].tables:}}
|
||||
{{qry='%s.%s.id>0'%(db,table)}}
|
||||
{{tbl=databases[db][table]}}
|
||||
{{if hasattr(tbl,'_primarykey'):}}
|
||||
{{if tbl._primarykey:}}
|
||||
{{firstkey=tbl[tbl._primarykey[0]]}}
|
||||
{{if firstkey.type in ['string','text']:}}
|
||||
{{qry='%s.%s.%s!=""'%(db,table,firstkey.name)}}
|
||||
{{else:}}
|
||||
{{qry='%s.%s.%s>0'%(db,table,firstkey.name)}}
|
||||
{{pass}}
|
||||
{{else:}}
|
||||
{{qry=''}}
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
<tr>
|
||||
<th style="font-size: 1.75em;">
|
||||
{{=A("%s.%s" % (db,table),_href=URL('select',args=[db],vars=dict(query=qry)))}}
|
||||
</th>
|
||||
<td>
|
||||
{{=A(str(T('New Record')),_href=URL('insert',args=[db,table]),_class="btn")}}
|
||||
</td>
|
||||
</tr>
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
</table>
|
||||
</div>
|
||||
<div class="tab-pane" id="hooks">
|
||||
{{=LOAD('appadmin', 'hooks', ajax=True)}}
|
||||
</div>
|
||||
</div>
|
||||
{{elif request.function=='select':}}
|
||||
<h2>{{=XML(str(T("Database %s select"))%A(request.args[0],_href=URL('index'))) }}
|
||||
</h2>
|
||||
|
||||
@@ -258,12 +258,16 @@ $(document).on('click', 'a.font_button', function (e) {
|
||||
</div>
|
||||
</div>
|
||||
<section id="windows_divs" class="tab-content ">
|
||||
<div id="window_todo" class="tab-pane container-fluid">
|
||||
{{=LOAD('default', 'todolist.load', vars={'app':app}, ajax=True, timeout=60000, times="infinity")}}
|
||||
</div>
|
||||
<div id="window_shortcuts" class="tab-pane container-fluid">
|
||||
{{include 'default/editor_shortcuts.html'}}
|
||||
</div>
|
||||
<div id="window_todo" class="tab-pane container-fluid">
|
||||
{{=LOAD('default', 'todolist.load', vars={'app':app}, ajax=True, timeout=60000, times="infinity")}}
|
||||
</div>
|
||||
<div id="window_shortcuts" class="tab-pane container-fluid">
|
||||
{{include 'default/editor_shortcuts.html'}}
|
||||
</div>
|
||||
<div id="window_dbhooks" class="tab-pane container-fluid">
|
||||
<h4>Tables hooks</h4>
|
||||
<div>{{=LOAD(url="/%s/appadmin/hooks" % app, ajax=True, timeout=60000, times="infinity")}}</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
{{block footer}}
|
||||
@@ -272,6 +276,7 @@ $(document).on('click', 'a.font_button', function (e) {
|
||||
<ul id="windows_hooks" class="nav">
|
||||
<li class=""><a href="#window_todo">TODO</a></li>
|
||||
<li class=""><a href="#window_shortcuts">Shortcuts</a></li>
|
||||
<li class=""><a href="#window_dbhooks">Hooks</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
<div>
|
||||
<h4>{{=T("Keyboard shortcuts")}}</h4>
|
||||
<ul class="keybindings unstyled">
|
||||
<li></li>
|
||||
{{=shortcut('Ctrl+S', T('Save via Ajax'))}}
|
||||
{{=shortcut('Ctrl+F11', T('Toggle Fullscreen'))}}
|
||||
{{=shortcut('Shift+Esc', T('Exit Fullscreen'))}}
|
||||
{{=shortcut('Ctrl-F / Cmd-F', T('Start searching'))}}
|
||||
{{=shortcut('Ctrl-G / Cmd-G', T('Find Next'))}}
|
||||
{{=shortcut('Shift-Ctrl-G / Shift-Cmd-G', T('Find Previous'))}}
|
||||
{{=shortcut('Shift-Ctrl-F / Cmd-Option-F', T('Replace'))}}
|
||||
{{=shortcut('Shift-Ctrl-R / Shift-Cmd-Option-F', T('Replace All'))}}
|
||||
{{=shortcut('Ctrl-/ ', T('Toggle comment'))}}
|
||||
{{=shortcut('Tab', T('Expand Abbreviation (html files only)'))}}
|
||||
{{=shortcut('Ctrl-Space', T('Autocomplete Python Code'))}}
|
||||
</ul>
|
||||
</div>
|
||||
<ul class="keybindings unstyled">
|
||||
<li></li>
|
||||
{{=shortcut('Ctrl+S', T('Save via Ajax'))}}
|
||||
{{=shortcut('Ctrl+F11', T('Toggle Fullscreen'))}}
|
||||
{{=shortcut('Shift+Esc', T('Exit Fullscreen'))}}
|
||||
{{=shortcut('Ctrl-F / Cmd-F', T('Start searching'))}}
|
||||
{{=shortcut('Ctrl-G / Cmd-G', T('Find Next'))}}
|
||||
{{=shortcut('Shift-Ctrl-G / Shift-Cmd-G', T('Find Previous'))}}
|
||||
{{=shortcut('Shift-Ctrl-F / Cmd-Option-F', T('Replace'))}}
|
||||
{{=shortcut('Shift-Ctrl-R / Shift-Cmd-Option-F', T('Replace All'))}}
|
||||
{{=shortcut('Ctrl-/ ', T('Toggle comment'))}}
|
||||
{{=shortcut('Tab', T('Expand Abbreviation (html files only)'))}}
|
||||
{{=shortcut('Ctrl-Space', T('Autocomplete Python Code'))}}
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
@@ -4,12 +4,10 @@ frm = form
|
||||
smt_button = frm.element(_type="submit")
|
||||
smt_button['_class'] = 'btn'
|
||||
smt_button['_style'] = 'margin-right:4px;'
|
||||
ccl_button = frm.element(_type="button")
|
||||
ccl_button['_class'] = 'btn'
|
||||
}}
|
||||
<!-- begin "git_push" block -->
|
||||
<h2>{{=T('This will push changes to the remote repo for application "%s".', app)}}</h2>
|
||||
<center>
|
||||
{{=form}}
|
||||
</center>
|
||||
<!-- end "git_push" block -->
|
||||
<!-- end "git_push" block -->
|
||||
|
||||
@@ -461,34 +461,24 @@ def ccache():
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
folder = os.path.join(request.folder,'cache')
|
||||
if not os.path.exists(folder):
|
||||
os.mkdir(folder)
|
||||
locker = open(os.path.join(folder, 'cache.lock'), 'a')
|
||||
portalocker.lock(locker, portalocker.LOCK_EX)
|
||||
disk_storage = shelve.open(
|
||||
os.path.join(folder, 'cache.shelve'))
|
||||
try:
|
||||
for key, value in disk_storage.items():
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
disk['bytes'] += hp.iso(value[1]).size
|
||||
disk['objects'] += hp.iso(value[1]).count
|
||||
disk['entries'] += 1
|
||||
if value[0] < disk['oldest']:
|
||||
disk['oldest'] = value[0]
|
||||
disk['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
finally:
|
||||
portalocker.unlock(locker)
|
||||
locker.close()
|
||||
disk_storage.close()
|
||||
|
||||
for key in cache.disk.storage:
|
||||
value = cache.disk.storage[key]
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
disk['bytes'] += hp.iso(value[1]).size
|
||||
disk['objects'] += hp.iso(value[1]).count
|
||||
disk['entries'] += 1
|
||||
if value[0] < disk['oldest']:
|
||||
disk['oldest'] = value[0]
|
||||
disk['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
|
||||
total['entries'] = ram['entries'] + disk['entries']
|
||||
total['bytes'] = ram['bytes'] + disk['bytes']
|
||||
@@ -667,3 +657,42 @@ def manage():
|
||||
kwargs.update(**smartgrid_args.get(table._tablename, {}))
|
||||
grid = SQLFORM.smartgrid(table, args=request.args[:2], formname=formname, **kwargs)
|
||||
return grid
|
||||
|
||||
def hooks():
|
||||
import functools
|
||||
import inspect
|
||||
list_op=['_%s_%s' %(h,m) for h in ['before', 'after'] for m in ['insert','update','delete']]
|
||||
tables=[]
|
||||
with_build_it=False
|
||||
for db_str in sorted(databases):
|
||||
db = databases[db_str]
|
||||
for t in db.tables:
|
||||
method_hooks=[]
|
||||
for op in list_op:
|
||||
functions = []
|
||||
for f in getattr(db[t], op):
|
||||
if hasattr(f, '__call__'):
|
||||
if isinstance(f, (functools.partial)):
|
||||
f = f.func
|
||||
filename = inspect.getsourcefile(f)
|
||||
details = {'funcname':f.__name__,
|
||||
'filename':filename[len(request.folder):] if request.folder in filename else None,
|
||||
'lineno': inspect.getsourcelines(f)[1]}
|
||||
if details['filename']: # Built in functions as delete_uploaded_files are not editable
|
||||
details['url'] = URL(a='admin',c='default',f='edit', args=[request['application'], details['filename']],vars={'lineno':details['lineno']})
|
||||
if details['filename'] or with_build_it:
|
||||
functions.append(details)
|
||||
if len(functions):
|
||||
method_hooks.append({'name':op, 'functions':functions})
|
||||
if len(method_hooks):
|
||||
tables.append({'name':"%s.%s" % (db_str,t), 'slug': IS_SLUG()("%s.%s" % (db_str,t))[0], 'method_hooks':method_hooks})
|
||||
# Render
|
||||
ul_main = UL(_class='nav nav-list')
|
||||
for t in tables:
|
||||
ul_main.append(A(t['name'], _onclick="collapse('a_%s')" % t['slug']))
|
||||
ul_t = UL(_class='nav nav-list', _id="a_%s" % t['slug'], _style='display:none')
|
||||
for op in t['method_hooks']:
|
||||
ul_t.append(LI (op['name']))
|
||||
ul_t.append(UL([LI(A(f['funcname'], _class="editor_filelink", _href=f['url']if 'url' in f else None, **{'_data-lineno':f['lineno']-1})) for f in op['functions']]))
|
||||
ul_main.append(ul_t)
|
||||
return ul_main
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -12,35 +12,45 @@
|
||||
{{if request.function=='index':}}
|
||||
<h2>{{=T("Available Databases and Tables")}}</h2>
|
||||
{{if not databases:}}{{=T("No databases in this application")}}{{pass}}
|
||||
<table>
|
||||
{{for db in sorted(databases):}}
|
||||
{{for table in databases[db].tables:}}
|
||||
{{qry='%s.%s.id>0'%(db,table)}}
|
||||
{{tbl=databases[db][table]}}
|
||||
{{if hasattr(tbl,'_primarykey'):}}
|
||||
{{if tbl._primarykey:}}
|
||||
{{firstkey=tbl[tbl._primarykey[0]]}}
|
||||
{{if firstkey.type in ['string','text']:}}
|
||||
{{qry='%s.%s.%s!=""'%(db,table,firstkey.name)}}
|
||||
{{else:}}
|
||||
{{qry='%s.%s.%s>0'%(db,table,firstkey.name)}}
|
||||
{{pass}}
|
||||
{{else:}}
|
||||
{{qry=''}}
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
<tr>
|
||||
<th style="font-size: 1.75em;">
|
||||
{{=A("%s.%s" % (db,table),_href=URL('select',args=[db],vars=dict(query=qry)))}}
|
||||
</th>
|
||||
<td>
|
||||
{{=A(str(T('New Record')),_href=URL('insert',args=[db,table]),_class="btn")}}
|
||||
</td>
|
||||
</tr>
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
</table>
|
||||
|
||||
<ul class="nav nav-tabs" id="myTab">
|
||||
<li class="active" ><a href="#alltables" data-toggle="tab">Tables</a></li>
|
||||
<li><a href="#hooks" data-toggle="tab">Hooks</a></li>
|
||||
</ul>
|
||||
<div class="tab-content">
|
||||
<div class="tab-pane active" id="alltables">
|
||||
<table>
|
||||
{{for db in sorted(databases):}}
|
||||
{{for table in databases[db].tables:}}
|
||||
{{qry='%s.%s.id>0'%(db,table)}}
|
||||
{{tbl=databases[db][table]}}
|
||||
{{if hasattr(tbl,'_primarykey'):}}
|
||||
{{if tbl._primarykey:}}
|
||||
{{firstkey=tbl[tbl._primarykey[0]]}}
|
||||
{{if firstkey.type in ['string','text']:}}
|
||||
{{qry='%s.%s.%s!=""'%(db,table,firstkey.name)}}
|
||||
{{else:}}
|
||||
{{qry='%s.%s.%s>0'%(db,table,firstkey.name)}}
|
||||
{{pass}}
|
||||
{{else:}}
|
||||
{{qry=''}}
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
<tr>
|
||||
<th style="font-size: 1.75em;">
|
||||
{{=A("%s.%s" % (db,table),_href=URL('select',args=[db],vars=dict(query=qry)))}}
|
||||
</th>
|
||||
<td>
|
||||
{{=A(str(T('New Record')),_href=URL('insert',args=[db,table]),_class="btn")}}
|
||||
</td>
|
||||
</tr>
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
</table>
|
||||
</div>
|
||||
<div class="tab-pane" id="hooks">
|
||||
{{=LOAD('appadmin', 'hooks', ajax=True)}}
|
||||
</div>
|
||||
</div>
|
||||
{{elif request.function=='select':}}
|
||||
<h2>{{=XML(str(T("Database %s select"))%A(request.args[0],_href=URL('index'))) }}
|
||||
</h2>
|
||||
|
||||
@@ -461,34 +461,24 @@ def ccache():
|
||||
if value[0] < ram['oldest']:
|
||||
ram['oldest'] = value[0]
|
||||
ram['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
folder = os.path.join(request.folder,'cache')
|
||||
if not os.path.exists(folder):
|
||||
os.mkdir(folder)
|
||||
locker = open(os.path.join(folder, 'cache.lock'), 'a')
|
||||
portalocker.lock(locker, portalocker.LOCK_EX)
|
||||
disk_storage = shelve.open(
|
||||
os.path.join(folder, 'cache.shelve'))
|
||||
try:
|
||||
for key, value in disk_storage.items():
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
disk['bytes'] += hp.iso(value[1]).size
|
||||
disk['objects'] += hp.iso(value[1]).count
|
||||
disk['entries'] += 1
|
||||
if value[0] < disk['oldest']:
|
||||
disk['oldest'] = value[0]
|
||||
disk['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
finally:
|
||||
portalocker.unlock(locker)
|
||||
locker.close()
|
||||
disk_storage.close()
|
||||
|
||||
for key in cache.disk.storage:
|
||||
value = cache.disk.storage[key]
|
||||
if isinstance(value, dict):
|
||||
disk['hits'] = value['hit_total'] - value['misses']
|
||||
disk['misses'] = value['misses']
|
||||
try:
|
||||
disk['ratio'] = disk['hits'] * 100 / value['hit_total']
|
||||
except (KeyError, ZeroDivisionError):
|
||||
disk['ratio'] = 0
|
||||
else:
|
||||
if hp:
|
||||
disk['bytes'] += hp.iso(value[1]).size
|
||||
disk['objects'] += hp.iso(value[1]).count
|
||||
disk['entries'] += 1
|
||||
if value[0] < disk['oldest']:
|
||||
disk['oldest'] = value[0]
|
||||
disk['keys'].append((key, GetInHMS(time.time() - value[0])))
|
||||
|
||||
total['entries'] = ram['entries'] + disk['entries']
|
||||
total['bytes'] = ram['bytes'] + disk['bytes']
|
||||
@@ -667,3 +657,42 @@ def manage():
|
||||
kwargs.update(**smartgrid_args.get(table._tablename, {}))
|
||||
grid = SQLFORM.smartgrid(table, args=request.args[:2], formname=formname, **kwargs)
|
||||
return grid
|
||||
|
||||
def hooks():
|
||||
import functools
|
||||
import inspect
|
||||
list_op=['_%s_%s' %(h,m) for h in ['before', 'after'] for m in ['insert','update','delete']]
|
||||
tables=[]
|
||||
with_build_it=False
|
||||
for db_str in sorted(databases):
|
||||
db = databases[db_str]
|
||||
for t in db.tables:
|
||||
method_hooks=[]
|
||||
for op in list_op:
|
||||
functions = []
|
||||
for f in getattr(db[t], op):
|
||||
if hasattr(f, '__call__'):
|
||||
if isinstance(f, (functools.partial)):
|
||||
f = f.func
|
||||
filename = inspect.getsourcefile(f)
|
||||
details = {'funcname':f.__name__,
|
||||
'filename':filename[len(request.folder):] if request.folder in filename else None,
|
||||
'lineno': inspect.getsourcelines(f)[1]}
|
||||
if details['filename']: # Built in functions as delete_uploaded_files are not editable
|
||||
details['url'] = URL(a='admin',c='default',f='edit', args=[request['application'], details['filename']],vars={'lineno':details['lineno']})
|
||||
if details['filename'] or with_build_it:
|
||||
functions.append(details)
|
||||
if len(functions):
|
||||
method_hooks.append({'name':op, 'functions':functions})
|
||||
if len(method_hooks):
|
||||
tables.append({'name':"%s.%s" % (db_str,t), 'slug': IS_SLUG()("%s.%s" % (db_str,t))[0], 'method_hooks':method_hooks})
|
||||
# Render
|
||||
ul_main = UL(_class='nav nav-list')
|
||||
for t in tables:
|
||||
ul_main.append(A(t['name'], _onclick="collapse('a_%s')" % t['slug']))
|
||||
ul_t = UL(_class='nav nav-list', _id="a_%s" % t['slug'], _style='display:none')
|
||||
for op in t['method_hooks']:
|
||||
ul_t.append(LI (op['name']))
|
||||
ul_t.append(UL([LI(A(f['funcname'], _class="editor_filelink", _href=f['url']if 'url' in f else None, **{'_data-lineno':f['lineno']-1})) for f in op['functions']]))
|
||||
ul_main.append(ul_t)
|
||||
return ul_main
|
||||
|
||||
@@ -0,0 +1,492 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
{
|
||||
'!langcode!': 'ca',
|
||||
'!langname!': 'Català',
|
||||
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"actualizi" és una expressió opcional com "camp1=\'nou_valor\'". No es poden actualitzar o eliminar resultats de un JOIN',
|
||||
'%(nrows)s records found': '%(nrows)s registres trobats',
|
||||
'%s %%{position}': '%s %%{posició}',
|
||||
'%s %%{row} deleted': '%s %%{fila} %%{eliminada}',
|
||||
'%s %%{row} updated': '%s %%{fila} %%{actualitzada}',
|
||||
'%s selected': '%s %%{seleccionat}',
|
||||
'%Y-%m-%d': '%d/%m/%Y',
|
||||
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
|
||||
'(something like "it-it")': '(similar a "això-això")',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': 'Hi ha hagut un error, si us plau [[recarregui %s]] la pàgina',
|
||||
'@markmin\x01Number of entries: **%s**': "Nombre d'entrades: **%s**",
|
||||
'A new version of web2py is available': 'Hi ha una nova versió de wep2py disponible',
|
||||
'A new version of web2py is available: %s': 'Hi ha una nova versió de wep2py disponible: %s',
|
||||
'About': 'Sobre',
|
||||
'about': 'sobre',
|
||||
'About application': "Sobre l'aplicació",
|
||||
'Access Control': "Control d'Accés",
|
||||
'Add': 'Afegir',
|
||||
'Add Record': 'Afegeix registre',
|
||||
'additional code for your application': '`codi addicional per a la seva aplicació',
|
||||
'admin disabled because no admin password': 'admin inhabilitat per falta de contrasenya',
|
||||
'admin disabled because not supported on google app engine': 'admin inhabilitat, no és suportat en GAE',
|
||||
'admin disabled because unable to access password file': 'admin inhabilitat, impossible accedir al fitxer con la contrasenya',
|
||||
'Admin is disabled because insecure channel': 'Admin inhabilitat, el canal no és segur',
|
||||
'Admin is disabled because unsecure channel': 'Admin inhabilitat, el canal no és segur',
|
||||
'Administrative interface': 'Interfície administrativa',
|
||||
'Administrative Interface': 'Interfície Administrativa',
|
||||
'administrative interface': 'interfície administrativa',
|
||||
'Administrator Password:': 'Contrasenya del Administrador:',
|
||||
'Ajax Recipes': 'Receptes AJAX',
|
||||
'An error occured, please %s the page': 'Hi ha hagut un error, per favor %s la pàgina',
|
||||
'And': 'I',
|
||||
'and rename it (required):': 'i renombri-la (requerit):',
|
||||
'and rename it:': " i renombri'l:",
|
||||
'appadmin': 'appadmin',
|
||||
'appadmin is disabled because insecure channel': 'admin inhabilitat, el canal no és segur',
|
||||
'application "%s" uninstalled': 'aplicació "%s" desinstal·lada',
|
||||
'application compiled': 'aplicació compilada',
|
||||
'application is compiled and cannot be designed': 'la aplicació està compilada i no pot ser modificada',
|
||||
'Apply changes': 'Aplicar canvis',
|
||||
'Appointment': 'Nomenament',
|
||||
'Are you sure you want to delete file "%s"?': 'Està segur que vol eliminar el arxiu "%s"?',
|
||||
'Are you sure you want to delete this object?': 'Està segur que vol esborrar aquest objecte?',
|
||||
'Are you sure you want to uninstall application "%s"': '¿Està segur que vol desinstalar la aplicació "%s"',
|
||||
'Are you sure you want to uninstall application "%s"?': '¿Està segur que vol desinstalar la aplicació "%s"?',
|
||||
'at': 'a',
|
||||
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENCIÓ: Inici de sessió requereix una connexió segura (HTTPS) o localhost.',
|
||||
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENCION: NO EJECUTE VARIAS PRUEBAS SIMULTANEAMENTE, NO SON THREAD SAFE.',
|
||||
'ATTENTION: you cannot edit the running application!': 'ATENCIO: no pot modificar la aplicació que està ejecutant-se!',
|
||||
'Authentication': 'Autenticació',
|
||||
'Authentication failed at client DB!': '¡La autenticació ha fallat en la BDD client!',
|
||||
'Authentication failed at main DB!': '¡La autenticació ha fallat en la BDD principal!',
|
||||
'Available Databases and Tables': 'Bases de dades i taules disponibles',
|
||||
'Back': 'Endarrera',
|
||||
'Buy this book': 'Compra aquest lllibre',
|
||||
'Cache': 'Caché',
|
||||
'cache': 'caché',
|
||||
'Cache Cleared': 'Caché Netejada',
|
||||
'Cache Keys': 'Claus de la Caché',
|
||||
'cache, errors and sessions cleaned': 'caché, errors i sessions eliminats',
|
||||
'Cannot be empty': 'No pot estar buit',
|
||||
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'No se pot compilar: hi ha errors en la seva aplicació. Depuri, corregeixi errors i torni a intentar-ho.',
|
||||
'cannot upload file "%(filename)s"': 'no és possible pujar fitxer "%(filename)s"',
|
||||
'Change Password': 'Canviï la Contrasenya',
|
||||
'Change password': 'Canviï la contrasenya',
|
||||
'change password': 'canviï la contrasenya',
|
||||
'Changelog': 'Changelog',
|
||||
'check all': 'marcar tots',
|
||||
'Check to delete': 'Marqui per a eliminar',
|
||||
'choose one': 'escolliu un',
|
||||
'clean': 'neteja',
|
||||
'Clear': 'Netejar',
|
||||
'Clear CACHE?': 'Netejar Memòrica Cau?',
|
||||
'Clear DISK': 'Netejar DISC',
|
||||
'Clear RAM': 'Netejar RAM',
|
||||
'Click on the link %(link)s to reset your password': "Cliqui en l'enllaç %(link)s per a reiniciar la seva contrasenya",
|
||||
'click to check for upgrades': 'feu clic per buscar actualitzacions',
|
||||
'client': 'cliente',
|
||||
'Client IP': 'IP del Client',
|
||||
'Close': 'Tancar',
|
||||
'Comma-separated export including columns not shown; fields from other tables are exported as raw values for faster export': 'Comma-separated export including columns not shown; fields from other tables are exported as raw values for faster export',
|
||||
'Comma-separated export of visible columns. Fields from other tables are exported as they appear on-screen but this may be slow for many rows': 'Comma-separated export of visible columns. Fields from other tables are exported as they appear on-screen but this may be slow for many rows',
|
||||
'Community': 'Comunitat',
|
||||
'compile': 'compilar',
|
||||
'compiled application removed': 'aplicació compilada eliminada',
|
||||
'Components and Plugins': 'Components i Plugins',
|
||||
'contains': 'conté',
|
||||
'Controller': 'Controlador',
|
||||
'Controllers': 'Controladors',
|
||||
'controllers': 'controladors',
|
||||
'Copyright': 'Copyright',
|
||||
'Correo electrónico invàlid': 'Correu electrònic invàlid',
|
||||
'create file with filename:': 'crear el fitxer amb el nom:',
|
||||
'Create new application': 'Crear una nova aplicació',
|
||||
'create new application:': 'crear una nova aplicació:',
|
||||
'Create New Page': 'Crear Pàgina Nova',
|
||||
'Create Page from Slug': 'Create Page from Slug',
|
||||
'Created By': 'Creat Per',
|
||||
'Created On': 'Creat a',
|
||||
'CSV': 'CSV',
|
||||
'CSV (hidden cols)': 'CSV (columnas ocultes)',
|
||||
'Current request': 'Sol·licitud en curs',
|
||||
'Current response': 'Resposta en curs',
|
||||
'Current session': 'Sessió en curs',
|
||||
'currently saved or': 'actualment guardat o',
|
||||
'customize me!': "¡Adapta'm!",
|
||||
'data uploaded': 'dades pujades',
|
||||
'Database': 'Base de dades',
|
||||
'Database %s select': 'selecció a base de dades %s',
|
||||
'database administration': 'administració de base de dades',
|
||||
'Database Administration (appadmin)': 'Administració de Base de Dades (appadmin)',
|
||||
'Date and Time': 'Data i Hora',
|
||||
'DB': 'BDD',
|
||||
'db': 'bdd',
|
||||
'DB Model': 'Model BDD',
|
||||
'defines tables': 'defineix taules',
|
||||
'Delete': 'Eliminar',
|
||||
'delete': 'eliminar',
|
||||
'delete all checked': 'eliminar marcats',
|
||||
'Delete:': 'Eliminar:',
|
||||
'Demo': 'Demostració',
|
||||
'Deploy on Google App Engine': 'Desplegament a Google App Engine',
|
||||
'Deployment Recipes': 'Receptes de desplegament',
|
||||
'Description': 'Descripció',
|
||||
'design': 'diseny',
|
||||
'DESIGN': 'DISENY',
|
||||
'Design for': 'Diseny per a',
|
||||
'detecting': 'detectant',
|
||||
'DISK': 'DISC',
|
||||
'Disk Cache Keys': 'Claus de Caché en Disc',
|
||||
'Disk Cleared': 'Disc netejat',
|
||||
'Documentation': 'Documentació',
|
||||
"Don't know what to do?": 'No sap què fer?',
|
||||
'done!': '¡fet!',
|
||||
'Download': 'Descàrregues',
|
||||
'E-mail': 'Correu electrònic',
|
||||
'edit': 'editar',
|
||||
'EDIT': 'EDITAR',
|
||||
'Edit': 'Editar',
|
||||
'Edit application': 'Editar aplicació',
|
||||
'edit controller': 'editar controlador',
|
||||
'Edit current record': 'Editar el registre actual',
|
||||
'Edit Menu': 'Editar Menu',
|
||||
'Edit Page': 'Editar Pàgina',
|
||||
'Edit Page Media': 'Edit Page Media',
|
||||
'Edit Profile': 'Editar Perfil',
|
||||
'edit profile': 'editar perfil',
|
||||
'Edit This App': 'Editi aquesta App',
|
||||
'Editing file': 'Editant fitxer',
|
||||
'Editing file "%s"': 'Editant fitxer "%s"',
|
||||
'El fitxer ha de ser PDF': 'El fitxer ha de ser PDF',
|
||||
'El fitxer ha de ser PDF o XML': 'El fitxer ha de ser PDF o XML',
|
||||
'Email': 'Email',
|
||||
'Email and SMS': 'Correu electrònic i SMS',
|
||||
'Email sent': 'Correu electrònic enviat',
|
||||
'End of impersonation': 'Fi de suplantació',
|
||||
'enter a number between %(min)g and %(max)g': 'introdueixi un número entre %(min)g i %(max)g',
|
||||
'Enter a valid email address': 'Entri una adreça email vàlida',
|
||||
'enter a value': 'entri un valor',
|
||||
'Enter a value': 'Entri un valor',
|
||||
'Enter an integer between %(min)g and %(max)g': 'Entri un numero enter entre %(min)g i %(max)g',
|
||||
'enter an integer between %(min)g and %(max)g': 'entri numero enter entre %(min)g i %(max)g',
|
||||
'enter date and time as %(format)s': 'entri data i hora com %(format)s',
|
||||
'Enter from %(min)g to %(max)g characters': 'Entri des de %(min)g a %(max)g caràcters',
|
||||
'Enter valid filename': 'Entri nom de fitxer vàlid',
|
||||
'Error logs for "%(app)s"': 'Bitàcora de errors a "%(app)s"',
|
||||
'errors': 'errors',
|
||||
'Errors': 'Errors',
|
||||
'Errors in form, please check it out.': 'Hi ha errors en el formulari, per favor comprovi-ho.',
|
||||
'export as csv file': 'exportar com fitxer CSV',
|
||||
'Export:': 'Exportar:',
|
||||
'exposes': 'exposa',
|
||||
'extends': 'extén',
|
||||
'failed to reload module': 'la recàrrega del mòdul ha fallat',
|
||||
'FAQ': 'FAQ',
|
||||
'file': 'fitxer',
|
||||
'file "%(filename)s" created': 'fitxer "%(filename)s" creat',
|
||||
'file "%(filename)s" deleted': 'fitxer "%(filename)s" eliminat',
|
||||
'file "%(filename)s" uploaded': 'fitxer "%(filename)s" pujat',
|
||||
'file "%(filename)s" was not deleted': 'fitxer "%(filename)s" no fou eliminat',
|
||||
'file "%s" of %s restored': 'fitxer "%s" de %s restaurat',
|
||||
'file ## download': 'file ',
|
||||
'file changed on disk': 'fitxer modificat en el disco',
|
||||
'file does not exist': 'fitxer no existeix',
|
||||
'file saved on %(time)s': 'fitxer guardat a %(time)s',
|
||||
'file saved on %s': 'fitxer guardat a %s',
|
||||
'First name': 'Nom',
|
||||
'Forgot username?': 'Ha oblidat el nom de usuari?',
|
||||
'Forms and Validators': 'Formularis i validadors',
|
||||
'Free Applications': 'Aplicacions Lliures',
|
||||
'Functions with no doctests will result in [passed] tests.': 'Funcions sense doctests equivalen a pruebas [aceptades].',
|
||||
'Group %(group_id)s created': 'Grupo %(group_id)s creat',
|
||||
'Group ID': 'ID de Grup',
|
||||
'Group uniquely assigned to user %(id)s': 'Grup assignat únicament al usuari %(id)s',
|
||||
'Groups': 'Grups',
|
||||
'Hello': 'Hola',
|
||||
'Hello World': 'Hola Món',
|
||||
'help': 'ajuda',
|
||||
'Home': 'Inici',
|
||||
'Hosted by': 'Hosted by',
|
||||
'How did you get here?': 'Com has arribat aquí?',
|
||||
'HTML': 'HTML',
|
||||
'HTML export of visible columns': 'HTML export de columnes visibles',
|
||||
'htmledit': 'htmledit',
|
||||
'Impersonate': 'Suplantar',
|
||||
'import': 'importar',
|
||||
'Import/Export': 'Importar/Exportar',
|
||||
'in': 'a',
|
||||
'includes': 'inclou',
|
||||
'Index': 'Índex',
|
||||
'insert new': 'inserti nou',
|
||||
'insert new %s': 'inserti nou %s',
|
||||
'Installed applications': 'Aplicacions instalades',
|
||||
'Insufficient privileges': 'Privilegis insuficients',
|
||||
'internal error': 'error intern',
|
||||
'Internal State': 'Estat Intern',
|
||||
'Introduction': 'Introducció',
|
||||
'Invalid action': 'Acció invàlida',
|
||||
'Invalid email': 'Correo electrónico invàlid',
|
||||
'invalid expression': 'expressió invàlida',
|
||||
'Invalid login': 'Inici de sessió invàlida',
|
||||
'invalid password': 'contrasenya invàlida',
|
||||
'Invalid Query': 'Consulta invàlida',
|
||||
'invalid request': 'sol·licitud invàlida',
|
||||
'Invalid reset password': 'Reinici de contrasenya invàlid',
|
||||
'invalid ticket': 'tiquet invàlid',
|
||||
'Is Active': 'Està Actiu',
|
||||
'Key': 'Clau',
|
||||
'language file "%(filename)s" created/updated': 'fitxer de llenguatge "%(filename)s" creat/actualitzat',
|
||||
'Language files (static strings) updated': 'Fitxers de llenguatge (cadenes estàtiques) actualitzats',
|
||||
'languages': 'llenguatges',
|
||||
'Languages': 'Llenguatges',
|
||||
'languages updated': 'llenguatges actualitzats',
|
||||
'Last name': 'Cognom',
|
||||
'Last saved on:': 'Guardat a:',
|
||||
'Layout': 'Diseny de pàgina',
|
||||
'Layout Plugins': 'Plugins de disseny',
|
||||
'Layouts': 'Dissenys de pàgines',
|
||||
'License for': 'Llicència per a',
|
||||
'Live Chat': 'Xat en viu',
|
||||
'loading...': 'carregant...',
|
||||
'Log In': 'Log In',
|
||||
'Log Out': 'Log Out',
|
||||
'Logged in': 'Sessió iniciada',
|
||||
'Logged out': 'Sessió finalitzada',
|
||||
'Login': 'Inici de sessió',
|
||||
'login': 'inici de sessió',
|
||||
'Login disabled by administrator': 'Inici de sessió inhabilitat pel administrador',
|
||||
'Login to the Administrative Interface': 'Inici de sessió per a la Interfície Administrativa',
|
||||
'logout': 'fi de sessió',
|
||||
'Logout': 'Fi de sessió',
|
||||
'Lost Password': 'Contrasenya perdida',
|
||||
'Lost password?': 'Ha oblidat la contrasenya?',
|
||||
'lost password?': '¿ha oblidat la contrasenya?',
|
||||
'Main Menu': 'Menú principal',
|
||||
'Manage %(action)s': 'Manage %(action)s',
|
||||
'Manage Access Control': 'Manage Access Control',
|
||||
'Manage Cache': 'Gestionar la Caché',
|
||||
'Menu Model': 'Model "menu"',
|
||||
'merge': 'combinar',
|
||||
'Models': 'Models',
|
||||
'models': 'models',
|
||||
'Modified By': 'Modificat Per',
|
||||
'Modified On': 'Modificat A',
|
||||
'Modules': 'Mòduls',
|
||||
'modules': 'mòduls',
|
||||
'must be YYYY-MM-DD HH:MM:SS!': '¡debe ser DD/MM/YYYY HH:MM:SS!',
|
||||
'must be YYYY-MM-DD!': '¡debe ser DD/MM/YYYY!',
|
||||
'My Sites': 'Els Meus Llocs',
|
||||
'Name': 'Nombre',
|
||||
'New': 'Nuevo',
|
||||
'New %(entity)s': 'Nou %(entity)s',
|
||||
'new application "%s" created': 'nova aplicació "%s" creada',
|
||||
'New password': 'Contrasenya nova',
|
||||
'New Record': 'Registre nou',
|
||||
'new record inserted': 'nou registre insertat',
|
||||
'New Search': 'Cerca nova',
|
||||
'next %s rows': 'següents %s files',
|
||||
'next 100 rows': '100 files següents',
|
||||
'NO': 'NO',
|
||||
'No databases in this application': 'No hi ha bases de dades en esta aplicació',
|
||||
'No records found': "No s'han trobat registres",
|
||||
'Not authorized': 'No autoritzat',
|
||||
'not in': 'no a',
|
||||
'Object or table name': 'Nom del objecte o taula',
|
||||
'Old password': 'Contrasenya anterior',
|
||||
'Online examples': 'Ejemples en línia',
|
||||
'Or': 'O',
|
||||
'or import from csv file': 'o importar desde fitxer CSV',
|
||||
'or provide application url:': 'o proveeix URL de la aplicació:',
|
||||
'Origin': 'Origen',
|
||||
'Original/Translation': 'Original/Traducció',
|
||||
'Other Plugins': 'Altres Plugins',
|
||||
'Other Recipes': 'Altres Receptes',
|
||||
'Overview': 'Resum',
|
||||
'pack all': 'empaquetar tot',
|
||||
'pack compiled': 'empaquetar compilats',
|
||||
'Password': 'Contrasenya',
|
||||
'Password changed': 'Contrasenya cambiada',
|
||||
"Password fields don't match": 'Els camps de contrasenya no coincideixen',
|
||||
'Password reset': 'Reinici de contrasenya',
|
||||
'Peeking at file': 'Visualitzant fitxer',
|
||||
'Permission': 'Permís',
|
||||
'Permissions': 'Permisos',
|
||||
'Phone': 'Telèfon',
|
||||
'please input your password again': 'si us plau, entri un altre cop la seva contrasenya',
|
||||
'Plugins': 'Plugins',
|
||||
'Powered by': 'Aquest lloc utilitza',
|
||||
'Preface': 'Prefaci',
|
||||
'Presentar Factures': 'Presentar Factures',
|
||||
'Presentar factures': 'Presentar factures',
|
||||
'previous %s rows': '%s files prèvies',
|
||||
'previous 100 rows': '100 files anteriors',
|
||||
'Profile': 'Perfil',
|
||||
'Profile updated': 'Perfil actualitzat',
|
||||
'pygraphviz library not found': 'pygraphviz library not found',
|
||||
'Python': 'Python',
|
||||
'Query Not Supported: %s': 'Consulta No Suportada: %s',
|
||||
'Query:': 'Consulta:',
|
||||
'Quick Examples': 'Exemple Ràpids',
|
||||
'RAM': 'RAM',
|
||||
'RAM Cache Keys': 'Claus de la Caché en RAM',
|
||||
'Ram Cleared': 'Ram Netjeda',
|
||||
'Recipes': 'Receptes',
|
||||
'Record': 'Registre',
|
||||
'Record %(id)s created': 'Registre %(id)s creat',
|
||||
'Record Created': 'Registre Creat',
|
||||
'record does not exist': 'el registre no existe',
|
||||
'Record ID': 'ID de Registre',
|
||||
'Record id': 'Id de registre',
|
||||
'Ref APB': 'Ref APB',
|
||||
'register': "registri's",
|
||||
'Register': "Registri's",
|
||||
'Registration identifier': 'Identificador de Registre',
|
||||
'Registration key': 'Clau de registre',
|
||||
'Registration successful': 'Registre amb èxit',
|
||||
'reload': 'recarregar',
|
||||
'Remember me (for 30 days)': "Recordi'm (durant 30 dies)",
|
||||
'remove compiled': 'eliminar compilades',
|
||||
'Request reset password': 'Sol·licitud de restabliment de contrasenya',
|
||||
'Reset password': 'Reiniciar contrasenya',
|
||||
'Reset Password key': 'Restaurar Clau de la Contrasenya',
|
||||
'Resolve Conflict file': 'Resolgui el Conflicte de fitxer',
|
||||
'restore': 'restaurar',
|
||||
'Retrieve username': 'Recuperar nom de usuari',
|
||||
'revert': 'revertir',
|
||||
'Role': 'Rol',
|
||||
'Roles': 'Rols',
|
||||
'Rows in Table': 'Files a la taula',
|
||||
'Rows selected': 'Files seleccionades',
|
||||
'save': 'guardar',
|
||||
'Save model as...': 'Save model as...',
|
||||
'Saved file hash:': 'Hash del fitxer guardat:',
|
||||
'Search': 'Buscar',
|
||||
'Search Pages': 'Search Pages',
|
||||
'Semantic': 'Semàntica',
|
||||
'Services': 'Serveis',
|
||||
'session expired': 'sessió expirada',
|
||||
'shell': 'terminal',
|
||||
'Sign Up': 'Sign Up',
|
||||
'site': 'lloc',
|
||||
'Size of cache:': 'Mida de la Caché:',
|
||||
'Slug': 'Slug',
|
||||
'some files could not be removed': 'algunos archivos no pudieron ser removidos',
|
||||
'Spreadsheet-optimised export of tab-separated content including hidden columns. May be slow': 'Spreadsheet-optimised export of tab-separated content including hidden columns. May be slow',
|
||||
'Spreadsheet-optimised export of tab-separated content, visible columns only. May be slow.': 'Spreadsheet-optimised export of tab-separated content, visible columns only. May be slow.',
|
||||
'start': 'inici',
|
||||
'Start building a new search': 'Start building a new search',
|
||||
'starts with': 'comença per',
|
||||
'state': 'estat',
|
||||
'static': 'estàtics',
|
||||
'Static files': 'Fitxers estàtics',
|
||||
'Statistics': 'Estadístiques',
|
||||
'Stylesheet': "Fulla d'estil",
|
||||
'Submit': 'Enviar',
|
||||
'submit': 'enviar',
|
||||
'Success!': 'Correcte!',
|
||||
'Support': 'Suport',
|
||||
'Sure you want to delete this object?': '¿Està segur que vol eliminar aquest objecte?',
|
||||
'Table': 'taula',
|
||||
'Table name': 'Nom de la taula',
|
||||
'test': 'provar',
|
||||
'Testing application': 'Provant aplicació',
|
||||
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "consulta" és una condición com "db.tabla1.campo1==\'valor\'". Algo com "db.tabla1.campo1==db.tabla2.campo2" resulta en un JOIN SQL.',
|
||||
'the application logic, each URL path is mapped in one exposed function in the controller': 'la lògica de la aplicació, cada ruta URL es mapeja en una funció exposada en el controlador',
|
||||
'The Core': 'El Nucli',
|
||||
'the data representation, define database tables and sets': 'la representació de dades, defineix taules i conjunts de base de dades',
|
||||
'The output of the file is a dictionary that was rendered by the view %s': 'El resultat de aquesta funció és un diccionari que és desplegat per la vista %s',
|
||||
'the presentations layer, views are also known as templates': 'la capa de presentació, les vistes també són anomenades plantilles',
|
||||
'The Views': 'Les Vistes',
|
||||
'There are no controllers': 'No hi ha controladors',
|
||||
'There are no models': 'No hi ha models',
|
||||
'There are no modules': 'No hi ha mòduls',
|
||||
'There are no static files': 'No hi ha fitxers estàtics',
|
||||
'There are no translators, only default language is supported': 'No hi ha traductors, només el llenguatge per defecte és suportat',
|
||||
'There are no views': 'No hi ha vistes',
|
||||
'these files are served without processing, your images go here': 'aquests fitxers són servits sense processar, les seves imatges van aquí',
|
||||
'This App': 'Aquesta Aplicació',
|
||||
'This email already has an account': 'Aquest correu electrònic ja té un compte',
|
||||
'This is a copy of the scaffolding application': 'Aquesta és una còpia de la aplicació de bastiment',
|
||||
'This is the %(filename)s template': 'Aquesta és la plantilla %(filename)s',
|
||||
'Ticket': 'Tiquet',
|
||||
'Time in Cache (h:m:s)': 'Temps en Caché (h:m:s)',
|
||||
'Timestamp': 'Marca de temps',
|
||||
'Title': 'Títol',
|
||||
'to previous version.': 'a la versió prèvia.',
|
||||
'To emulate a breakpoint programatically, write:': 'Emular un punto de ruptura programàticament, escribir:',
|
||||
'to use the debugger!': 'usar el depurador!',
|
||||
'toggle breakpoint': 'alternar punt de ruptura',
|
||||
'Toggle comment': 'Alternar comentari',
|
||||
'Toggle Fullscreen': 'Alternar pantalla completa',
|
||||
'too short': 'massa curt',
|
||||
'Traceback': 'Traceback',
|
||||
'translation strings for the application': 'cadenes de caracters de traducció per a la aplicació',
|
||||
'try': 'intenti',
|
||||
'try something like': 'intenti algo com',
|
||||
'TSV (Excel compatible)': 'TSV (compatible Excel)',
|
||||
'TSV (Excel compatible, hidden cols)': 'TSV (compatible Excel, columnes ocultes)',
|
||||
'TSV (Spreadsheets)': 'TSV (Fulls de càlcul)',
|
||||
'TSV (Spreadsheets, hidden cols)': 'TSV (Fulls de càlcul, columnes amagades)',
|
||||
'Twitter': 'Twitter',
|
||||
'Unable to check for upgrades': 'No és possible verificar la existencia de actualitzacions',
|
||||
'unable to create application "%s"': 'no és possible crear la aplicació "%s"',
|
||||
'unable to delete file "%(filename)s"': 'no és possible eliminar el fitxer "%(filename)s"',
|
||||
'Unable to download': 'No és possible la descàrrega',
|
||||
'Unable to download app': 'No és possible descarregar la aplicació',
|
||||
'unable to parse csv file': 'no és possible analitzar el fitxer CSV',
|
||||
'unable to uninstall "%s"': 'no és possible instalar "%s"',
|
||||
'uncheck all': 'desmarcar tots',
|
||||
'uninstall': 'desinstalar',
|
||||
'unknown': 'desconocido',
|
||||
'update': 'actualitzar',
|
||||
'update all languages': 'actualitzar tots els llenguatges',
|
||||
'Update:': 'Actualizi:',
|
||||
'upload application:': 'pujar aplicació:',
|
||||
'Upload existing application': 'Puji aquesta aplicació',
|
||||
'upload file:': 'puji fitxer:',
|
||||
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, i ~(...) para NOT, para crear consultas més complexes.',
|
||||
'User': 'Usuari',
|
||||
'User %(id)s is impersonating %(other_id)s': 'El usuari %(id)s està suplantant %(other_id)s',
|
||||
'User %(id)s Logged-in': 'El usuari %(id)s inicià la sessió',
|
||||
'User %(id)s Logged-out': 'El usuari %(id)s finalitzà la sessió',
|
||||
'User %(id)s Password changed': 'Contrasenya del usuari %(id)s canviada',
|
||||
'User %(id)s Password reset': 'Contrasenya del usuari %(id)s reiniciada',
|
||||
'User %(id)s Profile updated': 'Actualitzat el perfil del usuari %(id)s',
|
||||
'User %(id)s Registered': 'Usuari %(id)s Registrat',
|
||||
'User %(id)s Username retrieved': 'Se ha recuperat el nom de usuari del usuari %(id)s',
|
||||
'User %(username)s Logged-in': 'El usuari %(username)s inicià la sessió',
|
||||
"User '%(username)s' Logged-in": "El usuari '%(username)s' inicià la sessió",
|
||||
"User '%(username)s' Logged-out": "El usuari '%(username)s' finalitzà la sessió",
|
||||
'User Id': 'Id de Usuari',
|
||||
'User ID': 'ID de Usuari',
|
||||
'User Logged-out': 'El usuari finalitzà la sessió',
|
||||
'Username': 'Nom de usuari',
|
||||
'Username retrieve': 'Recuperar nom de usuari',
|
||||
'Users': 'Usuaris',
|
||||
'Value already in database or empty': 'El valor ya existeix en la base de dades o està buit',
|
||||
'value already in database or empty': 'el valor ya existeix en la base de dades o està buit',
|
||||
'value not allowed': 'valor no permès',
|
||||
'Value not in database': 'El valor no està a la base de dades',
|
||||
'value not in database': 'el valor no està a la base de dades',
|
||||
'Verify Password': 'Verificar Contrasenya',
|
||||
'Version': 'Versió',
|
||||
'versioning': 'versions',
|
||||
'Videos': 'Videos',
|
||||
'View': 'Vista',
|
||||
'view': 'vista',
|
||||
'View %(entity)s': 'Veure %(entity)s',
|
||||
'View Page': 'View Page',
|
||||
'Views': 'Vistes',
|
||||
'views': 'vistes',
|
||||
'web2py is up to date': 'web2py està actualitzat',
|
||||
'web2py Recent Tweets': 'Tweets Recents de web2py',
|
||||
'Welcome': 'Benvingut',
|
||||
'Welcome %s': 'Benvingut %s',
|
||||
'Welcome to web2py': 'Benvingut a web2py',
|
||||
'Welcome to web2py!': '¡Benvingut a web2py!',
|
||||
'Which called the function %s located in the file %s': 'La qual va cridar la funció %s localitzada en el fitxer %s',
|
||||
'Wiki Page': 'Wiki Page',
|
||||
'Working...': 'Treballant ...',
|
||||
'XML': 'XML',
|
||||
'XML export of columns shown': 'XML export of columns shown',
|
||||
'YES': 'SÍ',
|
||||
'You are successfully running web2py': 'Vostè està executant web2py amb èxit',
|
||||
'You can modify this application and adapt it to your needs': 'Vostè pot modificar aquesta aplicació i adaptar-la a les seves necessitats',
|
||||
'You visited the url %s': 'Vostè va visitar la url %s',
|
||||
'Your username is: %(username)s': 'El seu nom de usuari és: %(username)s',
|
||||
}
|
||||
@@ -0,0 +1,278 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
{
|
||||
'!langcode!': 'my-mm',
|
||||
'!langname!': 'မြန်မာ',
|
||||
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
|
||||
'%s %%{row} deleted': '%s %%{row} ဖျက်ပြီးပြီ',
|
||||
'%s %%{row} updated': '%s %%{row} ပြင်ပြီးပြီ',
|
||||
'%s selected': '%s ခု ရွေးထားသည်',
|
||||
'%Y-%m-%d': '%Y-%m-%d',
|
||||
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
|
||||
'(requires internet access, experimental)': '(requires internet access, experimental)',
|
||||
'(something like "it-it")': '(something like "it-it")',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
|
||||
'About': 'အကြောင်း',
|
||||
'Access Control': 'အသုံးပြု ခြင်းဆိုင်ရာ ထိန်းချုပ်ရန်',
|
||||
'Additional code for your application': 'Additional code for your application',
|
||||
'Admin language': 'Admin language',
|
||||
'administrative interface': 'administrative interface',
|
||||
'Administrative Interface': 'စီမံခန့်ခွဲရာ အင်တာဖေ့စ်',
|
||||
'Administrator Password:': 'Administrator Password:',
|
||||
'Ajax Recipes': 'Ajax Recipes',
|
||||
'and rename it:': 'and rename it:',
|
||||
'appadmin is disabled because insecure channel': 'စိတ်မချရသော လမ်းကြောင်းမှ ဝင်ရောက်သဖြင့် appadmin ကို အသုံးပြု၍ မရပါ',
|
||||
'Application name:': 'Application name:',
|
||||
'are not used': 'အသုံးမပြုပါ',
|
||||
'are not used yet': 'အသုံးမပြုသေးပါ',
|
||||
'Are you sure you want to delete this object?': 'သင် ဒီအရာ ဖျက်ရန် သေချာပါသလား။',
|
||||
'Available Databases and Tables': 'အသုံးပြုနိုင်သော ဒေတာဘေစ့်များနှင့် ဇယားများ',
|
||||
'Buy this book': 'ဒီစာအုပ်ကို ဝယ်ပါ',
|
||||
'cache': 'cache',
|
||||
'Cache': 'Cache',
|
||||
'Cache Keys': 'Cache Keys',
|
||||
'can be a git repo': 'can be a git repo',
|
||||
'Cannot be empty': 'အလွတ် မဖြစ်ရပါ',
|
||||
'Change admin password': 'Change admin password',
|
||||
'Check to delete': 'ဖျက်ရန် စစ်ဆေးပါ',
|
||||
'Checking for upgrades...': 'အဆင့်မြှင့်တင်မှုများအတွက် စစ်ဆေးနေသည် ...',
|
||||
'Clean': 'ရှင်းလင်းရန်',
|
||||
'Clear CACHE?': 'CACHE ကို ရှင်းလင်းမည်မှာ ဟုတ်ပါသလား။',
|
||||
'Clear DISK': 'DISK ကို ရှင်းလင်းမည်။',
|
||||
'Clear RAM': 'RAM ကို ရှင်းလင်းမည်။',
|
||||
'Client IP': 'Client IP',
|
||||
'collapse/expand all': 'collapse/expand all',
|
||||
'Community': 'အသိုင်းအဝိုင်း',
|
||||
'Compile': 'Compile',
|
||||
'Components and Plugins': 'Components and Plugins',
|
||||
'Controller': 'ကွန်ထရိုလာ',
|
||||
'Controllers': 'ကွန်ထရိုလာများ',
|
||||
'controllers': 'controllers',
|
||||
'Copyright': 'မူပိုင်ခွင့်',
|
||||
'Create': 'ဖန်တီးရန်',
|
||||
'create file with filename:': 'create file with filename:',
|
||||
'Create/Upload': 'Create/Upload',
|
||||
'created by': 'ဖန်းတီးသူ',
|
||||
'Created By': 'ပြုလုပ်ဖန်တီးသူ',
|
||||
'Created On': 'ပြုလုပ်ဖန်တီးသည့်အချိန်',
|
||||
'crontab': 'crontab',
|
||||
'Current request': 'Current request',
|
||||
'Current response': 'Current response',
|
||||
'Current session': 'Current session',
|
||||
'currently running': 'လက်ရှိတွင် လုပ်ဆောင်နေသည်',
|
||||
'data uploaded': 'data uploaded',
|
||||
'Database': 'ဒေတာဘေစ့်',
|
||||
'Database %s select': 'Database %s select',
|
||||
'database administration': 'ဒေတာဘေ့(စ်) စီမံခန့်ခွဲခြင်း',
|
||||
'Database Administration (appadmin)': 'ဒေတာဘေစ့် စီမံခန့်ခွဲခြင်း (appadmin)',
|
||||
'db': 'db',
|
||||
'DB Model': 'DB Model',
|
||||
'Debug': 'အမှားရှာရန်',
|
||||
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
|
||||
'Delete:': 'Delete:',
|
||||
'Demo': 'အစမ်း၊ သရုပ်ပြမှုများ',
|
||||
'Deploy': 'Deploy',
|
||||
'Deploy on Google App Engine': 'Deploy on Google App Engine',
|
||||
'Deploy to OpenShift': 'Deploy to OpenShift',
|
||||
'Deployment Recipes': 'Deployment Recipes',
|
||||
'Description': 'ဖော်ပြချက်',
|
||||
'design': 'design',
|
||||
'direction: ltr': 'direction: ltr',
|
||||
'Disable': 'ပိတ်ရန်',
|
||||
'DISK': 'DISK',
|
||||
'Disk Cache Keys': 'Disk Cache Keys',
|
||||
'Disk Cleared': 'Disk ရှင်းလင်းပြီးပြီ',
|
||||
'Documentation': 'စာရွက်စာတမ်း အထောက်အကူများ',
|
||||
"Don't know what to do?": 'ဘာလုပ်ရမည်မသိ ဖြစ်နေပါသလား။',
|
||||
'done!': 'လုပ်ငန်း ဆောင်ရွက်ပြီးပြီ!',
|
||||
'Download': 'Download',
|
||||
'Download layouts from repository': 'Download layouts from repository',
|
||||
'Download plugins from repository': 'Download plugins from repository',
|
||||
'E-mail': 'အီးမေးလ်',
|
||||
'Edit': 'ပြင်ဆင်ရန်',
|
||||
'Edit application': 'Application ကို ပြင်ရန်',
|
||||
'Edit current record': 'လက်ရှိ မှတ်တမ်းကို ပြင်ရန်',
|
||||
'Email and SMS': 'အီးမေးလ်နှင့် SMS',
|
||||
'Enable': 'ဖွင့်ရန်',
|
||||
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
|
||||
'Errors': 'အမှားများ',
|
||||
'export as csv file': ' csv file အနေနဲ့ ထုတ်ပေးရန်',
|
||||
'exposes': 'exposes',
|
||||
'extends': 'extends',
|
||||
'FAQ': 'ဖြစ်လေ့ရှိသော ပြဿနာများ',
|
||||
'filter': 'filter',
|
||||
'First name': 'အမည်၏ ပထမဆုံး စာလုံး',
|
||||
'Forms and Validators': 'Forms and Validators',
|
||||
'Free Applications': 'အခမဲ့ Applications',
|
||||
'graph model': 'graph model',
|
||||
'Graph Model': 'Graph Model',
|
||||
'Group ID': 'Group ID',
|
||||
'Groups': 'အဖွဲ့များ',
|
||||
'Hello World': 'မင်္ဂလာပါ ကမ္ဘာကြီး။',
|
||||
'Help': 'အကူအညီ',
|
||||
'Home': 'မူလသို့',
|
||||
'How did you get here?': 'သင် ဘယ်လို ရောက်လာခဲ့သလဲ။',
|
||||
'import': 'သွင်းယူရန်',
|
||||
'Import/Export': 'သွင်းယူရန်/ထုတ်ယူရန်',
|
||||
'includes': 'includes',
|
||||
'Install': 'Install',
|
||||
'Installed applications': 'ထည့်သွင်းပြီး application များ',
|
||||
'Internal State': 'Internal State',
|
||||
'Introduction': 'မိတ်ဆက်',
|
||||
'Invalid email': 'အီးမေးလ် ဖြည့်သွင်းမှုမှားနေသည်',
|
||||
'Invalid Query': 'Invalid Query',
|
||||
'invalid request': 'invalid request',
|
||||
'Is Active': 'Is Active',
|
||||
'Key': 'Key',
|
||||
'Language': 'ဘာသာစကား',
|
||||
'languages': 'ဘာသာစကားများ',
|
||||
'Languages': 'ဘာသာစကားများ',
|
||||
'Last name': 'မျိုးနွယ်အမည်',
|
||||
'Layout': 'အပြင်အဆင်',
|
||||
'Layout Plugins': 'Layout Plugins',
|
||||
'Layouts': 'အပြင်အဆင်များ',
|
||||
'Live Chat': 'တိုက်ရိုက် ဆက်သွယ် ပြောကြားရန်',
|
||||
'Login': 'ဝင်ရောက်အသုံးပြုရန်',
|
||||
'Login to the Administrative Interface': 'Login to the Administrative Interface',
|
||||
'Logout': 'ထွက်ရန်',
|
||||
'Lost Password': 'စကားဝှက် မသိတော့ပါ',
|
||||
'Lost password?': 'စကားဝှက် မသိတော့ဘူးလား။',
|
||||
'Manage': 'စီမံခန့်ခွဲရန်',
|
||||
'Manage %(action)s': '%(action)s ကို စီမံရန်',
|
||||
'Manage Access Control': 'အသုံးပြုခြင်းဆိုင်ရာ ထိန်းချုပ်မှု စီမံခန့်ခွဲရန်',
|
||||
'Manage Cache': 'Manage Cache',
|
||||
'Memberships': 'အသင်းဝင်များ',
|
||||
'Menu Model': 'Menu Model',
|
||||
'models': 'models',
|
||||
'Models': 'Models',
|
||||
'Modified By': 'ပြင်ဆင်မွမ်းမံသူ',
|
||||
'Modified On': 'ပြင်ဆင်မွမ်းမံသည့် အချိန်',
|
||||
'Modules': 'Modules',
|
||||
'modules': 'modules',
|
||||
'My Sites': 'ကျွန်ုပ်၏ Site များ',
|
||||
'Name': 'အမည်',
|
||||
'New application wizard': 'New application wizard',
|
||||
'New Record': 'မှတ်တမ်း အသစ်',
|
||||
'new record inserted': 'မှတ်တမ်း အသစ် ဖြည့်သွင်းပြီးပြီ',
|
||||
'New simple application': 'ရိုးရိုး application အသစ်',
|
||||
'next %s rows': 'နောက်အတန်း %s တန်း',
|
||||
'No databases in this application': 'ဒီ application တွင် မည်သည့် ဒေတာဘေစ့်မှ မရှိပါ',
|
||||
'no package selected': 'no package selected',
|
||||
'Object or table name': 'Object or table name',
|
||||
'Online examples': 'အွန်လိုင်း နမူနာများ',
|
||||
'or alternatively': 'or alternatively',
|
||||
'Or Get from URL:': 'Or Get from URL:',
|
||||
'or import from csv file': 'or import from csv file',
|
||||
'Origin': 'မူလ အစ',
|
||||
'Other Plugins': 'အခြား Plugins',
|
||||
'Other Recipes': 'အခြား Recipes',
|
||||
'Overview': 'အပေါ်ယံရှုမြင်ခြင်း',
|
||||
'Overwrite installed app': 'Overwrite installed app',
|
||||
'Pack all': 'အားလုံးကို ထုပ်ပိုးရန်',
|
||||
'Pack custom': 'ရွေးချယ်ထုပ်ပိုးရန်',
|
||||
'Password': 'စကားဝှက်',
|
||||
"Password fields don't match": 'စကားဝှက်များ ကိုက်ညီမှု မရှိပါ',
|
||||
'Permission': 'ခွင့်ပြုချက်',
|
||||
'Permissions': 'ခွင့်ပြုချက်များ',
|
||||
'please input your password again': 'ကျေးဇူးပြု၍ စကားဝှက်ကို ထပ်မံ ဖြည့်သွင်းပေးပါ',
|
||||
'Plugins': 'Plugins',
|
||||
'plugins': 'plugins',
|
||||
'Plural-Forms:': 'Plural-Forms:',
|
||||
'Powered by': 'အားဖြည့်စွမ်းအားပေးသူ',
|
||||
'Preface': 'နိဒါန်း',
|
||||
'previous %s rows': 'previous %s rows',
|
||||
'Private files': 'Private files',
|
||||
'private files': 'private files',
|
||||
'pygraphviz library not found': 'pygraphviz library ကို မတွေ့ပါ',
|
||||
'Python': 'Python',
|
||||
'Query:': 'Query:',
|
||||
'Quick Examples': 'အမြန် အသုံးပြုနိုင်သော နမူနာများ',
|
||||
'RAM': 'RAM',
|
||||
'RAM Cache Keys': 'RAM Cache Keys',
|
||||
'Ram Cleared': 'Ram ရှင်းလင်းပြီးပြီ',
|
||||
'Recipes': 'Recipes',
|
||||
'Record': 'မှတ်တမ်း',
|
||||
'record does not exist': 'မှတ်တမ်း မရှိပါ',
|
||||
'Record ID': 'Record ID',
|
||||
'Record id': 'Record id',
|
||||
'Register': 'မှတ်ပုံတင်ရန်',
|
||||
'Registration identifier': 'Registration identifier',
|
||||
'Registration key': 'Registration key',
|
||||
'Reload routes': 'Reload routes',
|
||||
'Remember me (for 30 days)': 'Remember me (for 30 days)',
|
||||
'Request reset password': 'စကားဝှက် အသစ် တောင်းဆိုရန်',
|
||||
'Reset Password key': 'Reset Password key',
|
||||
'Role': 'Role',
|
||||
'Roles': 'Roles',
|
||||
'Rows in Table': 'Rows in Table',
|
||||
'Rows selected': 'ရွေးထားသော အတန်းများ',
|
||||
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
|
||||
'Running on %s': 'Running on %s',
|
||||
'Save model as...': 'Save model as...',
|
||||
'Semantic': 'Semantic',
|
||||
'Services': 'Services',
|
||||
'shell': 'shell',
|
||||
'Site': 'Site',
|
||||
'Size of cache:': 'Size of cache:',
|
||||
'Start wizard': 'Start wizard',
|
||||
'state': 'state',
|
||||
'static': 'static',
|
||||
'Static': 'Static',
|
||||
'Statistics': 'ကိန်းဂဏန်း အချက်အလက်များ',
|
||||
'Stylesheet': 'Stylesheet',
|
||||
'submit': 'ပြုလုပ်ပါ',
|
||||
'Submit': 'Submit',
|
||||
'Support': 'အထောက်အပံ့',
|
||||
'Table': 'ဇယား',
|
||||
'test': 'test',
|
||||
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
|
||||
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
|
||||
'The Core': 'The Core',
|
||||
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
|
||||
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
|
||||
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
|
||||
'The Views': 'The Views',
|
||||
'There are no plugins': 'There are no plugins',
|
||||
'There are no private files': 'There are no private files',
|
||||
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
|
||||
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
|
||||
'This App': 'ဒီ App',
|
||||
'This email already has an account': 'ဒီအီးမေးလ်တွင် အကောင့် ရှိပြီး ဖြစ်ပါသည်',
|
||||
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
|
||||
'Timestamp': 'Timestamp',
|
||||
'To create a plugin, name a file/folder plugin_[name]': 'To create a plugin, name a file/folder plugin_[name]',
|
||||
'Traceback': 'Traceback',
|
||||
'Translation strings for the application': 'Translation strings for the application',
|
||||
'Try the mobile interface': 'Try the mobile interface',
|
||||
'Twitter': 'Twitter',
|
||||
'unable to parse csv file': 'unable to parse csv file',
|
||||
'Uninstall': 'Uninstall',
|
||||
'update all languages': 'update all languages',
|
||||
'Update:': 'Update:',
|
||||
'Upload': 'Upload',
|
||||
'Upload a package:': 'Upload a package:',
|
||||
'Upload and install packed application': 'Upload and install packed application',
|
||||
'upload file:': 'upload file:',
|
||||
'upload plugin file:': 'upload plugin file:',
|
||||
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
|
||||
'User': 'အသုံးပြုသူ',
|
||||
'User ID': 'User ID',
|
||||
'Users': 'အသုံးပြုသူများ',
|
||||
'Verify Password': 'စကားဝှက်ကို အတည်ပြုပါ',
|
||||
'Version': 'Version',
|
||||
'Versioning': 'Versioning',
|
||||
'Videos': 'ဗွီဒီယိုများ',
|
||||
'View': 'ဗျူး',
|
||||
'views': 'views',
|
||||
'Views': 'ဗျူးများ',
|
||||
'Web Framework': 'Web Framework',
|
||||
'Welcome': 'ကြိုဆိုပါ၏',
|
||||
'Welcome to web2py!': 'web2py မှ ကြိုဆိုပါသည်။',
|
||||
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
|
||||
'Working...': 'ဆောင်ရွက်နေပါသည် ။ ။ ။',
|
||||
'You are successfully running web2py': 'သင်သည် web2py ကို အောင်မြင်စွာ လည်ပတ်မောင်းနှင်စေပါသည်။',
|
||||
'You can modify this application and adapt it to your needs': 'သင် ဒီ application ကို ပြုပြင်မွမ်းမံနိုင်ပါသည်။ ထို့အပြင် သင့်လိုအပ်ချက်များနှင့် ကိုက်ညီစေရန် ပြုလုပ်နိုင်ပါသည်။',
|
||||
'You visited the url %s': 'သင် လည်ပတ်ခဲ့သော URL %s',
|
||||
'စကားဝှက် အသစ် တောင်းဆိုရန်': 'စကားဝှက် အသစ် တောင်းဆိုရန်',
|
||||
'မှတ်ပုံတင်ရန်': 'မှတ်ပုံတင်ရန်',
|
||||
'ဝင်ရောက်အသုံးပြုရန်': 'ဝင်ရောက်အသုံးပြုရန်',
|
||||
}
|
||||
@@ -85,3 +85,4 @@ use_janrain(auth, filename='private/janrain.key')
|
||||
|
||||
## after defining tables, uncomment below to enable auditing
|
||||
# auth.enable_record_versioning(db)
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -12,35 +12,45 @@
|
||||
{{if request.function=='index':}}
|
||||
<h2>{{=T("Available Databases and Tables")}}</h2>
|
||||
{{if not databases:}}{{=T("No databases in this application")}}{{pass}}
|
||||
<table>
|
||||
{{for db in sorted(databases):}}
|
||||
{{for table in databases[db].tables:}}
|
||||
{{qry='%s.%s.id>0'%(db,table)}}
|
||||
{{tbl=databases[db][table]}}
|
||||
{{if hasattr(tbl,'_primarykey'):}}
|
||||
{{if tbl._primarykey:}}
|
||||
{{firstkey=tbl[tbl._primarykey[0]]}}
|
||||
{{if firstkey.type in ['string','text']:}}
|
||||
{{qry='%s.%s.%s!=""'%(db,table,firstkey.name)}}
|
||||
{{else:}}
|
||||
{{qry='%s.%s.%s>0'%(db,table,firstkey.name)}}
|
||||
{{pass}}
|
||||
{{else:}}
|
||||
{{qry=''}}
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
<tr>
|
||||
<th style="font-size: 1.75em;">
|
||||
{{=A("%s.%s" % (db,table),_href=URL('select',args=[db],vars=dict(query=qry)))}}
|
||||
</th>
|
||||
<td>
|
||||
{{=A(str(T('New Record')),_href=URL('insert',args=[db,table]),_class="btn")}}
|
||||
</td>
|
||||
</tr>
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
</table>
|
||||
|
||||
<ul class="nav nav-tabs" id="myTab">
|
||||
<li class="active" ><a href="#alltables" data-toggle="tab">Tables</a></li>
|
||||
<li><a href="#hooks" data-toggle="tab">Hooks</a></li>
|
||||
</ul>
|
||||
<div class="tab-content">
|
||||
<div class="tab-pane active" id="alltables">
|
||||
<table>
|
||||
{{for db in sorted(databases):}}
|
||||
{{for table in databases[db].tables:}}
|
||||
{{qry='%s.%s.id>0'%(db,table)}}
|
||||
{{tbl=databases[db][table]}}
|
||||
{{if hasattr(tbl,'_primarykey'):}}
|
||||
{{if tbl._primarykey:}}
|
||||
{{firstkey=tbl[tbl._primarykey[0]]}}
|
||||
{{if firstkey.type in ['string','text']:}}
|
||||
{{qry='%s.%s.%s!=""'%(db,table,firstkey.name)}}
|
||||
{{else:}}
|
||||
{{qry='%s.%s.%s>0'%(db,table,firstkey.name)}}
|
||||
{{pass}}
|
||||
{{else:}}
|
||||
{{qry=''}}
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
<tr>
|
||||
<th style="font-size: 1.75em;">
|
||||
{{=A("%s.%s" % (db,table),_href=URL('select',args=[db],vars=dict(query=qry)))}}
|
||||
</th>
|
||||
<td>
|
||||
{{=A(str(T('New Record')),_href=URL('insert',args=[db,table]),_class="btn")}}
|
||||
</td>
|
||||
</tr>
|
||||
{{pass}}
|
||||
{{pass}}
|
||||
</table>
|
||||
</div>
|
||||
<div class="tab-pane" id="hooks">
|
||||
{{=LOAD('appadmin', 'hooks', ajax=True)}}
|
||||
</div>
|
||||
</div>
|
||||
{{elif request.function=='select':}}
|
||||
<h2>{{=XML(str(T("Database %s select"))%A(request.args[0],_href=URL('index'))) }}
|
||||
</h2>
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
gluon.dal.adapters package
|
||||
==========================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
gluon.dal.adapters.base module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.base
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.couchdb module
|
||||
---------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.couchdb
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.cubrid module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.cubrid
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.db2 module
|
||||
-----------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.db2
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.firebird module
|
||||
----------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.firebird
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.google module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.google
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.imap module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.imap
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.informix module
|
||||
----------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.informix
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.ingres module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.ingres
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.mongo module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.mongo
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.mssql module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.mssql
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.mysql module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.mysql
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.oracle module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.oracle
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.postgre module
|
||||
---------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.postgre
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.sapdb module
|
||||
-------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.sapdb
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.sqlite module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.sqlite
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.adapters.teradata module
|
||||
----------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters.teradata
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: gluon.dal.adapters
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
@@ -0,0 +1,38 @@
|
||||
gluon.dal.helpers package
|
||||
=========================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
gluon.dal.helpers.classes module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.helpers.classes
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.helpers.methods module
|
||||
--------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.helpers.methods
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.helpers.regex module
|
||||
------------------------------
|
||||
|
||||
.. automodule:: gluon.dal.helpers.regex
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: gluon.dal.helpers
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
+40
-2
@@ -1,6 +1,44 @@
|
||||
gluon.dal package
|
||||
=================
|
||||
|
||||
:mod:`dal` Module
|
||||
-----------------
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
|
||||
dal.adapters
|
||||
dal.helpers
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
gluon.dal.base module
|
||||
---------------------
|
||||
|
||||
.. automodule:: gluon.dal.base
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.connection module
|
||||
---------------------------
|
||||
|
||||
.. automodule:: gluon.dal.connection
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
gluon.dal.objects module
|
||||
------------------------
|
||||
|
||||
.. automodule:: gluon.dal.objects
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: gluon.dal
|
||||
:members:
|
||||
|
||||
@@ -25,9 +25,9 @@ import sys
|
||||
import re
|
||||
import zipfile
|
||||
|
||||
#read web2py version from VERSION file
|
||||
#read web2py version from VERSION file
|
||||
web2py_version_line = readlines_file('VERSION')[0]
|
||||
#use regular expression to get just the version number
|
||||
#use regular expression to get just the version number
|
||||
v_re = re.compile('[0-9]+\.[0-9]+\.[0-9]+')
|
||||
web2py_version = v_re.search(web2py_version_line).group(0)
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
#Adapted from http://bazaar.launchpad.net/~flavour/sahana-eden/trunk/view/head:/static/scripts/tools/standalone_exe.py
|
||||
|
||||
|
||||
USAGE = """
|
||||
Usage:
|
||||
Copy this and setup_exe.conf to web2py root folder
|
||||
@@ -13,7 +13,7 @@ Usage:
|
||||
Install bbfreeze: https://pypi.python.org/pypi/bbfreeze/
|
||||
run python setup_exe.py bbfreeze
|
||||
"""
|
||||
|
||||
|
||||
from distutils.core import setup
|
||||
from gluon.import_all import base_modules, contributed_modules
|
||||
from gluon.fileutils import readlines_file
|
||||
@@ -24,7 +24,7 @@ import shutil
|
||||
import sys
|
||||
import re
|
||||
import zipfile
|
||||
|
||||
|
||||
if len(sys.argv) != 2 or not os.path.isfile('web2py.py'):
|
||||
print USAGE
|
||||
sys.exit(1)
|
||||
@@ -32,11 +32,11 @@ BUILD_MODE = sys.argv[1]
|
||||
if not BUILD_MODE in ('py2exe', 'bbfreeze'):
|
||||
print USAGE
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def unzip(source_filename, dest_dir):
|
||||
with zipfile.ZipFile(source_filename) as zf:
|
||||
zf.extractall(dest_dir)
|
||||
|
||||
|
||||
#borrowed from http://bytes.com/topic/python/answers/851018-how-zip-directory-python-using-zipfile
|
||||
def recursive_zip(zipf, directory, folder=""):
|
||||
for item in os.listdir(directory):
|
||||
@@ -45,14 +45,14 @@ def recursive_zip(zipf, directory, folder=""):
|
||||
elif os.path.isdir(os.path.join(directory, item)):
|
||||
recursive_zip(
|
||||
zipf, os.path.join(directory, item), folder + os.sep + item)
|
||||
|
||||
|
||||
|
||||
|
||||
#read web2py version from VERSION file
|
||||
web2py_version_line = readlines_file('VERSION')[0]
|
||||
#use regular expression to get just the version number
|
||||
v_re = re.compile('[0-9]+\.[0-9]+\.[0-9]+')
|
||||
web2py_version = v_re.search(web2py_version_line).group(0)
|
||||
|
||||
|
||||
#pull in preferences from config file
|
||||
import ConfigParser
|
||||
Config = ConfigParser.ConfigParser()
|
||||
@@ -68,12 +68,12 @@ include_gevent = Config.getboolean("Setup", "include_gevent")
|
||||
|
||||
# Python base version
|
||||
python_version = sys.version_info[:3]
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if BUILD_MODE == 'py2exe':
|
||||
import py2exe
|
||||
|
||||
|
||||
setup(
|
||||
console=[{'script':'web2py.py',
|
||||
'icon_resources': [(0, 'extras/icons/web2py.ico')]
|
||||
@@ -88,8 +88,8 @@ if BUILD_MODE == 'py2exe':
|
||||
author="Massimo DiPierro",
|
||||
license="LGPL v3",
|
||||
data_files=[
|
||||
'ABOUT',
|
||||
'LICENSE',
|
||||
'ABOUT',
|
||||
'LICENSE',
|
||||
'VERSION'
|
||||
],
|
||||
options={'py2exe': {
|
||||
@@ -108,7 +108,7 @@ if BUILD_MODE == 'py2exe':
|
||||
zipl.close()
|
||||
shutil.rmtree(library_temp_dir)
|
||||
print "web2py binary successfully built"
|
||||
|
||||
|
||||
elif BUILD_MODE == 'bbfreeze':
|
||||
modules = base_modules + contributed_modules
|
||||
from bbfreeze import Freezer
|
||||
@@ -131,26 +131,26 @@ elif BUILD_MODE == 'bbfreeze':
|
||||
for req in ['ABOUT', 'LICENSE', 'VERSION']:
|
||||
shutil.copy(req, os.path.join('dist', req))
|
||||
print "web2py binary successfully built"
|
||||
|
||||
|
||||
try:
|
||||
os.unlink('storage.sqlite')
|
||||
except:
|
||||
pass
|
||||
|
||||
#This need to happen after bbfreeze is run because Freezer() deletes distdir before starting!
|
||||
|
||||
#This need to happen after bbfreeze is run because Freezer() deletes distdir before starting!
|
||||
if python_version > (2,5):
|
||||
# Python26 compatibility: http://www.py2exe.org/index.cgi/Tutorial#Step52
|
||||
try:
|
||||
shutil.copytree('C:\Bin\Microsoft.VC90.CRT', 'dist/Microsoft.VC90.CRT/')
|
||||
except:
|
||||
print "You MUST copy Microsoft.VC90.CRT folder into the archive"
|
||||
|
||||
|
||||
def copy_folders(source, destination):
|
||||
"""Copy files & folders from source to destination (within dist/)"""
|
||||
if os.path.exists(os.path.join('dist', destination)):
|
||||
shutil.rmtree(os.path.join('dist', destination))
|
||||
shutil.copytree(os.path.join(source), os.path.join('dist', destination))
|
||||
|
||||
|
||||
#should we remove Windows OS dlls user is unlikely to be able to distribute
|
||||
if remove_msft_dlls:
|
||||
print "Deleted Microsoft files not licensed for open source distribution"
|
||||
@@ -166,7 +166,7 @@ if remove_msft_dlls:
|
||||
os.unlink(os.path.join('dist', f))
|
||||
except:
|
||||
print "unable to delete dist/" + f
|
||||
|
||||
|
||||
#Should we include applications?
|
||||
if copy_apps:
|
||||
copy_folders('applications', 'applications')
|
||||
@@ -177,12 +177,12 @@ else:
|
||||
copy_folders('applications/welcome', 'applications/welcome')
|
||||
copy_folders('applications/examples', 'applications/examples')
|
||||
print "Only web2py's admin, examples & welcome applications have been added"
|
||||
|
||||
|
||||
copy_folders('extras', 'extras')
|
||||
copy_folders('examples', 'examples')
|
||||
copy_folders('handlers', 'handlers')
|
||||
|
||||
|
||||
|
||||
|
||||
#should we copy project's site-packages into dist/site-packages
|
||||
if copy_site_packages:
|
||||
#copy site-packages
|
||||
@@ -190,7 +190,7 @@ if copy_site_packages:
|
||||
else:
|
||||
#no worries, web2py will create the (empty) folder first run
|
||||
print "Skipping site-packages"
|
||||
|
||||
|
||||
#should we copy project's scripts into dist/scripts
|
||||
if copy_scripts:
|
||||
#copy scripts
|
||||
@@ -198,7 +198,7 @@ if copy_scripts:
|
||||
else:
|
||||
#no worries, web2py will create the (empty) folder first run
|
||||
print "Skipping scripts"
|
||||
|
||||
|
||||
#should we create a zip file of the build?
|
||||
if make_zip:
|
||||
#create a web2py folder & copy dist's files into it
|
||||
@@ -209,13 +209,13 @@ if make_zip:
|
||||
# just temp so the web2py directory is included in our zip file
|
||||
path = 'zip_temp'
|
||||
# leave the first folder as None, as path is root.
|
||||
recursive_zip(zipf, path)
|
||||
recursive_zip(zipf, path)
|
||||
zipf.close()
|
||||
shutil.rmtree('zip_temp')
|
||||
print "Your Windows binary version of web2py can be found in " + \
|
||||
zip_filename + ".zip"
|
||||
print "You may extract the archive anywhere and then run web2py/web2py.exe"
|
||||
|
||||
|
||||
#should py2exe build files be removed?
|
||||
if remove_build_files:
|
||||
if BUILD_MODE == 'py2exe':
|
||||
@@ -223,10 +223,10 @@ if remove_build_files:
|
||||
shutil.rmtree('deposit')
|
||||
shutil.rmtree('dist')
|
||||
print "build files removed"
|
||||
|
||||
|
||||
#final info
|
||||
if not make_zip and not remove_build_files:
|
||||
print "Your Windows binary & associated files can also be found in /dist"
|
||||
|
||||
|
||||
print "Finished!"
|
||||
print "Enjoy web2py " + web2py_version_line
|
||||
print "Enjoy web2py " + web2py_version_line
|
||||
|
||||
+1
-1
@@ -11,7 +11,7 @@ Web2Py framework modules
|
||||
"""
|
||||
|
||||
__all__ = ['A', 'B', 'BEAUTIFY', 'BODY', 'BR', 'CAT', 'CENTER', 'CLEANUP', 'CODE', 'CRYPT', 'DAL', 'DIV', 'EM', 'EMBED', 'FIELDSET', 'FORM', 'Field', 'H1', 'H2', 'H3', 'H4', 'H5', 'H6', 'HEAD', 'HR', 'HTML', 'HTTP', 'I', 'IFRAME', 'IMG', 'INPUT', 'IS_ALPHANUMERIC', 'IS_DATE', 'IS_DATETIME', 'IS_DATETIME_IN_RANGE', 'IS_DATE_IN_RANGE', 'IS_DECIMAL_IN_RANGE', 'IS_EMAIL', 'IS_LIST_OF_EMAILS', 'IS_EMPTY_OR', 'IS_EQUAL_TO', 'IS_EXPR', 'IS_FLOAT_IN_RANGE', 'IS_IMAGE', 'IS_JSON', 'IS_INT_IN_RANGE', 'IS_IN_DB', 'IS_IN_SET', 'IS_IPV4', 'IS_LENGTH', 'IS_LIST_OF', 'IS_LOWER', 'IS_MATCH', 'IS_NOT_EMPTY', 'IS_NOT_IN_DB', 'IS_NULL_OR', 'IS_SLUG', 'IS_STRONG', 'IS_TIME', 'IS_UPLOAD_FILENAME', 'IS_UPPER', 'IS_URL', 'LABEL', 'LEGEND', 'LI', 'LINK', 'LOAD', 'MARKMIN', 'MENU', 'META', 'OBJECT', 'OL', 'ON', 'OPTGROUP', 'OPTION', 'P', 'PRE', 'SCRIPT', 'SELECT', 'SPAN', 'SQLFORM', 'SQLTABLE', 'STRONG', 'STYLE', 'TABLE', 'TAG', 'TBODY', 'TD', 'TEXTAREA', 'TFOOT', 'TH', 'THEAD', 'TITLE', 'TR', 'TT', 'UL', 'URL', 'XHTML', 'XML', 'redirect', 'current', 'embed64']
|
||||
|
||||
|
||||
from globals import current
|
||||
from html import *
|
||||
from validators import *
|
||||
|
||||
+693
-654
File diff suppressed because it is too large
Load Diff
+8
-7
@@ -25,7 +25,8 @@ from gluon.restricted import restricted, compile2
|
||||
from gluon.fileutils import mktree, listdir, read_file, write_file
|
||||
from gluon.myregex import regex_expose, regex_longcomments
|
||||
from gluon.languages import translator
|
||||
from gluon.dal import BaseAdapter, SQLDB, SQLField, DAL, Field
|
||||
from gluon.dal import DAL, Field
|
||||
from gluon.dal.base import BaseAdapter
|
||||
from gluon.sqlhtml import SQLFORM, SQLTABLE
|
||||
from gluon.cache import Cache
|
||||
from gluon.globals import current, Response
|
||||
@@ -126,7 +127,7 @@ class mybuiltin(object):
|
||||
def LOAD(c=None, f='index', args=None, vars=None,
|
||||
extension=None, target=None, ajax=False, ajax_trap=False,
|
||||
url=None, user_signature=False, timeout=None, times=1,
|
||||
content='loading...', **attr):
|
||||
content='loading...', post_vars=Storage(), **attr):
|
||||
""" LOADs a component into the action's document
|
||||
|
||||
Args:
|
||||
@@ -201,7 +202,7 @@ def LOAD(c=None, f='index', args=None, vars=None,
|
||||
other_request.args = List(args)
|
||||
other_request.vars = vars
|
||||
other_request.get_vars = vars
|
||||
other_request.post_vars = Storage()
|
||||
other_request.post_vars = post_vars
|
||||
other_response = Response()
|
||||
other_request.env.path_info = '/' + \
|
||||
'/'.join([request.application, c, f] +
|
||||
@@ -388,8 +389,8 @@ _base_environment_['HTTP'] = HTTP
|
||||
_base_environment_['redirect'] = redirect
|
||||
_base_environment_['DAL'] = DAL
|
||||
_base_environment_['Field'] = Field
|
||||
_base_environment_['SQLDB'] = SQLDB # for backward compatibility
|
||||
_base_environment_['SQLField'] = SQLField # for backward compatibility
|
||||
_base_environment_['SQLDB'] = DAL # for backward compatibility
|
||||
_base_environment_['SQLField'] = Field # for backward compatibility
|
||||
_base_environment_['SQLFORM'] = SQLFORM
|
||||
_base_environment_['SQLTABLE'] = SQLTABLE
|
||||
_base_environment_['LOAD'] = LOAD
|
||||
@@ -406,7 +407,7 @@ def build_environment(request, response, session, store_current=True):
|
||||
# Enable standard conditional models (i.e., /*.py, /[controller]/*.py, and
|
||||
# /[controller]/[function]/*.py)
|
||||
response.models_to_run = [
|
||||
r'^\w+\.py$',
|
||||
r'^\w+\.py$',
|
||||
r'^%s/\w+\.py$' % request.controller,
|
||||
r'^%s/%s/\w+\.py$' % (request.controller, request.function)
|
||||
]
|
||||
@@ -513,7 +514,7 @@ def compile_controllers(folder):
|
||||
for function in exposed:
|
||||
command = data + "\nresponse._vars=response._caller(%s)\n" % \
|
||||
function
|
||||
filename = pjoin(folder, 'compiled',
|
||||
filename = pjoin(folder, 'compiled',
|
||||
'controllers.%s.%s.py' % (fname[:-3],function))
|
||||
write_file(filename, command)
|
||||
save_pyc(filename)
|
||||
|
||||
@@ -0,0 +1,123 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Read from configuration files easily without hurting performances
|
||||
|
||||
USAGE:
|
||||
During development you can load a config file either in .ini or .json
|
||||
format (by default app/private/appconfig.ini or app/private/appconfig.json)
|
||||
The result is a dict holding the configured values. Passing reload=True
|
||||
is meant only for development: in production, leave reload to False and all
|
||||
values will be cached
|
||||
|
||||
from gluon.contrib.appconfig import AppConfig
|
||||
myconfig = AppConfig(path_to_configfile, reload=False)
|
||||
|
||||
print myconfig['db']['uri']
|
||||
|
||||
The returned dict can walk with "dot notation" an arbitrarely nested dict
|
||||
|
||||
print myconfig.take('db.uri')
|
||||
|
||||
You can even pass a cast function, i.e.
|
||||
|
||||
print myconfig.take('auth.expiration', cast=int)
|
||||
|
||||
Once the value has been fetched (and casted) it won't change until the process
|
||||
is restarted (or reload=True is passed).
|
||||
|
||||
"""
|
||||
import thread
|
||||
import os
|
||||
from ConfigParser import SafeConfigParser
|
||||
from gluon import current
|
||||
from gluon.serializers import json_parser
|
||||
|
||||
locker = thread.allocate_lock()
|
||||
|
||||
|
||||
def AppConfig(*args, **vars):
|
||||
|
||||
locker.acquire()
|
||||
reload_ = vars.pop('reload', False)
|
||||
try:
|
||||
instance_name = 'AppConfig_' + current.request.application
|
||||
if reload_ or not hasattr(AppConfig, instance_name):
|
||||
setattr(AppConfig, instance_name, AppConfigLoader(*args, **vars))
|
||||
return getattr(AppConfig, instance_name).settings
|
||||
finally:
|
||||
locker.release()
|
||||
|
||||
|
||||
class AppConfigDict(dict):
|
||||
"""
|
||||
dict that has a .take() method to fetch nested values and puts
|
||||
them into cache
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
dict.__init__(self, *args, **kwargs)
|
||||
self.int_cache = {}
|
||||
|
||||
def take(self, path, cast=None):
|
||||
parts = path.split('.')
|
||||
if path in self.int_cache:
|
||||
return self.int_cache[path]
|
||||
value = self
|
||||
walking = []
|
||||
for part in parts:
|
||||
if part not in value:
|
||||
raise BaseException("%s not in config [%s]" %
|
||||
(part, '-->'.join(walking)))
|
||||
value = value[part]
|
||||
walking.append(part)
|
||||
if cast is None:
|
||||
self.int_cache[path] = value
|
||||
else:
|
||||
try:
|
||||
value = cast(value)
|
||||
self.int_cache[path] = value
|
||||
except (ValueError, TypeError):
|
||||
raise BaseException("%s can't be converted to %s" %
|
||||
(value, cast))
|
||||
return value
|
||||
|
||||
|
||||
class AppConfigLoader(object):
|
||||
|
||||
def __init__(self, configfile=None):
|
||||
if not configfile:
|
||||
priv_folder = os.path.join(current.request.folder, 'private')
|
||||
configfile = os.path.join(priv_folder, 'appconfig.ini')
|
||||
if not os.path.isfile(configfile):
|
||||
configfile = os.path.join(priv_folder, 'appconfig.json')
|
||||
if not os.path.isfile(configfile):
|
||||
configfile = None
|
||||
if not configfile or not os.path.isfile(configfile):
|
||||
raise BaseException("Config file not found")
|
||||
self.file = configfile
|
||||
self.ctype = os.path.splitext(configfile)[1][1:]
|
||||
self.settings = None
|
||||
self.read_config()
|
||||
|
||||
def read_config_ini(self):
|
||||
config = SafeConfigParser()
|
||||
config.read(self.file)
|
||||
settings = {}
|
||||
for section in config.sections():
|
||||
settings[section] = {}
|
||||
for option in config.options(section):
|
||||
settings[section][option] = config.get(section, option)
|
||||
self.settings = AppConfigDict(settings)
|
||||
|
||||
def read_config_json(self):
|
||||
with open(self.file, 'r') as c:
|
||||
self.settings = AppConfigDict(json_parser.load(c))
|
||||
|
||||
def read_config(self):
|
||||
if self.settings is None:
|
||||
try:
|
||||
getattr(self, 'read_config_' + self.ctype)()
|
||||
except AttributeError:
|
||||
raise BaseException("Unsupported config file format")
|
||||
return self.settings
|
||||
@@ -7,7 +7,8 @@ db = get_db()
|
||||
"""
|
||||
import os
|
||||
from gluon import *
|
||||
from gluon.dal import ADAPTERS, UseDatabaseStoredFile,PostgreSQLAdapter
|
||||
from gluon.dal.adapters import ADAPTERS, PostgreSQLAdapter
|
||||
from gluon.dal.helpers.classes import UseDatabaseStoredFile
|
||||
|
||||
class HerokuPostgresAdapter(UseDatabaseStoredFile,PostgreSQLAdapter):
|
||||
drivers = ('psycopg2',)
|
||||
|
||||
+12
-11
@@ -47,7 +47,7 @@ class Collection(object):
|
||||
if self.compact:
|
||||
for fieldname in (self.table_policy.get('fields',table.fields)):
|
||||
field = table[fieldname]
|
||||
if not ((field.type=='text' and text==False) or
|
||||
if not ((field.type=='text' and text==False) or
|
||||
field.type=='blob' or
|
||||
field.type.startswith('reference ') or
|
||||
field.type.startswith('list:reference ')) and field.name in row:
|
||||
@@ -56,7 +56,7 @@ class Collection(object):
|
||||
for fieldname in (self.table_policy.get('fields',table.fields)):
|
||||
field = table[fieldname]
|
||||
if not ((field.type=='text' and text==False) or
|
||||
field.type=='blob' or
|
||||
field.type=='blob' or
|
||||
field.type.startswith('reference ') or
|
||||
field.type.startswith('list:reference ')) and field.name in row:
|
||||
data.append({'name':field.name,'value':row[field.name],
|
||||
@@ -128,10 +128,10 @@ class Collection(object):
|
||||
for key,value in vars.items():
|
||||
if key=='_offset':
|
||||
limitby[0] = int(value) # MAY FAIL
|
||||
elif key == '_limit':
|
||||
elif key == '_limit':
|
||||
limitby[1] = int(value)+1 # MAY FAIL
|
||||
elif key=='_orderby':
|
||||
orderby = value
|
||||
orderby = value
|
||||
elif key in fieldnames:
|
||||
queries.append(table[key] == value)
|
||||
elif key.endswith('.eq') and key[:-3] in fieldnames: # for completeness (useless)
|
||||
@@ -156,14 +156,14 @@ class Collection(object):
|
||||
if filter_query:
|
||||
queries.append(filter_query)
|
||||
query = reduce(lambda a,b:a&b,queries[1:]) if len(queries)>1 else queries[0]
|
||||
orderby = [table[f] if f[0]!='~' else ~table[f[1:]] for f in orderby.split(',')]
|
||||
orderby = [table[f] if f[0]!='~' else ~table[f[1:]] for f in orderby.split(',')]
|
||||
return (query, limitby, orderby)
|
||||
|
||||
def table2queries(self,table, href):
|
||||
""" generates a set of collection.queries examples for the table """
|
||||
data = []
|
||||
for fieldname in (self.table_policy.get('fields', table.fields)):
|
||||
data.append({'name':fieldname,'value':''})
|
||||
data.append({'name':fieldname,'value':''})
|
||||
if self.extensions:
|
||||
data.append({'name':fieldname+'.ne','value':''}) # NEW !!!
|
||||
data.append({'name':fieldname+'.lt','value':''})
|
||||
@@ -192,7 +192,7 @@ class Collection(object):
|
||||
if not tablename:
|
||||
r['href'] = URL(scheme=True),
|
||||
# https://github.com/collection-json/extensions/blob/master/model.md
|
||||
r['links'] = [{'rel' : t, 'href' : URL(args=t,scheme=True), 'model':t}
|
||||
r['links'] = [{'rel' : t, 'href' : URL(args=t,scheme=True), 'model':t}
|
||||
for t in tablenames]
|
||||
response.headers['Content-Type'] = 'application/vnd.collection+json'
|
||||
return response.json({'collection':r})
|
||||
@@ -207,7 +207,7 @@ class Collection(object):
|
||||
# process GET
|
||||
if request.env.request_method=='GET':
|
||||
table = db[tablename]
|
||||
r['href'] = URL(args=tablename)
|
||||
r['href'] = URL(args=tablename)
|
||||
r['items'] = items = []
|
||||
try:
|
||||
(query, limitby, orderby) = self.request2query(table,request.get_vars)
|
||||
@@ -258,7 +258,7 @@ class Collection(object):
|
||||
return response.json({'collection':r})
|
||||
# process DELETE
|
||||
elif request.env.request_method=='DELETE':
|
||||
table = db[tablename]
|
||||
table = db[tablename]
|
||||
if not request.get_vars:
|
||||
return self.error(400, "BAD REQUEST", "Nothing to delete")
|
||||
else:
|
||||
@@ -276,6 +276,8 @@ class Collection(object):
|
||||
table = db[tablename]
|
||||
if 'json' in request.env.content_type:
|
||||
data = request.post_vars.data
|
||||
else:
|
||||
data = request.post_vars
|
||||
if request.get_vars or len(request.args)>1: # update
|
||||
# ADD validate fields and return error
|
||||
try:
|
||||
@@ -310,7 +312,7 @@ class Collection(object):
|
||||
request, response = self.request, self.response
|
||||
r = OrderedDict({
|
||||
"version" : self.VERSION,
|
||||
"href" : URL(args=request.args,vars=request.vars),
|
||||
"href" : URL(args=request.args,vars=request.vars),
|
||||
"error" : {
|
||||
"title" : title,
|
||||
"code" : code,
|
||||
@@ -338,4 +340,3 @@ example_policies = {
|
||||
'DELETE':{'query':None},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -73,7 +73,7 @@ class BrowserID(object):
|
||||
auth_info_json = fetch(self.verify_url, data=verify_data)
|
||||
j = json.loads(auth_info_json)
|
||||
epoch_time = int(time.time() * 1000) # we need 13 digit epoch time
|
||||
if j["status"] == "okay" and j["audience"] == audience and j['issuer'] == issuer and j['expires'] >= epoch_time:
|
||||
if j["status"] == "okay" and j["audience"] == audience and j['issuer'].endswith(issuer) and j['expires'] >= epoch_time:
|
||||
return dict(email=j['email'])
|
||||
elif self.on_login_failure:
|
||||
#print "status: ", j["status"]=="okay", j["status"]
|
||||
|
||||
@@ -254,12 +254,12 @@ class Table(DALStorage):
|
||||
self._db(self.id > 0).delete()
|
||||
|
||||
|
||||
def insert(self, **fields):
|
||||
# Checks 3 times that the id is new. 3 times is enough!
|
||||
for i in range(3):
|
||||
id = self._create_id()
|
||||
if self.get(id) is None and self.update(id, **fields):
|
||||
return long(id)
|
||||
def insert(self, **fields):
|
||||
# Checks 3 times that the id is new. 3 times is enough!
|
||||
for i in range(3):
|
||||
id = self._create_id()
|
||||
if self.get(id) is None and self.update(id, **fields):
|
||||
return long(id)
|
||||
else:
|
||||
raise RuntimeError("Too many ID conflicts")
|
||||
|
||||
|
||||
@@ -141,7 +141,7 @@ class Connection(object):
|
||||
parts = "complete"
|
||||
|
||||
return ("OK", (("%s " % message_id, message[parts]), message["flags"]))
|
||||
|
||||
|
||||
def _get_messages(self, query):
|
||||
if query.strip().isdigit():
|
||||
return [self.spam[self._mailbox][int(query.strip()) - 1],]
|
||||
@@ -151,7 +151,7 @@ class Connection(object):
|
||||
for item in self.spam[self._mailbox]:
|
||||
if item["uid"] == query[1:-1].replace("UID", "").strip():
|
||||
return [item,]
|
||||
messages = []
|
||||
messages = []
|
||||
try:
|
||||
for m in self.results[self._mailbox][query]:
|
||||
try:
|
||||
@@ -169,7 +169,7 @@ class Connection(object):
|
||||
return messages
|
||||
except KeyError:
|
||||
raise ValueError("The client issued an unexpected query: %s" % query)
|
||||
|
||||
|
||||
def setup(self, spam={}, results={}):
|
||||
"""adds custom message and query databases or sets
|
||||
the values to the module defaults.
|
||||
@@ -252,4 +252,3 @@ class IMAP4(object):
|
||||
return Connection()
|
||||
|
||||
IMAP4_SSL = IMAP4
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
Note: This module is intended as a plugin replacement of pbkdf2.py
|
||||
by Armin Ronacher.
|
||||
|
||||
Git repository:
|
||||
Git repository:
|
||||
$ git clone https://github.com/michele-comitini/pbkdf2_ctypes.git
|
||||
|
||||
:copyright: Copyright (c) 2013: Michele Comitini <mcm@glisco.it>
|
||||
@@ -86,7 +86,7 @@ def _openssl_hashlib_to_crypto_map_get(hashfunc):
|
||||
crypto_hashfunc.restype = ctypes.c_void_p
|
||||
return crypto_hashfunc()
|
||||
|
||||
|
||||
|
||||
def _openssl_pbkdf2(data, salt, iterations, digest, keylen):
|
||||
"""OpenSSL compatibile wrapper
|
||||
"""
|
||||
@@ -99,7 +99,7 @@ def _openssl_pbkdf2(data, salt, iterations, digest, keylen):
|
||||
c_iter = ctypes.c_int(iterations)
|
||||
c_keylen = ctypes.c_int(keylen)
|
||||
c_buff = ctypes.create_string_buffer(keylen)
|
||||
|
||||
|
||||
# PKCS5_PBKDF2_HMAC(const char *pass, int passlen,
|
||||
# const unsigned char *salt, int saltlen, int iter,
|
||||
# const EVP_MD *digest,
|
||||
@@ -109,7 +109,7 @@ def _openssl_pbkdf2(data, salt, iterations, digest, keylen):
|
||||
ctypes.c_char_p, ctypes.c_int,
|
||||
ctypes.c_int, ctypes.c_void_p,
|
||||
ctypes.c_int, ctypes.c_char_p]
|
||||
|
||||
|
||||
crypto.PKCS5_PBKDF2_HMAC.restype = ctypes.c_int
|
||||
err = crypto.PKCS5_PBKDF2_HMAC(c_pass, c_passlen,
|
||||
c_salt, c_saltlen,
|
||||
|
||||
+408
-408
File diff suppressed because it is too large
Load Diff
@@ -235,7 +235,7 @@ class SoapDispatcher(object):
|
||||
body.marshall("%s:Fault" % soap_ns, fault, ns=False)
|
||||
else:
|
||||
# return normal value
|
||||
res = body.add_child("%sResponse" % name, ns=prefix)
|
||||
res = body.add_child("%sResponse" % name, ns=self.namespace)
|
||||
if not prefix:
|
||||
res['xmlns'] = self.namespace # add target namespace
|
||||
|
||||
|
||||
+22
-22
@@ -37,7 +37,7 @@ def pay():
|
||||
elif form.errors:
|
||||
redirect(URL('pay_error'))
|
||||
return dict(form=form)
|
||||
|
||||
|
||||
"""
|
||||
|
||||
URL_CHARGE = 'https://%s:@api.stripe.com/v1/charges'
|
||||
@@ -114,7 +114,7 @@ class StripeForm(object):
|
||||
|
||||
def process(self):
|
||||
from gluon import current
|
||||
request = current.request
|
||||
request = current.request
|
||||
if request.post_vars:
|
||||
if self.signature == request.post_vars.signature:
|
||||
self.response = Stripe(self.sk).charge(
|
||||
@@ -127,7 +127,7 @@ class StripeForm(object):
|
||||
return self
|
||||
self.errors = True
|
||||
return self
|
||||
|
||||
|
||||
def xml(self):
|
||||
from gluon.template import render
|
||||
if self.accepted:
|
||||
@@ -135,8 +135,8 @@ class StripeForm(object):
|
||||
elif self.errors:
|
||||
return "There was an processing error"
|
||||
else:
|
||||
context = dict(amount=self.amount,
|
||||
signature=self.signature, pk=self.pk,
|
||||
context = dict(amount=self.amount,
|
||||
signature=self.signature, pk=self.pk,
|
||||
currency_symbol=self.currency_symbol,
|
||||
security_notice=self.security_notice,
|
||||
disclosure_notice=self.disclosure_notice)
|
||||
@@ -145,14 +145,14 @@ class StripeForm(object):
|
||||
|
||||
TEMPLATE = """
|
||||
<script type="text/javascript" src="https://js.stripe.com/v2/"></script>
|
||||
<script>
|
||||
<script>
|
||||
jQuery(function(){
|
||||
// This identifies your website in the createToken call below
|
||||
Stripe.setPublishableKey('{{=pk}}');
|
||||
|
||||
|
||||
var stripeResponseHandler = function(status, response) {
|
||||
var jQueryform = jQuery('#payment-form');
|
||||
|
||||
|
||||
if (response.error) {
|
||||
// Show the errors on the form
|
||||
jQuery('.payment-errors').text(response.error.message).show();
|
||||
@@ -167,17 +167,17 @@ jQuery(function(){
|
||||
jQueryform.get(0).submit();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
jQuery(function(jQuery) {
|
||||
jQuery('#payment-form').submit(function(e) {
|
||||
|
||||
var jQueryform = jQuery(this);
|
||||
|
||||
|
||||
// Disable the submit button to prevent repeated clicks
|
||||
jQueryform.find('button').prop('disabled', true);
|
||||
|
||||
|
||||
Stripe.createToken(jQueryform, stripeResponseHandler);
|
||||
|
||||
|
||||
// Prevent the form from submitting with the default action
|
||||
return false;
|
||||
});
|
||||
@@ -189,33 +189,33 @@ jQuery(function(){
|
||||
<form action="" method="POST" id="payment-form" class="form-horizontal">
|
||||
|
||||
<div class="form-row control-group">
|
||||
<label class="control-label">Card Number</label>
|
||||
<label class="control-label">Card Number</label>
|
||||
<div class="controls">
|
||||
<input type="text" size="20" data-stripe="number"
|
||||
placeholder="4242424242424242"/>
|
||||
placeholder="4242424242424242"/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="form-row control-group">
|
||||
<label class="control-label">CVC</label>
|
||||
<label class="control-label">CVC</label>
|
||||
<div class="controls">
|
||||
<input type="text" size="4" style="width:80px" data-stripe="cvc"
|
||||
placeholder="XXX"/>
|
||||
placeholder="XXX"/>
|
||||
<a href="http://en.wikipedia.org/wiki/Card_Verification_Code" target="_blank">What is this?</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="form-row control-group">
|
||||
<label class="control-label">Expiration</label>
|
||||
<label class="control-label">Expiration</label>
|
||||
<div class="controls">
|
||||
<input type="text" size="2" style="width:40px" data-stripe="exp-month"
|
||||
placeholder="MM"/>
|
||||
placeholder="MM"/>
|
||||
/
|
||||
<input type="text" size="4" style="width:80px" data-stripe="exp-year"
|
||||
placeholder="YYYY"/>
|
||||
placeholder="YYYY"/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
<div class="control-group">
|
||||
<div class="controls">
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
"""
|
||||
Developed by Massimo Di Pierro
|
||||
Released under the web2py license (LGPL)
|
||||
|
||||
@@ -70,6 +70,15 @@ Here is a complete sample web2py action:
|
||||
'http://127.0.0.1:8888', form.vars.message, 'mykey', 'mygroup')
|
||||
return form
|
||||
|
||||
https is possible too using 'https://127.0.0.1:8888' instead of 'http://127.0.0.1:8888', but need to
|
||||
be started with
|
||||
|
||||
python gluon/contrib/websocket_messaging.py -k mykey -p 8888 -s keyfile.pem -c certfile.pem
|
||||
|
||||
for secure websocket do:
|
||||
|
||||
web2py_websocket('wss://127.0.0.1:8888/realtime/mygroup',callback)
|
||||
|
||||
Acknowledgements:
|
||||
Tornado code inspired by http://thomas.pelletier.im/2010/08/websocket-tornado-redis/
|
||||
|
||||
@@ -104,7 +113,7 @@ class PostHandler(tornado.web.RequestHandler):
|
||||
"""
|
||||
def post(self):
|
||||
if hmac_key and not 'signature' in self.request.arguments:
|
||||
return None
|
||||
self.send_error(401)
|
||||
if 'message' in self.request.arguments:
|
||||
message = self.request.arguments['message'][0]
|
||||
group = self.request.arguments.get('group', ['default'])[0]
|
||||
@@ -112,10 +121,9 @@ class PostHandler(tornado.web.RequestHandler):
|
||||
if hmac_key:
|
||||
signature = self.request.arguments['signature'][0]
|
||||
if not hmac.new(hmac_key, message).hexdigest() == signature:
|
||||
return None
|
||||
self.send_error(401)
|
||||
for client in listeners.get(group, []):
|
||||
client.write_message(message)
|
||||
return None
|
||||
|
||||
|
||||
class TokenHandler(tornado.web.RequestHandler):
|
||||
@@ -126,15 +134,14 @@ class TokenHandler(tornado.web.RequestHandler):
|
||||
"""
|
||||
def post(self):
|
||||
if hmac_key and not 'message' in self.request.arguments:
|
||||
return None
|
||||
self.send_error(401)
|
||||
if 'message' in self.request.arguments:
|
||||
message = self.request.arguments['message'][0]
|
||||
if hmac_key:
|
||||
signature = self.request.arguments['signature'][0]
|
||||
if not hmac.new(hmac_key, message).hexdigest() == signature:
|
||||
return None
|
||||
self.send_error(401)
|
||||
tokens[message] = None
|
||||
return None
|
||||
|
||||
|
||||
class DistributeHandler(tornado.websocket.WebSocketHandler):
|
||||
@@ -170,6 +177,12 @@ class DistributeHandler(tornado.websocket.WebSocketHandler):
|
||||
client.write_message('-' + self.name)
|
||||
print '%s:DISCONNECT from %s' % (time.time(), self.group)
|
||||
|
||||
# if your webserver is different from tornado server uncomment this
|
||||
# or override using something more restrictive:
|
||||
# http://tornado.readthedocs.org/en/latest/websocket.html#tornado.websocket.WebSocketHandler.check_origin
|
||||
# def check_origin(self, origin):
|
||||
# return True
|
||||
|
||||
if __name__ == "__main__":
|
||||
usage = __doc__
|
||||
version = ""
|
||||
@@ -195,6 +208,16 @@ if __name__ == "__main__":
|
||||
default=False,
|
||||
dest='tokens',
|
||||
help='require tockens to join')
|
||||
parser.add_option('-s',
|
||||
'--sslkey',
|
||||
default=False,
|
||||
dest='keyfile',
|
||||
help='require ssl keyfile full path')
|
||||
parser.add_option('-c',
|
||||
'--sslcert',
|
||||
default=False,
|
||||
dest='certfile',
|
||||
help='require ssl certfile full path')
|
||||
(options, args) = parser.parse_args()
|
||||
hmac_key = options.hmac_key
|
||||
DistributeHandler.tokens = options.tokens
|
||||
@@ -203,6 +226,10 @@ if __name__ == "__main__":
|
||||
(r'/token', TokenHandler),
|
||||
(r'/realtime/(.*)', DistributeHandler)]
|
||||
application = tornado.web.Application(urls, auto_reload=True)
|
||||
http_server = tornado.httpserver.HTTPServer(application)
|
||||
if options.keyfile and options.certfile:
|
||||
ssl_options = dict(certfile=options.certfile, keyfile=options.keyfile)
|
||||
else:
|
||||
ssl_options = None
|
||||
http_server = tornado.httpserver.HTTPServer(application, ssl_options=ssl_options)
|
||||
http_server.listen(int(options.port), address=options.address)
|
||||
tornado.ioloop.IOLoop.instance().start()
|
||||
|
||||
@@ -41,7 +41,7 @@ class CustomImportException(ImportError):
|
||||
|
||||
def custom_importer(name, globals=None, locals=None, fromlist=None, level=-1):
|
||||
"""
|
||||
web2py's custom importer. It behaves like the standard Python importer but
|
||||
web2py's custom importer. It behaves like the standard Python importer but
|
||||
it tries to transform import statements as something like
|
||||
"import applications.app_name.modules.x".
|
||||
If the import fails, it falls back on naive_importer
|
||||
@@ -81,7 +81,7 @@ def custom_importer(name, globals=None, locals=None, fromlist=None, level=-1):
|
||||
new_mod = base_importer(
|
||||
modules_prefix, globals, locals, [itemname], level)
|
||||
try:
|
||||
result = result or new_mod.__dict__[itemname]
|
||||
result = result or sys.modules[modules_prefix+'.'+itemname]
|
||||
except KeyError, e:
|
||||
raise ImportError, 'Cannot import module %s' % str(e)
|
||||
modules_prefix += "." + itemname
|
||||
|
||||
-11686
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,4 @@
|
||||
from .base import DAL
|
||||
from .objects import Field
|
||||
from .helpers.classes import SQLCustomType
|
||||
from .helpers.methods import geoPoint, geoLine, geoPolygon
|
||||
@@ -0,0 +1,21 @@
|
||||
import sys
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
|
||||
if PY2:
|
||||
import cPickle as pickle
|
||||
import cStringIO as StringIO
|
||||
import copy_reg as copyreg
|
||||
hashlib_md5 = hashlib.md5
|
||||
else:
|
||||
import pickle
|
||||
from io import StringIO
|
||||
import copyreg
|
||||
hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8'))
|
||||
|
||||
pjoin = os.path.join
|
||||
exists = os.path.exists
|
||||
ogetattr = object.__getattribute__
|
||||
osetattr = object.__setattr__
|
||||
@@ -0,0 +1,13 @@
|
||||
import threading
|
||||
import logging
|
||||
|
||||
GLOBAL_LOCKER = threading.RLock()
|
||||
THREAD_LOCAL = threading.local()
|
||||
|
||||
LOGGER = logging.getLogger("web2py.dal")
|
||||
|
||||
DEFAULT = lambda: None
|
||||
|
||||
def IDENTITY(x): return x
|
||||
def OR(a,b): return a|b
|
||||
def AND(a,b): return a&b
|
||||
@@ -0,0 +1,313 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import decimal
|
||||
import re
|
||||
|
||||
from ._globals import LOGGER
|
||||
|
||||
|
||||
# verify presence of web2py modules
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except:
|
||||
from gluon.contrib.ordereddict import OrderedDict
|
||||
|
||||
try:
|
||||
from gluon.utils import web2py_uuid
|
||||
except (ImportError, SystemError):
|
||||
import uuid
|
||||
def web2py_uuid(): return str(uuid.uuid4())
|
||||
|
||||
try:
|
||||
import portalocker
|
||||
have_portalocker = True
|
||||
except ImportError:
|
||||
portalocker = None
|
||||
have_portalocker = False
|
||||
|
||||
try:
|
||||
from gluon import serializers
|
||||
have_serializers = True
|
||||
simplejson = None
|
||||
except ImportError:
|
||||
serializers = None
|
||||
have_serializers = False
|
||||
try:
|
||||
import json as simplejson
|
||||
except ImportError:
|
||||
try:
|
||||
import gluon.contrib.simplejson as simplejson
|
||||
except ImportError:
|
||||
simplejson = None
|
||||
|
||||
|
||||
# list of drivers will be built on the fly
|
||||
# and lists only what is available
|
||||
DRIVERS = []
|
||||
|
||||
try:
|
||||
from new import classobj
|
||||
from google.appengine.ext import db as gae
|
||||
from google.appengine.ext import ndb
|
||||
from google.appengine.api import namespace_manager, rdbms
|
||||
from google.appengine.api.datastore_types import Key ### for belongs on ID
|
||||
from google.appengine.ext.db.polymodel import PolyModel
|
||||
from google.appengine.ext.ndb.polymodel import PolyModel as NDBPolyModel
|
||||
DRIVERS.append('google')
|
||||
except ImportError:
|
||||
classobj = None
|
||||
gae = None
|
||||
ndb = None
|
||||
namespace_manager = rdbms = None
|
||||
Key = None
|
||||
PolyModel = NDBPolyModel = None
|
||||
|
||||
if not 'google' in DRIVERS:
|
||||
|
||||
try:
|
||||
from pysqlite2 import dbapi2 as sqlite2
|
||||
DRIVERS.append('sqlite2')
|
||||
except ImportError:
|
||||
LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')
|
||||
|
||||
try:
|
||||
from sqlite3 import dbapi2 as sqlite3
|
||||
DRIVERS.append('sqlite3')
|
||||
except ImportError:
|
||||
LOGGER.debug('no SQLite drivers sqlite3')
|
||||
|
||||
try:
|
||||
# first try contrib driver, then from site-packages (if installed)
|
||||
try:
|
||||
import gluon.contrib.pymysql as pymysql
|
||||
# monkeypatch pymysql because they havent fixed the bug:
|
||||
# https://github.com/petehunt/PyMySQL/issues/86
|
||||
pymysql.ESCAPE_REGEX = re.compile("'")
|
||||
pymysql.ESCAPE_MAP = {"'": "''"}
|
||||
# end monkeypatch
|
||||
except ImportError:
|
||||
import pymysql
|
||||
DRIVERS.append('pymysql')
|
||||
except ImportError:
|
||||
LOGGER.debug('no MySQL driver pymysql')
|
||||
|
||||
try:
|
||||
import MySQLdb
|
||||
DRIVERS.append('MySQLdb')
|
||||
except ImportError:
|
||||
LOGGER.debug('no MySQL driver MySQLDB')
|
||||
|
||||
try:
|
||||
import mysql.connector as mysqlconnector
|
||||
DRIVERS.append("mysqlconnector")
|
||||
except ImportError:
|
||||
LOGGER.debug("no driver mysql.connector")
|
||||
|
||||
try:
|
||||
import psycopg2
|
||||
from psycopg2.extensions import adapt as psycopg2_adapt
|
||||
DRIVERS.append('psycopg2')
|
||||
except ImportError:
|
||||
psycopg2_adapt = None
|
||||
LOGGER.debug('no PostgreSQL driver psycopg2')
|
||||
|
||||
try:
|
||||
# first try contrib driver, then from site-packages (if installed)
|
||||
try:
|
||||
import gluon.contrib.pg8000.dbapi as pg8000
|
||||
except ImportError:
|
||||
import pg8000.dbapi as pg8000
|
||||
DRIVERS.append('pg8000')
|
||||
except ImportError:
|
||||
LOGGER.debug('no PostgreSQL driver pg8000')
|
||||
|
||||
try:
|
||||
import cx_Oracle
|
||||
DRIVERS.append('cx_Oracle')
|
||||
except ImportError:
|
||||
cx_Oracle = None
|
||||
LOGGER.debug('no Oracle driver cx_Oracle')
|
||||
|
||||
try:
|
||||
try:
|
||||
import pyodbc
|
||||
except ImportError:
|
||||
try:
|
||||
import gluon.contrib.pypyodbc as pyodbc
|
||||
except Exception, e:
|
||||
raise ImportError(str(e))
|
||||
DRIVERS.append('pyodbc')
|
||||
#DRIVERS.append('DB2(pyodbc)')
|
||||
#DRIVERS.append('Teradata(pyodbc)')
|
||||
#DRIVERS.append('Ingres(pyodbc)')
|
||||
except ImportError:
|
||||
pyodbc = None
|
||||
LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')
|
||||
|
||||
try:
|
||||
import ibm_db_dbi
|
||||
DRIVERS.append('ibm_db_dbi')
|
||||
except ImportError:
|
||||
LOGGER.debug('no DB2 driver ibm_db_dbi')
|
||||
|
||||
try:
|
||||
import Sybase
|
||||
DRIVERS.append('Sybase')
|
||||
except ImportError:
|
||||
LOGGER.debug('no Sybase driver')
|
||||
|
||||
try:
|
||||
import kinterbasdb
|
||||
DRIVERS.append('kinterbasdb')
|
||||
#DRIVERS.append('Firebird(kinterbasdb)')
|
||||
except ImportError:
|
||||
LOGGER.debug('no Firebird/Interbase driver kinterbasdb')
|
||||
|
||||
# Probe optional database drivers.  Each importable driver name is appended
# to DRIVERS (defined earlier in this module); a missing driver is logged at
# debug level and skipped.  Experimental backends additionally log a warning.
try:
    import fdb
    DRIVERS.append('fdb')
except ImportError:
    LOGGER.debug('no Firebird driver fdb')

try:
    import firebirdsql
    DRIVERS.append('firebirdsql')
except ImportError:
    LOGGER.debug('no Firebird driver firebirdsql')

try:
    import informixdb
    DRIVERS.append('informixdb')
    LOGGER.warning('Informix support is experimental')
except ImportError:
    LOGGER.debug('no Informix driver informixdb')

try:
    import sapdb
    DRIVERS.append('sapdb')
    LOGGER.warning('SAPDB support is experimental')
except ImportError:
    LOGGER.debug('no SAP driver sapdb')

try:
    import cubriddb
    DRIVERS.append('cubriddb')
    LOGGER.warning('Cubrid support is experimental')
except ImportError:
    LOGGER.debug('no Cubrid driver cubriddb')

# Jython: database access goes through zxJDBC; is_jdbc records whether the
# JDBC machinery is available.
try:
    from com.ziclix.python.sql import zxJDBC
    import java.sql
    # Try sqlite jdbc driver from http://www.zentus.com/sqlitejdbc/
    from org.sqlite import JDBC # required by java.sql; ensure we have it
    zxJDBC_sqlite = java.sql.DriverManager
    DRIVERS.append('zxJDBC')
    #DRIVERS.append('SQLite(zxJDBC)')
    LOGGER.warning('zxJDBC support is experimental')
    is_jdbc = True
except ImportError:
    LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
    is_jdbc = False

try:
    import couchdb
    DRIVERS.append('couchdb')
except ImportError:
    # other code tests `couchdb` for truthiness, so bind it explicitly
    couchdb = None
    LOGGER.debug('no Couchdb driver couchdb')

try:
    import pymongo
    DRIVERS.append('pymongo')
# BUGFIX: was a bare `except:` which also swallowed SystemExit and
# KeyboardInterrupt; keep the best-effort behaviour but stop masking
# interpreter-exit exceptions.
except Exception:
    LOGGER.debug('no MongoDB driver pymongo')

try:
    import imaplib
    DRIVERS.append('imaplib')
# BUGFIX: same bare-except narrowing as for pymongo above.
except Exception:
    LOGGER.debug('no IMAP driver imaplib')
||||
GAEDecimalProperty = None
|
||||
NDBDecimalProperty = None
|
||||
else:
|
||||
is_jdbc = False
|
||||
|
||||
class GAEDecimalProperty(gae.Property):
    """
    GAE decimal implementation

    Persists decimal.Decimal values in the datastore as strings and
    quantizes restored values to `scale` decimal places.
    """
    data_type = decimal.Decimal

    def __init__(self, precision, scale, **kwargs):
        # BUGFIX: `self` was previously passed as an extra positional
        # argument (`super(...).__init__(self, **kwargs)`), so
        # gae.Property.__init__ presumably consumed the instance itself as
        # its first optional argument.  Decode-path behaviour is unchanged.
        super(GAEDecimalProperty, self).__init__(**kwargs)
        # `precision` is accepted for signature parity with SQL DECIMAL but
        # is not enforced by the datastore; only `scale` is used, to build
        # the quantization template (e.g. '1.00' for scale=2).
        d = '1.'
        for x in range(scale):
            d += '0'
        self.round = decimal.Decimal(d)

    def get_value_for_datastore(self, model_instance):
        # Store as a plain string; treat empty string like None.
        value = super(GAEDecimalProperty, self)\
            .get_value_for_datastore(model_instance)
        if value is None or value == '':
            return None
        else:
            return str(value)

    def make_value_from_datastore(self, value):
        # Restore to Decimal, quantized to the declared scale.
        if value is None or value == '':
            return None
        else:
            return decimal.Decimal(value).quantize(self.round)

    def validate(self, value):
        # Accept None, Decimal, or any string parseable as a Decimal.
        value = super(GAEDecimalProperty, self).validate(value)
        if value is None or isinstance(value, decimal.Decimal):
            return value
        elif isinstance(value, basestring):
            return decimal.Decimal(value)
        raise gae.BadValueError("Property %s must be a Decimal or string."\
                                % self.name)
||||
|
||||
#TODO Needs more testing
class NDBDecimalProperty(ndb.StringProperty):
    """
    NDB decimal implementation

    Persists decimal.Decimal values as strings and quantizes restored
    values to `scale` decimal places.
    """
    data_type = decimal.Decimal

    def __init__(self, precision, scale, **kwargs):
        # NOTE(review): ndb.StringProperty.__init__ is deliberately not
        # invoked here (mirrors the historical behaviour) and **kwargs is
        # ignored -- confirm before changing.  `precision` exists only for
        # signature parity; `scale` drives the quantization template,
        # e.g. '1.00' for scale=2.
        self.round = decimal.Decimal('1.' + '0' * scale)

    def _to_base_type(self, value):
        # Store as a plain string; treat empty string like None.
        if value is None or value == '':
            return None
        return str(value)

    def _from_base_type(self, value):
        # Restore to Decimal, quantized to the declared scale.
        if value is None or value == '':
            return None
        return decimal.Decimal(value).quantize(self.round)

    def _validate(self, value):
        # Accept None, Decimal, or any string parseable as a Decimal.
        if value is None or isinstance(value, decimal.Decimal):
            return value
        if isinstance(value, basestring):
            return decimal.Decimal(value)
        raise TypeError("Property %s must be a Decimal or string."\
                        % self._name)
||||
|
||||
# Placeholders for optional driver helpers/modules.  None means "not
# available"; presumably these names are rebound elsewhere when the
# corresponding driver imports succeed -- TODO confirm against the
# driver-probe code.
psycopg2_adapt = None
cx_Oracle = None
pyodbc = None
couchdb = None
||||
|
||||
def get_driver(name):
    """Return the driver object registered under *name* in this module.

    Returns None when no such driver has been imported.
    """
    try:
        return globals()[name]
    except KeyError:
        return None
||||
@@ -0,0 +1,60 @@
|
||||
# -*- coding: utf-8 -*-
# Registry module for the DAL adapters package: imports every concrete
# adapter class and maps connection-URI scheme prefixes onto the class
# implementing that backend.
from .sqlite import SQLiteAdapter, SpatiaLiteAdapter, JDBCSQLiteAdapter
from .mysql import MySQLAdapter
from .postgres import PostgreSQLAdapter, NewPostgreSQLAdapter, JDBCPostgreSQLAdapter
from .oracle import OracleAdapter
from .mssql import MSSQLAdapter, MSSQL2Adapter, MSSQL3Adapter, MSSQL4Adapter, \
    VerticaAdapter, SybaseAdapter
from .firebird import FireBirdAdapter
from .informix import InformixAdapter, InformixSEAdapter
from .db2 import DB2Adapter
from .teradata import TeradataAdapter
from .ingres import IngresAdapter, IngresUnicodeAdapter
from .sapdb import SAPDBAdapter
from .cubrid import CubridAdapter
from .google import GoogleDatastoreAdapter, GoogleSQLAdapter
from .couchdb import CouchDBAdapter
from .mongo import MongoDBAdapter
from .imap import IMAPAdapter


# Maps the scheme part of a DAL URI (e.g. 'mysql' in
# 'mysql://user:pass@host/db') to the adapter class handling it.  Several
# schemes may share one class (driver variants, memory databases).
ADAPTERS = {
    'sqlite': SQLiteAdapter,
    'spatialite': SpatiaLiteAdapter,
    'sqlite:memory': SQLiteAdapter,
    'spatialite:memory': SpatiaLiteAdapter,
    'mysql': MySQLAdapter,
    'postgres': PostgreSQLAdapter,
    'postgres:psycopg2': PostgreSQLAdapter,
    'postgres:pg8000': PostgreSQLAdapter,
    'postgres2:psycopg2': NewPostgreSQLAdapter,
    'postgres2:pg8000': NewPostgreSQLAdapter,
    'oracle': OracleAdapter,
    'mssql': MSSQLAdapter,
    'mssql2': MSSQL2Adapter,
    'mssql3': MSSQL3Adapter,
    'mssql4' : MSSQL4Adapter,
    'vertica': VerticaAdapter,
    'sybase': SybaseAdapter,
    'db2:ibm_db_dbi': DB2Adapter,
    'db2:pyodbc': DB2Adapter,
    'teradata': TeradataAdapter,
    'informix': InformixAdapter,
    'informix-se': InformixSEAdapter,
    'firebird': FireBirdAdapter,
    'firebird_embedded': FireBirdAdapter,
    'ingres': IngresAdapter,
    'ingresu': IngresUnicodeAdapter,
    'sapdb': SAPDBAdapter,
    'cubrid': CubridAdapter,
    'jdbc:sqlite': JDBCSQLiteAdapter,
    'jdbc:sqlite:memory': JDBCSQLiteAdapter,
    'jdbc:postgres': JDBCPostgreSQLAdapter,
    'gae': GoogleDatastoreAdapter, # discouraged, for backward compatibility
    'google:datastore': GoogleDatastoreAdapter,
    'google:datastore+ndb': GoogleDatastoreAdapter,
    'google:sql': GoogleSQLAdapter,
    'couchdb': CouchDBAdapter,
    'mongodb': MongoDBAdapter,
    'imap': IMAPAdapter
}
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,202 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from .._load import serializers, couchdb, web2py_uuid
|
||||
from ..objects import Field, Query
|
||||
from ..helpers.classes import SQLALL
|
||||
from ..helpers.methods import uuid2int
|
||||
from .base import BaseAdapter, NoSQLAdapter, SELECT_ARGS
|
||||
|
||||
|
||||
class CouchDBAdapter(NoSQLAdapter):
    """DAL adapter for Apache CouchDB.

    DAL queries are compiled into JavaScript map functions and run as
    CouchDB temporary views; the DAL 'id' field is mapped onto the
    CouchDB document ``_id``.
    """
    drivers = ('couchdb',)

    # uploads are stored inside the document itself, not on disk
    uploads_in_blob = True
    # DAL field type -> python type used when coercing values
    types = {
        'boolean': bool,
        'string': str,
        'text': str,
        'json': str,
        'password': str,
        'blob': str,
        'upload': str,
        'integer': long,
        'bigint': long,
        'float': float,
        'double': float,
        'date': datetime.date,
        'time': datetime.time,
        'datetime': datetime.datetime,
        'id': long,
        'reference': long,
        'list:string': list,
        'list:integer': list,
        'list:reference': list,
        }

    # CouchDB keeps no migration metadata files; the inherited file hooks
    # are disabled as no-ops.
    def file_exists(self, filename): pass
    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj): pass

    def expand(self,expression,field_type=None):
        # map the DAL 'id' field onto the CouchDB document _id
        if isinstance(expression,Field):
            if expression.type=='id':
                return "%s._id" % expression.tablename
        return BaseAdapter.expand(self,expression,field_type)

    # Logical/comparison operators are rendered as JavaScript, because the
    # expanded query text is evaluated inside a CouchDB map function.
    def AND(self,first,second):
        return '(%s && %s)' % (self.expand(first),self.expand(second))

    def OR(self,first,second):
        return '(%s || %s)' % (self.expand(first),self.expand(second))

    def EQ(self,first,second):
        if second is None:
            return '(%s == null)' % self.expand(first)
        return '(%s == %s)' % (self.expand(first),self.expand(second,first.type))

    def NE(self,first,second):
        if second is None:
            return '(%s != null)' % self.expand(first)
        return '(%s != %s)' % (self.expand(first),self.expand(second,first.type))

    def COMMA(self,first,second):
        return '%s + %s' % (self.expand(first),self.expand(second))

    def represent(self, obj, fieldtype):
        # id -> quoted string of the integer; temporal/boolean -> JSON;
        # everything else -> repr() of a utf8-encoded string, suitable for
        # embedding into the generated JavaScript
        value = NoSQLAdapter.represent(self, obj, fieldtype)
        if fieldtype=='id':
            return repr(str(long(value)))
        elif fieldtype in ('date','time','datetime','boolean'):
            return serializers.json(value)
        return repr(not isinstance(value,unicode) and value \
                        or value and value.encode('utf8'))

    def __init__(self,db,uri='couchdb://127.0.0.1:5984',
                 pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.dbengine = 'couchdb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self._after_connection = after_connection
        self.pool_size = pool_size

        # strip the 'couchdb://' scheme and talk plain HTTP to the server
        url='http://'+uri[10:]
        def connector(url=url,driver_args=driver_args):
            return self.driver.Server(url,**driver_args)
        # no real cursor concept in couchdb
        self.reconnect(connector,cursor=False)

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        # a DAL "table" is a couchdb database; creation failures are
        # ignored (the database may already exist)
        if migrate:
            try:
                self.connection.create(table._tablename)
            except:
                pass

    def insert(self,table,fields):
        # derive a numeric id from a fresh uuid and use it as the doc _id
        id = uuid2int(web2py_uuid())
        ctable = self.connection[table._tablename]
        values = dict((k.name,self.represent(v,k.type)) for k,v in fields)
        values['_id'] = str(id)
        ctable.save(values)
        return id

    def _select(self,query,fields,attributes):
        """Build the JavaScript map function and column names for *query*."""
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        for key in set(attributes.keys())-SELECT_ARGS:
            raise SyntaxError('invalid select attribute: %s' % key)
        # expand SQLALL placeholders into the table's individual fields
        new_fields=[]
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item._table
            else:
                new_fields.append(item)
        def uid(fd):
            # field name as seen by couchdb ('id' lives in '_id')
            return fd=='id' and '_id' or fd
        def get(row,fd):
            return fd=='id' and long(row['_id']) or row.get(fd,None)
        fields = new_fields
        tablename = self.get_table(query)
        fieldnames = [f.name for f in (fields or self.db[tablename])]
        colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
        fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
        # map function: emit the selected field values for each matching doc
        fn="(function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);})" %\
            dict(t=tablename,
                 query=self.expand(query),
                 order='%s._id' % tablename,
                 fields=fields)
        return fn, colnames

    def select(self,query,fields,attributes):
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        fn, colnames = self._select(query,fields,attributes)
        tablename = colnames[0].split('.')[0]
        ctable = self.connection[tablename]
        # execute the map function as a temporary view
        rows = [cols['value'] for cols in ctable.query(fn)]
        processor = attributes.get('processor',self.parse)
        return processor(rows,fields,colnames,False)

    def delete(self,tablename,query):
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        if query.first.type=='id' and query.op==self.EQ:
            # fast path: delete a single document addressed by _id
            id = query.second
            tablename = query.first.tablename
            assert(tablename == query.first.tablename)
            ctable = self.connection[tablename]
            try:
                del ctable[str(id)]
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            # general path: select matching ids, then delete each document
            tablename = self.get_table(query)
            rows = self.select(query,[self.db[tablename]._id],{})
            ctable = self.connection[tablename]
            for row in rows:
                del ctable[str(row.id)]
            return len(rows)

    def update(self,tablename,query,fields):
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        if query.first.type=='id' and query.op==self.EQ:
            # fast path: update a single document addressed by _id
            id = query.second
            tablename = query.first.tablename
            ctable = self.connection[tablename]
            try:
                doc = ctable[str(id)]
                for key,value in fields:
                    doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
                ctable.save(doc)
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            # general path: update every matching document in turn
            tablename = self.get_table(query)
            rows = self.select(query,[self.db[tablename]._id],{})
            ctable = self.connection[tablename]
            table = self.db[tablename]
            for row in rows:
                doc = ctable[str(row.id)]
                for key,value in fields:
                    doc[key.name] = self.represent(value,table[key.name].type)
                ctable.save(doc)
            return len(rows)

    def count(self,query,distinct=None):
        # counting is implemented by selecting the matching ids
        if distinct:
            raise RuntimeError("COUNT DISTINCT not supported")
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        tablename = self.get_table(query)
        rows = self.select(query,[self.db[tablename]._id],{})
        return len(rows)
||||
@@ -0,0 +1,54 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from .mysql import MySQLAdapter
|
||||
|
||||
|
||||
class CubridAdapter(MySQLAdapter):
    """DAL adapter for the CUBRID database (speaks the MySQL dialect).

    URI form: cubrid://user:password@host[:port]/db[?set_encoding=charset]
    (default port 30000).
    """
    drivers = ('cubriddb',)

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "cubrid"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # parse the URI tail (after 'cubrid://')
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = int(m.group('port') or '30000')
        # BUGFIX: user/password were previously fed through
        # credential_decoder a *second* time here, corrupting credentials
        # for any non-identity decoder; decode exactly once (above).
        passwd = password
        def connector(host=host,port=port,db=db,
                      user=user,passwd=passwd,driver_args=driver_args):
            return self.driver.connect(host,port,db,user,passwd,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # enforce referential integrity and MySQL-compatible escaping
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
||||
@@ -0,0 +1,105 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import base64
|
||||
import datetime
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class DB2Adapter(BaseAdapter):
    """DAL adapter for IBM DB2, usable through ibm_db_dbi or pyodbc."""
    drivers = ('ibm_db_dbi', 'pyodbc')

    # DAL field type -> DB2 column DDL template
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'REAL',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'RAND()'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # only the upper bound is pushed into SQL (FETCH FIRST n ROWS);
        # the lower bound is applied client-side in rowslice()
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        # engine-specific literal formats; returning None means
        # "no special case, use the default representation"
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return "BLOB('%s')" % obj
        elif fieldtype == 'datetime':
            # DB2 timestamp literal format: yyyy-mm-dd-hh.mm.ss
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+'-00.00.00'
            return "'%s'" % obj
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "db2"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # everything after 'db2://' is the raw connection string
        ruri = uri.split('://', 1)[1]

        def connector(cnxn=ruri,driver_args=driver_args):
            if self.driver_name == 'ibm_db_dbi':
                # ibm_db_dbi wants dsn/uid/pwd as separate arguments, so
                # split the 'k1=v1;k2=v2;...' string into a dict first
                vars = cnxn.split(";")
                cnxn = {}
                for var in vars:
                    v = var.split('=')
                    cnxn[v[0].lower()] = v[1]
                return self.driver.connect(cnxn['dsn'], cnxn['uid'], cnxn['pwd'], **driver_args)
            else:
                # pyodbc accepts the connection string verbatim
                return self.driver.connect(cnxn, **driver_args)

        self.connector = connector
        if do_connect: self.reconnect()

    def execute(self,command,placeholders=None):
        # DB2 rejects a trailing semicolon; strip it before executing
        if command[-1:]==';':
            command = command[:-1]
        if placeholders:
            return self.log_execute(command, placeholders)
        return self.log_execute(command)

    def lastrowid(self,table):
        # id of the row just inserted on this connection
        self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
        return long(self.cursor.fetchone()[0])

    def rowslice(self,rows,minimum=0,maximum=None):
        # client-side lower-bound handling (see select_limitby)
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]
||||
@@ -0,0 +1,182 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from ..objects import Expression
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class FireBirdAdapter(BaseAdapter):
    """DAL adapter for Firebird / Interbase servers.

    URI form: firebird://user:password@host[:port]/db[?set_encoding=charset]
    (default port 3050, default charset UTF8).
    """
    drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')

    commit_on_alter_table = False
    support_distributed_transaction = True
    # DAL field type -> Firebird column DDL template
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'json': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        'big-id': 'BIGINT PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def sequence_name(self,tablename):
        # id values come from a generator named genid_<tablename>
        return ('genid_' + self.QUOTE_TEMPLATE) % tablename

    def trigger_name(self,tablename):
        return 'trg_id_%s' % tablename

    def RANDOM(self):
        return 'RAND()'

    def EPOCH(self, first):
        return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)

    def NOT_NULL(self,default,field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])

    def LENGTH(self, first):
        return "CHAR_LENGTH(%s)" % self.expand(first)

    def CONTAINS(self,first,second,case_sensitive=False):
        # list: fields are '|'-delimited strings: wrap the needle in '|'
        # (doubling any embedded bars) so only whole items can match
        if first.type.startswith('list:'):
            second = Expression(None,self.CONCAT('|',Expression(
                        None,self.REPLACE(second,('|','||'))),'|'))
        return '(%s CONTAINING %s)' % (self.expand(first),
                                       self.expand(second, 'string'))

    def _drop(self,table,mode):
        # also drop the id generator created alongside the table
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table.sqlsafe, mode), 'DROP GENERATOR %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # Firebird paginates with FIRST <count> SKIP <offset>
        if limitby:
            (lmin, lmax) = limitby
            sql_s = ' FIRST %i SKIP %i %s' % (lmax - lmin, lmin, sql_s)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def _truncate(self,table,mode = ''):
        # no TRUNCATE statement: delete all rows and reset the generator
        return ['DELETE FROM %s;' % table._tablename,
                'SET GENERATOR %s TO 0;' % table._sequence_name]

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # parse the URI tail (after 'firebird://')
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        port = int(m.group('port') or 3050)
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        charset = m.group('charset') or 'UTF8'
        # NOTE(review): user/password were already run through
        # credential_decoder above -- the second application below is a
        # no-op only when the decoder is IDENTITY.  TODO: confirm and
        # decode exactly once.
        driver_args.update(dsn='%s/%s:%s' % (host,port,db),
                           user = credential_decoder(user),
                           password = credential_decoder(password),
                           charset = charset)

        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def create_sequence_and_triggers(self, query, table, **args):
        # id generation: a generator plus a BEFORE INSERT trigger that
        # fills new.id whenever the caller did not provide one
        tablename = table._tablename
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        self.execute(query)
        self.execute('create generator %s;' % sequence_name)
        self.execute('set generator %s to 0;' % sequence_name)
        self.execute('create trigger %s for %s active before insert position 0 as\nbegin\nif(new.id is null) then\nbegin\nnew.id = gen_id(%s, 1);\nend\nend;' % (trigger_name, tablename, sequence_name))

    def lastrowid(self,table):
        # current value of the table's generator = last assigned id
        sequence_name = table._sequence_name
        self.execute('SELECT gen_id(%s, 0) FROM rdb$database' % sequence_name)
        return long(self.cursor.fetchone()[0])
||||
|
||||
|
||||
class FireBirdEmbeddedAdapter(FireBirdAdapter):
    """Adapter for embedded Firebird databases addressed by file path.

    URI form: firebird_embedded://user:password@/path/to/db[?set_encoding=charset]
    (default charset UTF8; no host is involved).
    """
    drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')

    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # parse the URI tail (after the scheme)
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        pathdb = m.group('path')
        if not pathdb:
            raise SyntaxError('Path required')
        charset = m.group('charset')
        if not charset:
            charset = 'UTF8'
        host = ''
        # BUGFIX: user/password were previously passed through
        # credential_decoder a *second* time here, corrupting credentials
        # for any non-identity decoder; decode exactly once (above).
        driver_args.update(host=host,
                           database=pathdb,
                           user=user,
                           password=password,
                           charset=charset)

        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
||||
@@ -0,0 +1,621 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import re
|
||||
|
||||
from .._compat import pjoin
|
||||
from .._globals import IDENTITY, LOGGER, THREAD_LOCAL
|
||||
from .._load import classobj, gae, ndb, NDBDecimalProperty, GAEDecimalProperty, \
|
||||
namespace_manager, Key, NDBPolyModel, PolyModel, rdbms, have_serializers, \
|
||||
serializers, simplejson
|
||||
from ..objects import Table, Field, Expression, Query
|
||||
from ..helpers.classes import SQLCustomType, SQLALL, Reference, UseDatabaseStoredFile
|
||||
from ..helpers.methods import use_common_filters, xorify
|
||||
from .base import NoSQLAdapter
|
||||
from .mysql import MySQLAdapter
|
||||
|
||||
|
||||
class GoogleSQLAdapter(UseDatabaseStoredFile, MySQLAdapter):
    """DAL adapter for Google Cloud SQL (MySQL dialect) on App Engine.

    Migration metadata is stored in the database itself via
    UseDatabaseStoredFile, since GAE offers no writable filesystem.
    URI form: google:sql://instance/database
    """
    uploads_in_blob = True

    # URI tail is '<instance>/<database>'
    REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')

    def __init__(self, db, uri='google:sql://realm:domain/database',
                 pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):

        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.db_codec = db_codec
        self._after_connection = after_connection
        if do_connect: self.find_driver(adapter_args, uri)
        # anchor the working folder under $HOME/<application subpath>
        self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
                os.sep+'applications'+os.sep,1)[1])
        ruri = uri.split("://")[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in SQLDB: %s" % self.uri)
        instance = credential_decoder(m.group('instance'))
        self.dbstring = db = credential_decoder(m.group('db'))
        driver_args['instance'] = instance
        if not 'charset' in driver_args:
            driver_args['charset'] = 'utf8'
        # createdb (default True): create/select the database at connect
        # time instead of passing it in the connection arguments
        self.createdb = createdb = adapter_args.get('createdb',True)
        if not createdb:
            driver_args['database'] = db
        def connector(driver_args=driver_args):
            # the platform-provided rdbms module opens the connection
            return rdbms.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        if self.createdb:
            # self.execute('DROP DATABASE %s' % self.dbstring)
            self.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbstring)
            self.execute('USE %s' % self.dbstring)
        self.execute("SET FOREIGN_KEY_CHECKS=1;")
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def execute(self, command, *a, **b):
        # commands may arrive utf8-encoded; the driver expects unicode
        return self.log_execute(command.decode('utf8'), *a, **b)

    def find_driver(self,adapter_args,uri=None):
        # the platform supplies the driver; bypass the normal lookup
        self.adapter_args = adapter_args
        self.driver = "google"
|
||||
|
||||
class GAEF(object):
    """One translated datastore filter.

    Holds the filter's field name (the DAL pseudo-field 'id' is rewritten
    to the datastore's '__key__'), its comparison operator, the comparand
    value, and a callable used to apply the filter.
    """

    def __init__(self,name,op,value,apply):
        self.name = '__key__' if name == 'id' else name
        self.op = op
        self.value = value
        self.apply = apply

    def __repr__(self):
        return '(%s %s %s:%s)' % (
            self.name, self.op, repr(self.value), type(self.value))
||||
|
||||
|
||||
class GoogleDatastoreAdapter(NoSQLAdapter):
|
||||
"""
|
||||
NDB:
|
||||
|
||||
You can enable NDB by using adapter_args::
|
||||
|
||||
db = DAL('google:datastore', adapter_args={'ndb_settings':ndb_settings, 'use_ndb':True})
|
||||
|
||||
ndb_settings is optional and can be used for per model caching settings.
|
||||
It must be a dict in this form::
|
||||
|
||||
ndb_settings = {<table_name>:{<variable_name>:<variable_value>}}
|
||||
|
||||
See: https://developers.google.com/appengine/docs/python/ndb/cache
|
||||
"""
|
||||
|
||||
MAX_FETCH_LIMIT = 1000000
|
||||
uploads_in_blob = True
|
||||
types = {}
|
||||
# reconnect is not required for Datastore dbs
|
||||
reconnect = lambda *args, **kwargs: None
|
||||
|
||||
def file_exists(self, filename): pass
|
||||
def file_open(self, filename, mode='rb', lock=True): pass
|
||||
def file_close(self, fileobj): pass
|
||||
|
||||
REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)')
|
||||
|
||||
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
|
||||
credential_decoder=IDENTITY, driver_args={},
|
||||
adapter_args={}, do_connect=True, after_connection=None):
|
||||
self.use_ndb = adapter_args.get('use_ndb',uri.startswith('google:datastore+ndb'))
|
||||
if self.use_ndb is True:
|
||||
self.types.update({
|
||||
'boolean': ndb.BooleanProperty,
|
||||
'string': (lambda **kwargs: ndb.StringProperty(**kwargs)),
|
||||
'text': ndb.TextProperty,
|
||||
'json': ndb.TextProperty,
|
||||
'password': ndb.StringProperty,
|
||||
'blob': ndb.BlobProperty,
|
||||
'upload': ndb.StringProperty,
|
||||
'integer': ndb.IntegerProperty,
|
||||
'bigint': ndb.IntegerProperty,
|
||||
'float': ndb.FloatProperty,
|
||||
'double': ndb.FloatProperty,
|
||||
'decimal': NDBDecimalProperty,
|
||||
'date': ndb.DateProperty,
|
||||
'time': ndb.TimeProperty,
|
||||
'datetime': ndb.DateTimeProperty,
|
||||
'id': None,
|
||||
'reference': ndb.IntegerProperty,
|
||||
'list:string': (lambda **kwargs: ndb.StringProperty(repeated=True,default=None, **kwargs)),
|
||||
'list:integer': (lambda **kwargs: ndb.IntegerProperty(repeated=True,default=None, **kwargs)),
|
||||
'list:reference': (lambda **kwargs: ndb.IntegerProperty(repeated=True,default=None, **kwargs)),
|
||||
})
|
||||
else:
|
||||
self.types.update({
|
||||
'boolean': gae.BooleanProperty,
|
||||
'string': (lambda **kwargs: gae.StringProperty(multiline=True, **kwargs)),
|
||||
'text': gae.TextProperty,
|
||||
'json': gae.TextProperty,
|
||||
'password': gae.StringProperty,
|
||||
'blob': gae.BlobProperty,
|
||||
'upload': gae.StringProperty,
|
||||
'integer': gae.IntegerProperty,
|
||||
'bigint': gae.IntegerProperty,
|
||||
'float': gae.FloatProperty,
|
||||
'double': gae.FloatProperty,
|
||||
'decimal': GAEDecimalProperty,
|
||||
'date': gae.DateProperty,
|
||||
'time': gae.TimeProperty,
|
||||
'datetime': gae.DateTimeProperty,
|
||||
'id': None,
|
||||
'reference': gae.IntegerProperty,
|
||||
'list:string': (lambda **kwargs: gae.StringListProperty(default=None, **kwargs)),
|
||||
'list:integer': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
|
||||
'list:reference': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
|
||||
})
|
||||
self.db = db
|
||||
self.uri = uri
|
||||
self.dbengine = 'google:datastore'
|
||||
self.folder = folder
|
||||
db['_lastsql'] = ''
|
||||
self.db_codec = 'UTF-8'
|
||||
self._after_connection = after_connection
|
||||
self.pool_size = 0
|
||||
match = self.REGEX_NAMESPACE.match(uri)
|
||||
if match:
|
||||
namespace_manager.set_namespace(match.group('namespace'))
|
||||
self.keyfunc = (self.use_ndb and ndb.Key) or Key.from_path
|
||||
|
||||
self.ndb_settings = None
|
||||
if 'ndb_settings' in adapter_args:
|
||||
self.ndb_settings = adapter_args['ndb_settings']
|
||||
|
||||
def parse_id(self, value, field_type):
    """Datastore ids need no conversion; return the value untouched."""
    return value
|
||||
|
||||
def represent(self, obj, fieldtype):
    """Serialize ``obj`` for storage.

    JSON fields are dumped to a string with whichever JSON library is
    available; every other field type defers to NoSQLAdapter.
    """
    if fieldtype != "json":
        return NoSQLAdapter.represent(self, obj, fieldtype)
    if have_serializers:
        return serializers.json(obj)
    if simplejson:
        return simplejson.dumps(obj)
    raise Exception("Could not dump json object (missing json library)")
|
||||
|
||||
def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
    """Build the GAE/NDB model class backing ``table``.

    Maps each DAL field to a datastore property and stores the generated
    model class in ``table._tableobj``. ``polymodel`` may be None (plain
    model), True (PolyModel base) or a Table (inherit from its model).
    ``migrate``/``fake_migrate`` are accepted for interface compatibility
    but unused on the datastore.

    Raises:
        SyntaxError: for unknown field types or an invalid ``polymodel``.
    """
    myfields = {}
    for field in table:
        # fields inherited from a parent polymodel are not redeclared
        if isinstance(polymodel, Table) and field.name in polymodel.fields():
            continue
        attr = {}
        if isinstance(field.custom_qualifier, dict):
            # custom properties to add to the GAE field declaration
            attr = field.custom_qualifier
        field_type = field.type
        if isinstance(field_type, SQLCustomType):
            ftype = self.types[field_type.native or field_type.type](**attr)
        elif isinstance(field_type, ((self.use_ndb and ndb.Property) or gae.Property)):
            # a raw datastore property passes through unchanged
            ftype = field_type
        elif field_type.startswith('id'):
            # the datastore key plays the role of the id field
            continue
        elif field_type.startswith('decimal'):
            precision, scale = field_type[7:].strip('()').split(',')
            precision = int(precision)
            scale = int(scale)
            dec_cls = (self.use_ndb and NDBDecimalProperty) or GAEDecimalProperty
            ftype = dec_cls(precision, scale, **attr)
        elif field_type.startswith('reference'):
            if field.notnull:
                attr = dict(required=True)
            ftype = self.types[field_type[:9]](**attr)
        elif field_type.startswith('list:reference'):
            if field.notnull:
                attr['required'] = True
            ftype = self.types[field_type[:14]](**attr)
        elif field_type.startswith('list:'):
            ftype = self.types[field_type](**attr)
        elif not field_type in self.types \
                or not self.types[field_type]:
            raise SyntaxError('Field: unknown field type: %s' % field_type)
        else:
            ftype = self.types[field_type](**attr)
        myfields[field.name] = ftype
    if not polymodel:
        model_cls = (self.use_ndb and ndb.Model) or gae.Model
        table._tableobj = classobj(table._tablename, (model_cls, ), myfields)
        if self.use_ndb:
            # Set NDB caching variables for this table only.
            # BUGFIX: iterate the per-table settings dict, not the whole
            # ndb_settings mapping (which would set dicts as attributes).
            if self.ndb_settings and (table._tablename in self.ndb_settings):
                for k, v in self.ndb_settings[table._tablename].iteritems():
                    setattr(table._tableobj, k, v)
    elif polymodel == True:
        pm_cls = (self.use_ndb and NDBPolyModel) or PolyModel
        table._tableobj = classobj(table._tablename, (pm_cls, ), myfields)
    elif isinstance(polymodel, Table):
        table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
    else:
        raise SyntaxError("polymodel must be None, True, a table or a tablename")
    return None
|
||||
|
||||
def expand(self, expression, field_type=None):
    """Recursively translate a DAL expression into GAE filter terms.

    Fields expand to their property name, Expression/Query nodes
    dispatch to their operator, and bare values are serialized via
    represent(). Raises SyntaxError for unindexable field types.
    """
    if isinstance(expression, Field):
        # text/blob/json properties are not indexed by the datastore
        if expression.type in ('text', 'blob', 'json'):
            raise SyntaxError('AppEngine does not index by: %s' % expression.type)
        return expression.name
    if isinstance(expression, (Expression, Query)):
        if expression.second is not None:
            return expression.op(expression.first, expression.second)
        if expression.first is not None:
            return expression.op(expression.first)
        return expression.op()
    if field_type:
        return self.represent(expression, field_type)
    if isinstance(expression, (list, tuple)):
        return ','.join([self.represent(element, field_type)
                         for element in expression])
    return str(expression)
|
||||
|
||||
### TODO from gql.py Expression
|
||||
def AND(self, first, second):
    """Concatenate two filter lists, moving __key__ filters to the front."""
    left = self.expand(first)
    right = self.expand(second)
    # key filters must come first for the datastore query planner
    if right[0].name == '__key__' and left[0].name != '__key__':
        return right + left
    return left + right
|
||||
|
||||
def EQ(self, first, second=None):
    """Build an '=' datastore filter; Key values pass through unconverted."""
    if isinstance(second, Key):
        value = second
    else:
        value = self.represent(second, first.type)
    return [GAEF(first.name, '=', value, lambda a, b: a == b)]
|
||||
|
||||
def NE(self, first, second=None):
    """Build a '!=' datastore filter; id comparisons go through keys."""
    if first.type == 'id':
        if second is not None:
            second = self.keyfunc(first._tablename, long(second))
    else:
        second = self.represent(second, first.type)
    return [GAEF(first.name, '!=', second, lambda a, b: a != b)]
|
||||
|
||||
def LT(self, first, second=None):
    """Build a '<' datastore filter; id comparisons go through keys."""
    if first.type == 'id':
        second = self.keyfunc(first._tablename, long(second))
    else:
        second = self.represent(second, first.type)
    return [GAEF(first.name, '<', second, lambda a, b: a < b)]
|
||||
|
||||
def LE(self, first, second=None):
    """Build a '<=' datastore filter; id comparisons go through keys."""
    if first.type == 'id':
        second = self.keyfunc(first._tablename, long(second))
    else:
        second = self.represent(second, first.type)
    return [GAEF(first.name, '<=', second, lambda a, b: a <= b)]
|
||||
|
||||
def GT(self, first, second=None):
    """Build a '>' datastore filter; id values other than 0 compare as keys."""
    if first.type != 'id' or second == 0 or second == '0':
        second = self.represent(second, first.type)
    else:
        second = self.keyfunc(first._tablename, long(second))
    return [GAEF(first.name, '>', second, lambda a, b: a > b)]
|
||||
|
||||
def GE(self, first, second=None):
    """Build a '>=' datastore filter; id comparisons go through keys."""
    if first.type == 'id':
        second = self.keyfunc(first._tablename, long(second))
    else:
        second = self.represent(second, first.type)
    return [GAEF(first.name, '>=', second, lambda a, b: a >= b)]
|
||||
|
||||
def INVERT(self, first):
    """Return the descending-order token for a field ('-name')."""
    return '-' + first.name
|
||||
|
||||
def COMMA(self, first, second):
    """Join the expansions of two expressions with ', ' (orderby lists)."""
    left = self.expand(first)
    right = self.expand(second)
    return '%s, %s' % (left, right)
|
||||
|
||||
def BELONGS(self, first, second=None):
    """Build an 'in' filter; id lists are converted to datastore keys."""
    if not isinstance(second, (list, tuple, set)):
        raise SyntaxError("Not supported")
    if not self.use_ndb:
        # the old gae API cannot consume sets directly
        if isinstance(second, set):
            second = list(second)
    if first.type == 'id':
        second = [self.keyfunc(first._tablename, int(i)) for i in second]
    return [GAEF(first.name, 'in', second, lambda a, b: a in b)]
|
||||
|
||||
def CONTAINS(self, first, second, case_sensitive=False):
    """Build a list-membership filter for a list: field.

    ``case_sensitive`` is accepted but silently ignored: GAE can only
    do case sensitive matches.
    """
    if not first.type.startswith('list:'):
        raise SyntaxError("Not supported")
    value = self.expand(second, first.type[5:])
    return [GAEF(first.name, '=', value, lambda a, b: b in a)]
|
||||
|
||||
def NOT(self, first):
    """Negate a simple comparison query by swapping its operator.

    Only direct comparison ops (EQ/NE/LT/LE/GT/GE) have an inverse;
    anything else cannot be negated on the datastore.

    Raises:
        SyntaxError: if ``first`` is not a Query or its op has no inverse.
    """
    nops = {self.EQ: self.NE,
            self.NE: self.EQ,
            self.LT: self.GE,
            self.GT: self.LE,
            self.LE: self.GT,
            self.GE: self.LT}
    if not isinstance(first, Query):
        # fixed typo in error message ("suported" -> "supported")
        raise SyntaxError("Not supported")
    nop = nops.get(first.op, None)
    if not nop:
        raise SyntaxError("Not supported %s" % first.op.__name__)
    first.op = nop
    return self.expand(first)
|
||||
|
||||
def truncate(self, table, mode):
    """Delete every record of ``table`` (the datastore has no TRUNCATE)."""
    id_query = self.db._adapter.id_query(table)
    self.db(id_query).delete()
|
||||
|
||||
# Dispatch table used by filter(): maps a web2py comparison operator to a
# callable applying the equivalent NDB query filter.
# Arguments: (query, model_class, property_name, value) -> new query.
GAE_FILTER_OPTIONS = {
    '=': lambda q, t, p, v: q.filter(getattr(t,p) == v),
    '>': lambda q, t, p, v: q.filter(getattr(t,p) > v),
    '<': lambda q, t, p, v: q.filter(getattr(t,p) < v),
    '<=': lambda q, t, p, v: q.filter(getattr(t,p) <= v),
    '>=': lambda q, t, p, v: q.filter(getattr(t,p) >= v),
    '!=': lambda q, t, p, v: q.filter(getattr(t,p) != v),
    'in': lambda q, t, p, v: q.filter(getattr(t,p).IN(v)),
    }
|
||||
|
||||
def filter(self, query, tableobj, prop, op, value):
    """Apply one NDB filter, selected by ``op``, to ``query``."""
    apply_filter = self.GAE_FILTER_OPTIONS[op]
    return apply_filter(query, tableobj, prop, value)
|
||||
|
||||
def select_raw(self, query, fields=None, attributes=None, count_only=False):
    """Run a datastore query and return raw results.

    Returns a tuple ``(items, tablename, fieldnames)`` where ``items`` is
    either a list of entities (single-record key lookups), a lazy query
    iterator, or ``[count]`` when ``count_only`` is True.

    Recognized ``attributes``: projection, filterfields, reusecursor,
    orderby, limitby; ``left`` and ``groupby`` raise SyntaxError since the
    datastore supports neither.
    """
    db = self.db
    fields = fields or []
    attributes = attributes or {}
    args_get = attributes.get
    new_fields = []

    # expand SQLALL placeholders into the table's concrete fields
    for item in fields:
        if isinstance(item, SQLALL):
            new_fields += item._table
        else:
            new_fields.append(item)

    fields = new_fields
    if query:
        tablename = self.get_table(query)
    elif fields:
        tablename = fields[0].tablename
        query = db._adapter.id_query(fields[0].table)
    else:
        raise SyntaxError("Unable to determine a tablename")

    if query:
        if use_common_filters(query):
            query = self.common_filter(query, [tablename])

    # tableobj is a GAE/NDB Model class (or subclass)
    tableobj = db[tablename]._tableobj
    filters = self.expand(query)

    projection = None
    if len(db[tablename].fields) == len(fields):
        # getting all fields, not a projection query
        projection = None
    elif args_get('projection') == True:
        projection = []
        for f in fields:
            if f.type in ['text', 'blob', 'json']:
                raise SyntaxError(
                    "text and blob field types not allowed in projection queries")
            else:
                projection.append(f.name)
    elif args_get('filterfields') is True:
        # parse only these fields later; the query still returns everything
        projection = []
        for f in fields:
            projection.append(f.name)

    # real projections can't include 'id';
    # it will be added to the result later
    query_projection = [
        p for p in projection if
        p != db[tablename]._id.name] if projection and \
        args_get('projection') == True \
        else None

    cursor = args_get('reusecursor')
    cursor = cursor if isinstance(cursor, str) else None
    if self.use_ndb:
        qo = ndb.QueryOptions(projection=query_projection, cursor=cursor)
        items = tableobj.query(default_options=qo)
    else:
        items = gae.Query(tableobj, projection=query_projection, cursor=cursor)

    for filt in filters:
        if (args_get('projection') == True and
                filt.name in query_projection and
                filt.op in ('=', '<=', '>=')):
            raise SyntaxError("projection fields cannot have equality filters")
        if filt.name == '__key__' and filt.op == '>' and filt.value == 0:
            continue
        elif filt.name == '__key__' and filt.op == '=':
            if filt.value == 0:
                items = []
            elif isinstance(filt.value, (self.use_ndb and ndb.Key) or Key):
                # key queries return a class instance,
                # can't use projection
                # extra values will be ignored in post-processing later
                item = filt.value.get() if self.use_ndb else tableobj.get(filt.value)
                items = [item] if item else []
            else:
                # key queries return a class instance,
                # can't use projection
                # extra values will be ignored in post-processing later
                item = tableobj.get_by_id(filt.value)
                items = [item] if item else []
        elif isinstance(items, list):  # i.e. there is a single record!
            # BUGFIX: test each candidate record `i`, not the stale `item`
            # variable left over from the key-lookup branch above
            items = [i for i in items if filt.apply(
                getattr(i, filt.name), filt.value)]
        else:
            if filt.name == '__key__' and filt.op != 'in':
                # BUGFIX: NDB's order() returns a new query object; the
                # result must be kept (gae's order() returns self, so the
                # assignment is a no-op change there)
                items = items.order(tableobj._key) if self.use_ndb \
                    else items.order('__key__')
            if self.use_ndb:
                items = self.filter(items, tableobj, filt.name, filt.op, filt.value)
            else:
                items = items.filter('%s %s' % (filt.name, filt.op), filt.value)

    if count_only:
        items = [len(items) if isinstance(items, list) else items.count()]
    elif not isinstance(items, list):
        query = items
        if args_get('left', None):
            raise SyntaxError('Set: no left join in appengine')
        if args_get('groupby', None):
            raise SyntaxError('Set: no groupby in appengine')
        orderby = args_get('orderby', False)
        if orderby:
            ### THIS REALLY NEEDS IMPROVEMENT !!!
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if isinstance(orderby, Expression):
                orderby = self.expand(orderby)
            orders = orderby.split(', ')
            for order in orders:
                if self.use_ndb:
                    # TODO There must be a better way
                    def make_order(o):
                        s = str(o)
                        desc = s[0] == '-'
                        s = (desc and s[1:]) or s
                        return (desc and -getattr(tableobj, s)) or getattr(tableobj, s)
                    _order = {'-id': -tableobj._key, 'id': tableobj._key}.get(order)
                    if _order is None:
                        _order = make_order(order)
                    query = query.order(_order)
                else:
                    order = {'-id': '-__key__', 'id': '__key__'}.get(order, order)
                    query = query.order(order)

        if args_get('limitby', None):
            (lmin, lmax) = attributes['limitby']
            limit, fetch_args = lmax - lmin, {'offset': lmin, 'keys_only': True}

            if self.use_ndb:
                keys, cursor, more = query.fetch_page(limit, **fetch_args)
                items = ndb.get_multi(keys)
            else:
                keys = query.fetch(limit, **fetch_args)
                items = gae.get(keys)
                cursor = query.cursor()
            # cursor is only useful if there was a limit and we didn't return
            # all results
            if args_get('reusecursor'):
                db['_lastcursor'] = cursor
        else:
            # if a limit is not specified, always return an iterator
            items = query

    return (items, tablename, projection or db[tablename].fields)
|
||||
|
||||
def select(self,query,fields,attributes):
    """
    This is the GAE version of select. Some notes to consider:
    - db['_lastsql'] is not set because there is not SQL statement string
        for a GAE query
    - 'nativeRef' is a magical fieldname used for self references on GAE
    - optional attribute 'projection' when set to True will trigger
        use of the GAE projection queries. note that there are rules for
        what is accepted imposed by GAE: each field must be indexed,
        projection queries cannot contain blob or text fields, and you
        cannot use == and also select that same field.
        see https://developers.google.com/appengine/docs/python/datastore/queries#Query_Projection
    - optional attribute 'filterfields' when set to True web2py will only
        parse the explicitly listed fields into the Rows object, even though
        all fields are returned in the query. This can be used to reduce
        memory usage in cases where true projection queries are not
        usable.
    - optional attribute 'reusecursor' allows use of cursor with queries
        that have the limitby attribute. Set the attribute to True for the
        first query, set it to the value of db['_lastcursor'] to continue
        a previous query. The user must save the cursor value between
        requests, and the filters must be identical. It is up to the user
        to follow google's limitations:
        https://developers.google.com/appengine/docs/python/datastore/queries#Query_Cursors
    """

    (items, tablename, fields) = self.select_raw(query,fields,attributes)
    # self.db['_lastsql'] = self._select(query,fields,attributes)
    # For the id column and the self-reference pseudo-column 'nativeRef'
    # the row cell is the entity itself; every other cell is the named
    # attribute of the entity.
    # NOTE(review): the `and/or` chain assumes the entity is truthy; a
    # falsy `item` would fall through to getattr -- confirm entities are
    # always truthy here.
    rows = [[(t==self.db[tablename]._id.name and item) or \
             (t=='nativeRef' and item) or getattr(item, t) \
             for t in fields] for item in items]
    colnames = ['%s.%s' % (tablename, t) for t in fields]
    processor = attributes.get('processor',self.parse)
    return processor(rows,fields,colnames,False)
|
||||
|
||||
def parse_list_integers(self, value, field_type):
    """Return the stored integer list (a copy under NDB)."""
    if self.use_ndb:
        return value[:]
    return value
|
||||
|
||||
def parse_list_strings(self, value, field_type):
    """Return the stored string list (a copy under NDB)."""
    if self.use_ndb:
        return value[:]
    return value
|
||||
|
||||
def count(self, query, distinct=None, limit=None):
    """Count the records matching ``query``; DISTINCT is unsupported on GAE."""
    if distinct:
        raise RuntimeError("COUNT DISTINCT not supported")
    items, tablename, fields = self.select_raw(query, count_only=True)
    return items[0]
|
||||
|
||||
def delete(self,tablename, query):
    """
    Delete all records matching `query` and return how many were removed.

    This function was changed on 2010-05-04 because according to
    http://code.google.com/p/googleappengine/issues/detail?id=3119
    GAE no longer supports deleting more than 1000 records.
    """
    # self.db['_lastsql'] = self._delete(tablename,query)
    (items, tablename, fields) = self.select_raw(query)
    # items can be one item or a query
    if not isinstance(items,list):
        #use a keys_only query to ensure that this runs as a datastore
        # small operations
        # delete in batches of at most 1000 keys, re-fetching until the
        # query yields nothing
        leftitems = items.fetch(1000, keys_only=True)
        counter = 0
        while len(leftitems):
            counter += len(leftitems)
            if self.use_ndb:
                ndb.delete_multi(leftitems)
            else:
                gae.delete(leftitems)
            leftitems = items.fetch(1000, keys_only=True)
    else:
        # a concrete list of entities (e.g. a single key lookup)
        counter = len(items)
        if self.use_ndb:
            ndb.delete_multi([item.key for item in items])
        else:
            gae.delete(items)
    return counter
|
||||
|
||||
def update(self, tablename, query, update_fields):
    """Apply ``update_fields`` to every record matching ``query``.

    Returns the number of records updated (each entity is put() back
    individually).
    """
    # self.db['_lastsql'] = self._update(tablename,query,update_fields)
    items, tablename, fields = self.select_raw(query)
    counter = 0
    for entity in items:
        for field, value in update_fields:
            setattr(entity, field.name, self.represent(value, field.type))
        entity.put()
        counter += 1
    LOGGER.info(str(counter))
    return counter
|
||||
|
||||
def insert(self, table, fields):
    """Store one new entity and return a Reference to its id."""
    dfields = dict((f.name, self.represent(v, f.type)) for f, v in fields)
    # table._db['_lastsql'] = self._insert(table,fields)
    entity = table._tableobj(**dfields)
    entity.put()
    key = entity.key if self.use_ndb else entity.key()
    rid = Reference(key.id())
    (rid._table, rid._record, rid._gaekey) = (table, None, key)
    return rid
|
||||
|
||||
def bulk_insert(self, table, items):
    """Store many records in a single datastore round trip."""
    parsed_items = []
    for item in items:
        dfields = dict((f.name, self.represent(v, f.type)) for f, v in item)
        parsed_items.append(table._tableobj(**dfields))
    if self.use_ndb:
        ndb.put_multi(parsed_items)
    else:
        gae.put(parsed_items)
    return True
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,134 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
import re
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class InformixAdapter(BaseAdapter):
    """DAL adapter for IBM Informix using the informixdb driver."""

    drivers = ('informixdb',)

    # Map of web2py field types to Informix column DDL fragments.
    # NOTE(review): 'text'/'json'/'blob' use 'BLOB SUB_TYPE n', which is
    # Firebird-style DDL -- confirm against the target Informix version.
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'json': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'SERIAL',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        'big-id': 'BIGSERIAL',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': 'REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s CONSTRAINT FK_%(table_name)s_%(field_name)s',
        'reference TFK': 'FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s CONSTRAINT TFK_%(table_name)s_%(field_name)s',
        }

    def RANDOM(self):
        """SQL expression used for random ordering."""
        return 'Random()'

    def NOT_NULL(self,default,field_type):
        """DDL fragment: column default plus NOT NULL constraint."""
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Emulate LIMIT/OFFSET with Informix SKIP/FIRST, version permitting."""
        if limitby:
            (lmin, lmax) = limitby
            fetch_amt = lmax - lmin
            dbms_version = int(self.connection.dbms_version.split('.')[0])
            if lmin and (dbms_version >= 10):
                # Requires Informix 10.0+
                sql_s += ' SKIP %d' % (lmin, )
            if fetch_amt and (dbms_version >= 9):
                # Requires Informix 9.0+
                sql_s += ' FIRST %d' % (fetch_amt, )
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        """Render date/datetime literals as to_date() calls.

        Returns None for other field types so the default representation
        applies.
        """
        if fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','%%Y-%%m-%%d')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T',' ')
            elif isinstance(obj, datetime.date):
                # a bare date becomes midnight of that day
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','%%Y-%%m-%%d %%H:%%M:%%S')" % obj
        return None

    # informix://user:password@host[:port]/db
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Parse an informix:// URI and set up the connection factory."""
        self.db = db
        self.dbengine = "informix"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        # NOTE(review): user/password were already passed through
        # credential_decoder above; this decodes them a second time --
        # harmless with IDENTITY but suspect with a real decoder.
        user = credential_decoder(user)
        password = credential_decoder(password)
        dsn = '%s@%s' % (db,host)
        driver_args.update(user=user,password=password,autocommit=True)
        def connector(dsn=dsn,driver_args=driver_args):
            return self.driver.connect(dsn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def execute(self,command):
        # Informix rejects trailing semicolons; strip one if present
        if command[-1:]==';':
            command = command[:-1]
        return self.log_execute(command)

    def lastrowid(self,table):
        # sqlerrd[1] holds the SERIAL value generated by the last insert
        return self.cursor.sqlerrd[1]
|
||||
|
||||
|
||||
class InformixSEAdapter(InformixAdapter):
    """Informix Standard Edition adapter (work in progress).

    SE lacks SKIP/FIRST, so limitby is ignored in the SQL and applied
    client-side via rowslice().
    """

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Build the SELECT without any limit clause."""
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def rowslice(self, rows, minimum=0, maximum=None):
        """Client-side limitby emulation over the fetched rows."""
        return rows[minimum:] if maximum is None else rows[minimum:maximum]
|
||||
@@ -0,0 +1,147 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from .._load import pyodbc
|
||||
from .base import BaseAdapter
|
||||
|
||||
# NOTE invalid database object name (ANSI-SQL wants
# this form of name to be a delimited identifier)
# Placeholder sequence name substituted into the 'id'/'big-id' DDL
# templates below and swapped for a per-table name at create time.
INGRES_SEQNAME='ii***lineitemsequence'
|
||||
|
||||
|
||||
class IngresAdapter(BaseAdapter):
    """DAL adapter for Actian Ingres via pyodbc."""

    drivers = ('pyodbc',)

    # Map of web2py field types to Ingres column DDL fragments.
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR(%(length)s)', ## Not sure what this contains utf8 or nvarchar. Or even bytes?
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)', ## FIXME utf8 or nvarchar... or blob? what is this type?
        'integer': 'INTEGER4', # or int8...
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'int not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'bigint not null unique with default next value for %s' % INGRES_SEQNAME,
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
        }

    def LEFT_JOIN(self):
        """Ingres spelling of a left join."""
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        """SQL expression used for random ordering."""
        return 'RANDOM()'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Emulate LIMIT/OFFSET with Ingres FIRST/OFFSET."""
        if limitby:
            (lmin, lmax) = limitby
            fetch_amt = lmax - lmin
            if fetch_amt:
                sql_s += ' FIRST %d ' % (fetch_amt, )
            if lmin:
                # Requires Ingres 9.2+
                sql_o += ' OFFSET %d' % (lmin, )
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Build an ODBC connection string from the URI (or pass one through)."""
        self.db = db
        self.dbengine = "ingres"
        self._driver = pyodbc
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        connstr = uri.split(':', 1)[1]
        # Simple URI processing
        connstr = connstr.lstrip()
        while connstr.startswith('/'):
            connstr = connstr[1:]
        if '=' in connstr:
            # Assume we have a regular ODBC connection string and just use it
            ruri = connstr
        else:
            # Assume only (local) dbname is passed in with OS auth
            database_name = connstr
            default_driver_name = 'Ingres'
            vnode = '(local)'
            # NOTE(review): servertype is assigned but never used below
            servertype = 'ingres'
            ruri = 'Driver={%s};Server=%s;Database=%s' % (default_driver_name, vnode, database_name)
        def connector(cnxn=ruri,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)

        self.connector = connector

        # TODO if version is >= 10, set types['id'] to Identity column, see http://community.actian.com/wiki/Using_Ingres_Identity_Columns
        if do_connect: self.reconnect()

    def create_sequence_and_triggers(self, query, table, **args):
        """Create the per-table id sequence and run the CREATE TABLE DDL."""
        # post create table auto inc code (if needed)
        # modify table to btree for performance....
        # Older Ingres releases could use rule/trigger like Oracle above.
        if hasattr(table,'_primarykey'):
            modify_tbl_sql = 'modify %s to btree unique on %s' % \
                (table._tablename,
                 ', '.join(["'%s'" % x for x in table.primarykey]))
            self.execute(modify_tbl_sql)
        else:
            # replace the placeholder sequence name with a per-table one
            tmp_seqname='%s_iisq' % table._tablename
            query=query.replace(INGRES_SEQNAME, tmp_seqname)
            self.execute('create sequence %s' % tmp_seqname)
            self.execute(query)
            self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))


    def lastrowid(self,table):
        """Return the current value of the table's id sequence."""
        tmp_seqname='%s_iisq' % table
        self.execute('select current value for %s' % tmp_seqname)
        return long(self.cursor.fetchone()[0]) # don't really need int type cast here...
|
||||
|
||||
|
||||
class IngresUnicodeAdapter(IngresAdapter):
    """Ingres adapter variant using NVARCHAR/NCLOB Unicode column types."""

    drivers = ('pyodbc',)

    # Same mapping as IngresAdapter but with national (Unicode) text types.
    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NCLOB',
        'json': 'NCLOB',
        'password': 'NVARCHAR(%(length)s)', ## Not sure what this contains utf8 or nvarchar. Or even bytes?
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)', ## FIXME utf8 or nvarchar... or blob? what is this type?
        'integer': 'INTEGER4', # or int8...
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'INTEGER4 not null unique with default next value for %s'% INGRES_SEQNAME,
        'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'NCLOB',
        'list:string': 'NCLOB',
        'list:reference': 'NCLOB',
        'big-id': 'BIGINT not null unique with default next value for %s'% INGRES_SEQNAME,
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
        }
|
||||
@@ -0,0 +1,575 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
import logging
|
||||
import re
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from ..objects import Table, Query, Field, Expression
|
||||
from ..helpers.classes import SQLALL
|
||||
from ..helpers.methods import xorify
|
||||
from .base import NoSQLAdapter
|
||||
|
||||
class MongoDBAdapter(NoSQLAdapter):
|
||||
drivers = ('pymongo',)
|
||||
driver_auto_json = ['loads','dumps']
|
||||
|
||||
uploads_in_blob = False
|
||||
|
||||
types = {
|
||||
'boolean': bool,
|
||||
'string': str,
|
||||
'text': str,
|
||||
'json': str,
|
||||
'password': str,
|
||||
'blob': str,
|
||||
'upload': str,
|
||||
'integer': long,
|
||||
'bigint': long,
|
||||
'float': float,
|
||||
'double': float,
|
||||
'date': datetime.date,
|
||||
'time': datetime.time,
|
||||
'datetime': datetime.datetime,
|
||||
'id': long,
|
||||
'reference': long,
|
||||
'list:string': list,
|
||||
'list:integer': list,
|
||||
'list:reference': list,
|
||||
}
|
||||
|
||||
error_messages = {"javascript_needed": "This must yet be replaced" +
|
||||
" with javascript in order to work."}
|
||||
|
||||
    def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
                 pool_size=0, folder=None, db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Parse *uri* with pymongo's uri_parser and connect to the named
        database.

        Raises SyntaxError when the uri carries no database name.
        adapter_args honours 'minimumreplication', 'safe' and
        'uploads_in_blob'.
        """
        self.db = db
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        import random
        from bson.objectid import ObjectId
        from bson.son import SON
        import pymongo.uri_parser

        m = pymongo.uri_parser.parse_uri(uri)

        # keep driver classes on the instance so other methods need no imports
        self.SON = SON
        self.ObjectId = ObjectId
        self.random = random

        self.dbengine = 'mongodb'
        self.folder = folder
        db['_lastsql'] = ''
        # NOTE(review): the db_codec argument is ignored; codec is forced
        # to UTF-8 here — confirm this is intentional
        self.db_codec = 'UTF-8'
        self._after_connection = after_connection
        self.pool_size = pool_size
        #this is the minimum amount of replicates that it should wait
        # for on insert/update
        self.minimumreplication = adapter_args.get('minimumreplication',0)
        # by default all inserts and selects are performed asynchronous,
        # but now the default is
        # synchronous, except when overruled by either this default or
        # function parameter
        self.safe = adapter_args.get('safe',True)
        # load user setting for uploads in blob storage
        self.uploads_in_blob = adapter_args.get('uploads_in_blob', False)

        # older pymongo returned a tuple; normalize to a dict
        if isinstance(m,tuple):
            m = {"database" : m[1]}
        if m.get('database') is None:
            raise SyntaxError("Database is required!")

        def connector(uri=self.uri,m=m):
            # Connection() is deprecated; prefer MongoClient when available
            if hasattr(self.driver, "MongoClient"):
                Connection = self.driver.MongoClient
            else:
                Connection = self.driver.Connection
            return Connection(uri)[m.get('database')]

        self.reconnect(connector,cursor=False)
|
||||
|
||||
    def object_id(self, arg=None):
        """ Convert input to a valid Mongodb ObjectId instance

        Accepts an existing ObjectId (returned unchanged), an int/long,
        a decimal or base-16 string, or the literal "<random>"; falsy
        input maps to ObjectId zero.

        self.object_id("<random>") -> ObjectId (not unique) instance """
        if not arg:
            arg = 0
        if isinstance(arg, basestring):
            # we assume an integer as default input
            # a 24-char hex string (ignoring 0x/L decorations) is raw hex
            rawhex = len(arg.replace("0x", "").replace("L", "")) == 24
            if arg.isdigit() and (not rawhex):
                arg = int(arg)
            elif arg == "<random>":
                # 24 random hex digits -> pseudo-random, NOT unique
                arg = int("0x%sL" % \
                    "".join([self.random.choice("0123456789abcdef") \
                    for x in range(24)]), 0)
            elif arg.isalnum():
                if not arg.startswith("0x"):
                    arg = "0x%s" % arg
                try:
                    arg = int(arg, 0)
                except ValueError, e:
                    raise ValueError(
                            "invalid objectid argument string: %s" % e)
            else:
                raise ValueError("Invalid objectid argument string. " +
                                 "Requires an integer or base 16 value")
        elif isinstance(arg, self.ObjectId):
            return arg

        if not isinstance(arg, (int, long)):
            raise TypeError("object_id argument must be of type " +
                            "ObjectId or an objectid representable integer")
        # left-pad the hex form to the 24 digits an ObjectId requires
        hexvalue = hex(arg)[2:].rstrip('L').zfill(24)
        return self.ObjectId(hexvalue)
|
||||
|
||||
def parse_reference(self, value, field_type):
|
||||
# here we have to check for ObjectID before base parse
|
||||
if isinstance(value, self.ObjectId):
|
||||
value = long(str(value), 16)
|
||||
return super(MongoDBAdapter,
|
||||
self).parse_reference(value, field_type)
|
||||
|
||||
def parse_id(self, value, field_type):
|
||||
if isinstance(value, self.ObjectId):
|
||||
value = long(str(value), 16)
|
||||
return super(MongoDBAdapter,
|
||||
self).parse_id(value, field_type)
|
||||
|
||||
    def represent(self, obj, fieldtype):
        """Convert a python value into the form stored in MongoDB.

        dates/times become datetimes, blobs become bson Binary, and
        id/reference values become ObjectIds; everything else is handled
        by the base adapter.
        """
        # the base adapter does not support MongoDB ObjectId
        if isinstance(obj, self.ObjectId):
            value = obj
        else:
            value = NoSQLAdapter.represent(self, obj, fieldtype)
        # reference types must be converted to ObjectId
        if fieldtype =='date':
            if value is None:
                return value
            # this piece of data can be stripped off based on the fieldtype
            t = datetime.time(0, 0, 0)
            # mongodb doesn't have a date object and so it must be datetime,
            # string or integer
            return datetime.datetime.combine(value, t)
        elif fieldtype == 'time':
            if value is None:
                return value
            # this piece of data can be stripped off based on the fieldtype
            d = datetime.date(2000, 1, 1)
            # mongodb doesn't have a time object and so it must be datetime,
            # string or integer
            return datetime.datetime.combine(d, value)
        elif fieldtype == "blob":
            if value is None:
                return value
            from bson import Binary
            if not isinstance(value, Binary):
                if not isinstance(value, basestring):
                    return Binary(str(value))
                return Binary(value)
            return value
        elif (isinstance(fieldtype, basestring) and
              fieldtype.startswith('list:')):
            # each member of a list:reference becomes an ObjectId
            if fieldtype.startswith('list:reference'):
                newval = []
                for v in value:
                    newval.append(self.object_id(v))
                return newval
            return value
        elif ((isinstance(fieldtype, basestring) and
               fieldtype.startswith("reference")) or
               (isinstance(fieldtype, Table)) or fieldtype=="id"):
            value = self.object_id(value)
        return value
|
||||
|
||||
def create_table(self, table, migrate=True, fake_migrate=False,
|
||||
polymodel=None, isCapped=False):
|
||||
if isCapped:
|
||||
raise RuntimeError("Not implemented")
|
||||
|
||||
    def count(self, query, distinct=None, snapshot=True):
        """Return the number of documents matching *query* as a long.

        Implemented on top of select(count=True); COUNT DISTINCT is not
        available on this backend.
        """
        if distinct:
            raise RuntimeError("COUNT DISTINCT not supported")
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        tablename = self.get_table(query)
        return long(self.select(query,[self.db[tablename]._id], {},
                    count=True,snapshot=snapshot)['count'])
        # Maybe it would be faster if we just implemented the pymongo
        # .count() function which is probably quicker?
        # therefore call __select() connection[table].find(query).count()
        # Since this will probably reduce the return set?
|
||||
|
||||
    def expand(self, expression, field_type=None):
        """Recursively translate a DAL Query/Expression/Field/value into
        the pymongo filter-document form.

        NOTE(review): this mutates the Query in place (renames 'id'
        fields to '_id' and rewrites .second into ObjectIds) — callers
        reusing the same Query object see the mutation.
        """
        if isinstance(expression, Query):
            # any query using 'id':=
            # set name as _id (as per pymongo/mongodb primary key)
            # convert second arg to an objectid field
            # (if its not already)
            # if second arg is 0 convert to objectid
            if isinstance(expression.first,Field) and \
                    ((expression.first.type == 'id') or \
                    ("reference" in expression.first.type)):
                if expression.first.type == 'id':
                    expression.first.name = '_id'
                # cast to Mongo ObjectId
                if isinstance(expression.second, (tuple, list, set)):
                    expression.second = [self.object_id(item) for
                                         item in expression.second]
                else:
                    expression.second = self.object_id(expression.second)
                result = expression.op(expression.first, expression.second)

        if isinstance(expression, Field):
            # '_id' is the Mongo primary-key name
            if expression.type=='id':
                result = "_id"
            else:
                result = expression.name
        elif isinstance(expression, (Expression, Query)):
            # dispatch to the operator with however many args it carries
            if not expression.second is None:
                result = expression.op(expression.first, expression.second)
            elif not expression.first is None:
                result = expression.op(expression.first)
            elif not isinstance(expression.op, str):
                result = expression.op()
            else:
                result = expression.op
        elif field_type:
            # plain value with a known field type -> storage form
            result = self.represent(expression,field_type)
        elif isinstance(expression,(list,tuple)):
            result = [self.represent(item,field_type) for
                      item in expression]
        else:
            result = expression
        return result
|
||||
|
||||
def drop(self, table, mode=''):
|
||||
ctable = self.connection[table._tablename]
|
||||
ctable.drop()
|
||||
|
||||
def truncate(self, table, mode, safe=None):
|
||||
if safe == None:
|
||||
safe=self.safe
|
||||
ctable = self.connection[table._tablename]
|
||||
ctable.remove(None, safe=True)
|
||||
|
||||
    def select(self, query, fields, attributes, count=False,
               snapshot=False):
        """Run a find() translated from the DAL query.

        With count=True returns {'count': n}; otherwise returns parsed
        rows. Supports the 'limitby' and 'orderby' attributes; other
        attributes are warned about and ignored.
        """
        mongofields_dict = self.SON()
        mongoqry_dict = {}
        new_fields, mongosort_list = [], []
        # try an orderby attribute
        orderby = attributes.get('orderby', False)
        limitby = attributes.get('limitby', False)
        # distinct = attributes.get('distinct', False)
        if 'for_update' in attributes:
            logging.warn('mongodb does not support for_update')
        for key in set(attributes.keys())-set(('limitby',
                                               'orderby','for_update')):
            if attributes[key] is not None:
                logging.warn('select attribute not implemented: %s' % key)
        if limitby:
            limitby_skip, limitby_limit = limitby[0], int(limitby[1])
        else:
            limitby_skip = limitby_limit = 0
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            # !!!! need to add 'random'
            # a leading '-' marks a descending sort key
            for f in self.expand(orderby).split(','):
                if f.startswith('-'):
                    mongosort_list.append((f[1:], -1))
                else:
                    mongosort_list.append((f, 1))
        # expand SQLALL ("table.*") into the table's concrete fields
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item._table
            else:
                new_fields.append(item)
        fields = new_fields
        if isinstance(query,Query):
            tablename = self.get_table(query)
        elif len(fields) != 0:
            tablename = fields[0].tablename
        else:
            raise SyntaxError("The table name could not be found in " +
                              "the query nor from the select statement.")
        mongoqry_dict = self.expand(query)
        fields = fields or self.db[tablename]
        # projection document: include each requested field
        for field in fields:
            mongofields_dict[field.name] = 1
        ctable = self.connection[tablename]
        if count:
            return {'count' : ctable.find(
                    mongoqry_dict, mongofields_dict,
                    skip=limitby_skip, limit=limitby_limit,
                    sort=mongosort_list, snapshot=snapshot).count()}
        else:
            # pymongo cursor object
            mongo_list_dicts = ctable.find(mongoqry_dict,
                                mongofields_dict, skip=limitby_skip,
                                limit=limitby_limit, sort=mongosort_list,
                                snapshot=snapshot)
        rows = []
        # populate row in proper order
        # Here we replace ._id with .id to follow the standard naming
        colnames = []
        newnames = []
        for field in fields:
            colname = str(field)
            colnames.append(colname)
            tablename, fieldname = colname.split(".")
            if fieldname == "_id":
                # Mongodb reserved uuid key
                field.name = "id"
            newnames.append(".".join((tablename, field.name)))

        for record in mongo_list_dicts:
            row=[]
            for colname in colnames:
                tablename, fieldname = colname.split(".")
                # switch to Mongo _id uuids for retrieving
                # record id's
                if fieldname == "id": fieldname = "_id"
                # missing keys (sparse documents) become None
                if fieldname in record:
                    value = record[fieldname]
                else:
                    value = None
                row.append(value)
            rows.append(row)
        processor = attributes.get('processor', self.parse)
        result = processor(rows, fields, newnames, False)
        return result
|
||||
|
||||
    def insert(self, table, fields, safe=None):
        """Insert a document built from *fields* and return the new id
        as a long.

        Safe determines whether an asynchronous request is done or a
        synchronous action is done; for safety we use synchronous
        requests by default (self.safe).
        """
        values = dict()
        if safe is None:
            safe = self.safe
        ctable = self.connection[table._tablename]
        for k, v in fields:
            # 'id' is generated by Mongo; 'safe' is not a real column
            if not k.name in ["id", "safe"]:
                fieldname = k.name
                fieldtype = table[k.name].type
                values[fieldname] = self.represent(v, fieldtype)

        # pymongo adds the generated '_id' key to *values* in place
        ctable.insert(values, safe=safe)
        return long(str(values['_id']), 16)
|
||||
|
||||
def update(self, tablename, query, fields, safe=None):
|
||||
if safe == None:
|
||||
safe = self.safe
|
||||
# return amount of adjusted rows or zero, but no exceptions
|
||||
# @ related not finding the result
|
||||
if not isinstance(query, Query):
|
||||
raise RuntimeError("Not implemented")
|
||||
amount = self.count(query, False)
|
||||
if not isinstance(query, Query):
|
||||
raise SyntaxError("Not Supported")
|
||||
filter = None
|
||||
if query:
|
||||
filter = self.expand(query)
|
||||
# do not try to update id fields to avoid backend errors
|
||||
modify = {'$set': dict((k.name, self.represent(v, k.type)) for
|
||||
k, v in fields if (not k.name in ("_id", "id")))}
|
||||
try:
|
||||
result = self.connection[tablename].update(filter,
|
||||
modify, multi=True, safe=safe)
|
||||
if safe:
|
||||
try:
|
||||
# if result count is available fetch it
|
||||
return result["n"]
|
||||
except (KeyError, AttributeError, TypeError):
|
||||
return amount
|
||||
else:
|
||||
return amount
|
||||
except Exception, e:
|
||||
# TODO Reverse update query to verifiy that the query succeded
|
||||
raise RuntimeError("uncaught exception when updating rows: %s" % e)
|
||||
|
||||
def delete(self, tablename, query, safe=None):
|
||||
if safe is None:
|
||||
safe = self.safe
|
||||
amount = 0
|
||||
amount = self.count(query, False)
|
||||
if not isinstance(query, Query):
|
||||
raise RuntimeError("query type %s is not supported" % \
|
||||
type(query))
|
||||
filter = self.expand(query)
|
||||
self.connection[tablename].remove(filter, safe=safe)
|
||||
return amount
|
||||
|
||||
def bulk_insert(self, table, items):
|
||||
return [self.insert(table,item) for item in items]
|
||||
|
||||
## OPERATORS
|
||||
def INVERT(self, first):
|
||||
#print "in invert first=%s" % first
|
||||
return '-%s' % self.expand(first)
|
||||
|
||||
# TODO This will probably not work:(
|
||||
def NOT(self, first):
|
||||
return {'$not': self.expand(first)}
|
||||
|
||||
def AND(self,first,second):
|
||||
# pymongo expects: .find({'$and': [{'x':'1'}, {'y':'2'}]})
|
||||
return {'$and': [self.expand(first),self.expand(second)]}
|
||||
|
||||
def OR(self,first,second):
|
||||
# pymongo expects: .find({'$or': [{'name':'1'}, {'name':'2'}]})
|
||||
return {'$or': [self.expand(first),self.expand(second)]}
|
||||
|
||||
    def BELONGS(self, first, second):
        """Membership test via Mongo's $in operator."""
        if isinstance(second, str):
            # NOTE(review): the string case strips the last character of
            # *second* — presumably a trailing delimiter from a nested
            # select; confirm against callers before changing
            return {self.expand(first) : {"$in" : [ second[:-1]]} }
        elif second==[] or second==() or second==set():
            # empty collection can never match: always-false filter
            return {1:0}
        items = [self.expand(item, first.type) for item in second]
        return {self.expand(first) : {"$in" : items} }
|
||||
|
||||
def EQ(self,first,second=None):
|
||||
result = {}
|
||||
result[self.expand(first)] = self.expand(second)
|
||||
return result
|
||||
|
||||
def NE(self, first, second=None):
|
||||
result = {}
|
||||
result[self.expand(first)] = {'$ne': self.expand(second)}
|
||||
return result
|
||||
|
||||
def LT(self,first,second=None):
|
||||
if second is None:
|
||||
raise RuntimeError("Cannot compare %s < None" % first)
|
||||
result = {}
|
||||
result[self.expand(first)] = {'$lt': self.expand(second)}
|
||||
return result
|
||||
|
||||
def LE(self,first,second=None):
|
||||
if second is None:
|
||||
raise RuntimeError("Cannot compare %s <= None" % first)
|
||||
result = {}
|
||||
result[self.expand(first)] = {'$lte': self.expand(second)}
|
||||
return result
|
||||
|
||||
def GT(self,first,second):
|
||||
result = {}
|
||||
result[self.expand(first)] = {'$gt': self.expand(second)}
|
||||
return result
|
||||
|
||||
def GE(self,first,second=None):
|
||||
if second is None:
|
||||
raise RuntimeError("Cannot compare %s >= None" % first)
|
||||
result = {}
|
||||
result[self.expand(first)] = {'$gte': self.expand(second)}
|
||||
return result
|
||||
|
||||
def ADD(self, first, second):
|
||||
raise NotImplementedError(self.error_messages["javascript_needed"])
|
||||
return '%s + %s' % (self.expand(first),
|
||||
self.expand(second, first.type))
|
||||
|
||||
def SUB(self, first, second):
|
||||
raise NotImplementedError(self.error_messages["javascript_needed"])
|
||||
return '(%s - %s)' % (self.expand(first),
|
||||
self.expand(second, first.type))
|
||||
|
||||
def MUL(self, first, second):
|
||||
raise NotImplementedError(self.error_messages["javascript_needed"])
|
||||
return '(%s * %s)' % (self.expand(first),
|
||||
self.expand(second, first.type))
|
||||
|
||||
def DIV(self, first, second):
|
||||
raise NotImplementedError(self.error_messages["javascript_needed"])
|
||||
return '(%s / %s)' % (self.expand(first),
|
||||
self.expand(second, first.type))
|
||||
|
||||
def MOD(self, first, second):
|
||||
raise NotImplementedError(self.error_messages["javascript_needed"])
|
||||
return '(%s %% %s)' % (self.expand(first),
|
||||
self.expand(second, first.type))
|
||||
|
||||
def AS(self, first, second):
|
||||
raise NotImplementedError(self.error_messages["javascript_needed"])
|
||||
return '%s AS %s' % (self.expand(first), second)
|
||||
|
||||
# We could implement an option that simulates a full featured SQL
|
||||
# database. But I think the option should be set explicit or
|
||||
# implemented as another library.
|
||||
def ON(self, first, second):
|
||||
raise NotImplementedError("This is not possible in NoSQL" +
|
||||
" but can be simulated with a wrapper.")
|
||||
return '%s ON %s' % (self.expand(first), self.expand(second))
|
||||
|
||||
# BLOW ARE TWO IMPLEMENTATIONS OF THE SAME FUNCITONS
|
||||
# WHICH ONE IS BEST?
|
||||
|
||||
def COMMA(self, first, second):
|
||||
return '%s, %s' % (self.expand(first), self.expand(second))
|
||||
|
||||
def LIKE(self, first, second):
|
||||
#escaping regex operators?
|
||||
return {self.expand(first): ('%s' % \
|
||||
self.expand(second, 'string').replace('%','/'))}
|
||||
|
||||
def ILIKE(self, first, second):
|
||||
val = second if isinstance(second,self.ObjectId) else {
|
||||
'$regex': second.replace('%', ''), '$options': 'i'}
|
||||
return {self.expand(first): val}
|
||||
|
||||
def STARTSWITH(self, first, second):
|
||||
#escaping regex operators?
|
||||
return {self.expand(first): ('/^%s/' % \
|
||||
self.expand(second, 'string'))}
|
||||
|
||||
def ENDSWITH(self, first, second):
|
||||
#escaping regex operators?
|
||||
return {self.expand(first): ('/%s^/' % \
|
||||
self.expand(second, 'string'))}
|
||||
|
||||
def CONTAINS(self, first, second, case_sensitive=False):
|
||||
# silently ignore, only case sensitive
|
||||
# There is a technical difference, but mongodb doesn't support
|
||||
# that, but the result will be the same
|
||||
val = second if isinstance(second,self.ObjectId) else \
|
||||
{'$regex':".*" + re.escape(self.expand(second, 'string')) + ".*"}
|
||||
return {self.expand(first) : val}
|
||||
|
||||
def LIKE(self, first, second):
|
||||
import re
|
||||
return {self.expand(first): {'$regex': \
|
||||
re.escape(self.expand(second,
|
||||
'string')).replace('%','.*')}}
|
||||
|
||||
#TODO verify full compatibilty with official SQL Like operator
|
||||
def STARTSWITH(self, first, second):
|
||||
#TODO Solve almost the same problem as with endswith
|
||||
import re
|
||||
return {self.expand(first): {'$regex' : '^' +
|
||||
re.escape(self.expand(second,
|
||||
'string'))}}
|
||||
|
||||
#TODO verify full compatibilty with official SQL Like operator
|
||||
def ENDSWITH(self, first, second):
|
||||
#escaping regex operators?
|
||||
#TODO if searched for a name like zsa_corbitt and the function
|
||||
# is endswith('a') then this is also returned.
|
||||
# Aldo it end with a t
|
||||
import re
|
||||
return {self.expand(first): {'$regex': \
|
||||
re.escape(self.expand(second, 'string')) + '$'}}
|
||||
|
||||
#TODO verify full compatibilty with official oracle contains operator
|
||||
def CONTAINS(self, first, second, case_sensitive=False):
|
||||
# silently ignore, only case sensitive
|
||||
#There is a technical difference, but mongodb doesn't support
|
||||
# that, but the result will be the same
|
||||
#TODO contains operators need to be transformed to Regex
|
||||
return {self.expand(first) : {'$regex': \
|
||||
".*" + re.escape(self.expand(second, 'string')) + ".*"}}
|
||||
|
||||
@@ -0,0 +1,513 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
|
||||
from .._globals import IDENTITY, LOGGER
|
||||
from ..helpers.methods import varquote_aux
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class MSSQLAdapter(BaseAdapter):
    """Adapter for Microsoft SQL Server via pyodbc."""
    drivers = ('pyodbc',)
    # separator between date and time in ISO datetime literals
    T_SEP = 'T'

    QUOTE_TEMPLATE = '"%s"'

    # web2py field type -> T-SQL column DDL
    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
    }
|
||||
|
||||
def concat_add(self,tablename):
|
||||
return '; ALTER TABLE %s ADD ' % tablename
|
||||
|
||||
def varquote(self,name):
|
||||
return varquote_aux(name,'[%s]')
|
||||
|
||||
def EXTRACT(self,field,what):
|
||||
return "DATEPART(%s,%s)" % (what, self.expand(field))
|
||||
|
||||
def LEFT_JOIN(self):
|
||||
return 'LEFT OUTER JOIN'
|
||||
|
||||
def RANDOM(self):
|
||||
return 'NEWID()'
|
||||
|
||||
def ALLOW_NULL(self):
|
||||
return ' NULL'
|
||||
|
||||
def CAST(self, first, second):
|
||||
return first # apparently no cast necessary in MSSQL
|
||||
|
||||
def SUBSTRING(self,field,parameters):
|
||||
return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
|
||||
|
||||
def PRIMARY_KEY(self,key):
|
||||
return 'PRIMARY KEY CLUSTERED (%s)' % key
|
||||
|
||||
def AGGREGATE(self, first, what):
|
||||
if what == 'LENGTH':
|
||||
what = 'LEN'
|
||||
return "%s(%s)" % (what, self.expand(first))
|
||||
|
||||
|
||||
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
|
||||
if limitby:
|
||||
(lmin, lmax) = limitby
|
||||
sql_s += ' TOP %i' % lmax
|
||||
return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
|
||||
|
||||
    # T-SQL renders booleans as BIT literals
    TRUE = 1
    FALSE = 0

    # whole connection string treated as an ODBC DSN name
    REGEX_DSN = re.compile('^(?P<dsn>.+)$')
    # user:password@host:port/db?arg1=v1&arg2=v2 form
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
    # one key=value pair inside the urlargs query string
    REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
|
||||
|
||||
    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        """Build a pyodbc connection string from *uri*.

        Two uri shapes are accepted: a bare DSN name ('mssql://mydsn')
        or full credentials ('mssql://user:pass@host:port/db?DRIVER=...').
        Raises SyntaxError on malformed uris.
        """
        self.db = db
        self.dbengine = "mssql"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()
        # ## read: http://bytes.com/groups/python/460325-cx_oracle-utf8
        ruri = uri.split('://',1)[1]
        if '@' not in ruri:
            # no credentials -> treat the remainder as a DSN name
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Parsing uri string(%s) has no result' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('NdGpatch error')
                raise e
            # was cnxn = 'DSN=%s' % dsn
            cnxn = dsn
        else:
            m = self.REGEX_URI.match(ruri)
            if not m:
                raise SyntaxError(
                    "Invalid URI string in DAL: %s" % self.uri)
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError('User required')
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError('Host name required')
            db = m.group('db')
            if not db:
                raise SyntaxError('Database name required')
            # 1433 is the SQL Server default port
            port = m.group('port') or '1433'
            # Parse the optional url name-value arg pairs after the '?'
            # (in the form of arg1=value1&arg2=value2&...)
            # Default values (drivers like FreeTDS insist on uppercase parameter keys)
            argsdict = { 'DRIVER':'{SQL Server}' }
            urlargs = m.group('urlargs') or ''
            for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
                argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
            urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
            cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
                % (host, port, db, user, password, urlargs)
        def connector(cnxn=cnxn,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
|
||||
|
||||
    def lastrowid(self,table):
        """Return the identity value generated by the last INSERT.

        SCOPE_IDENTITY() is used rather than @@IDENTITY so values
        generated by triggers in other scopes are not returned.
        """
        #self.execute('SELECT @@IDENTITY;')
        self.execute('SELECT SCOPE_IDENTITY();')
        return long(self.cursor.fetchone()[0])
|
||||
|
||||
def rowslice(self,rows,minimum=0,maximum=None):
|
||||
if maximum is None:
|
||||
return rows[minimum:]
|
||||
return rows[minimum:maximum]
|
||||
|
||||
def EPOCH(self, first):
|
||||
return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
|
||||
|
||||
def CONCAT(self, *items):
|
||||
return '(%s)' % ' + '.join(self.expand(x,'string') for x in items)
|
||||
|
||||
    # GIS Spatial Extensions
    # Each predicate compares the method result to 1 because the ST*
    # methods return BIT, not a boolean expression.

    # No STAsGeoJSON in MSSQL

    def ST_ASTEXT(self, first):
        # WKT text of a spatial value
        return '%s.STAsText()' %(self.expand(first))

    def ST_CONTAINS(self, first, second):
        return '%s.STContains(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def ST_DISTANCE(self, first, second):
        # numeric distance, not a predicate — no '=1' comparison
        return '%s.STDistance(%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_EQUALS(self, first, second):
        return '%s.STEquals(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def ST_INTERSECTS(self, first, second):
        return '%s.STIntersects(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def ST_OVERLAPS(self, first, second):
        return '%s.STOverlaps(%s)=1' %(self.expand(first), self.expand(second, first.type))

    # no STSimplify in MSSQL

    def ST_TOUCHES(self, first, second):
        return '%s.STTouches(%s)=1' %(self.expand(first), self.expand(second, first.type))

    def ST_WITHIN(self, first, second):
        return '%s.STWithin(%s)=1' %(self.expand(first), self.expand(second, first.type))
|
||||
|
||||
def represent(self, obj, fieldtype):
|
||||
field_is_type = fieldtype.startswith
|
||||
if field_is_type('geometry'):
|
||||
srid = 0 # MS SQL default srid for geometry
|
||||
geotype, parms = fieldtype[:-1].split('(')
|
||||
if parms:
|
||||
srid = parms
|
||||
return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
|
||||
elif fieldtype == 'geography':
|
||||
srid = 4326 # MS SQL default srid for geography
|
||||
geotype, parms = fieldtype[:-1].split('(')
|
||||
if parms:
|
||||
srid = parms
|
||||
return "geography::STGeomFromText('%s',%s)" %(obj, srid)
|
||||
# else:
|
||||
# raise SyntaxError('Invalid field type %s' %fieldtype)
|
||||
return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
|
||||
return BaseAdapter.represent(self, obj, fieldtype)
|
||||
|
||||
|
||||
class MSSQL3Adapter(MSSQLAdapter):
    """Experimental support for pagination in MSSQL

    Requires MSSQL >= 2005, uses `ROW_NUMBER()`
    """

    # same mapping as MSSQLAdapter but TEXT/NTEXT columns are replaced
    # with VARCHAR(MAX) and time gains sub-second precision
    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'VARCHAR(MAX)',
        'json': 'VARCHAR(MAX)',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'TIME(7)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'VARCHAR(MAX)',
        'list:string': 'VARCHAR(MAX)',
        'list:reference': 'VARCHAR(MAX)',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
    }
|
||||
|
||||
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Server-side pagination: TOP for offset 0, otherwise a
        ROW_NUMBER() window wrapped in a subquery.

        NOTE(review): splitting sql_o on the literal 'ORDER BY ' is
        fragile — it presumes the orderby clause was rendered exactly
        that way upstream.
        """
        if limitby:
            (lmin, lmax) = limitby
            if lmin == 0:
                # no offset: plain TOP is cheapest
                sql_s += ' TOP %i' % lmax
                return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
            # BETWEEN is inclusive, so bump the lower bound to 1-based
            lmin += 1
            # split sql_o into the ORDER BY keys and whatever precedes them
            sql_o_inner = sql_o[sql_o.find('ORDER BY ')+9:]
            sql_g_inner = sql_o[:sql_o.find('ORDER BY ')]
            # alias every selected column f_0, f_1, ... so the outer query
            # can reference them without re-evaluating expressions
            sql_f_outer = ['f_%s' % f for f in range(len(sql_f.split(',')))]
            sql_f_inner = [f for f in sql_f.split(',')]
            sql_f_iproxy = ['%s AS %s' % (o, n) for (o, n) in zip(sql_f_inner, sql_f_outer)]
            sql_f_iproxy = ', '.join(sql_f_iproxy)
            sql_f_oproxy = ', '.join(sql_f_outer)
            return 'SELECT %s %s FROM (SELECT %s ROW_NUMBER() OVER (ORDER BY %s) AS w_row, %s FROM %s%s%s) TMP WHERE w_row BETWEEN %i AND %s;' % (sql_s,sql_f_oproxy,sql_s,sql_f,sql_f_iproxy,sql_t,sql_w,sql_g_inner,lmin,lmax)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s,sql_f,sql_t,sql_w,sql_o)
|
||||
def rowslice(self,rows,minimum=0,maximum=None):
|
||||
return rows
|
||||
|
||||
|
||||
class MSSQL4Adapter(MSSQLAdapter):
    """Support for "native" pagination

    Requires MSSQL >= 2012, uses `OFFSET ... ROWS ... FETCH NEXT ... ROWS ONLY`
    """

    # same mapping as MSSQLAdapter but TEXT/NTEXT columns are replaced
    # with VARCHAR(MAX) and time gains sub-second precision
    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'VARCHAR(MAX)',
        'json': 'VARCHAR(MAX)',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'TIME(7)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'VARCHAR(MAX)',
        'list:string': 'VARCHAR(MAX)',
        'list:reference': 'VARCHAR(MAX)',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
    }
|
||||
|
||||
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Native pagination: TOP for offset 0, otherwise
        OFFSET ... ROWS FETCH NEXT ... ROWS ONLY (requires an ORDER BY).
        """
        if limitby:
            (lmin, lmax) = limitby
            if lmin == 0:
                #top is still slightly faster, especially because
                #web2py's default to fetch references is to not specify
                #an orderby clause
                sql_s += ' TOP %i' % lmax
            else:
                if not sql_o:
                    #if there is no orderby, we can't use the brand new statements
                    #that being said, developer chose its own poison, so be it random
                    sql_o += ' ORDER BY %s' % self.RANDOM()
                sql_o += ' OFFSET %i ROWS FETCH NEXT %i ROWS ONLY' % (lmin, lmax - lmin)
        return 'SELECT %s %s FROM %s%s%s;' % \
            (sql_s, sql_f, sql_t, sql_w, sql_o)
|
||||
|
||||
def rowslice(self,rows,minimum=0,maximum=None):
|
||||
return rows
|
||||
|
||||
|
||||
class MSSQL2Adapter(MSSQLAdapter):
    """MSSQL adapter that stores text in Unicode (NVARCHAR/NTEXT) columns."""
    drivers = ('pyodbc',)

    # DAL field type -> SQL Server column-type template (Unicode variants).
    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NTEXT',
        'json': 'NTEXT',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'NVARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'NTEXT',
        'list:string': 'NTEXT',
        'list:reference': 'NTEXT',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def represent(self, obj, fieldtype):
        """Prefix quoted string literals with N so MSSQL treats them as
        Unicode rather than the connection's 8-bit codepage."""
        value = BaseAdapter.represent(self, obj, fieldtype)
        if fieldtype in ('string','text', 'json') and value[:1]=="'":
            value = 'N'+value
        return value

    def execute(self,a):
        # NOTE(review): assumes `a` is a utf8-encoded byte string
        # (Python 2-era code) — decodes to unicode before executing.
        return self.log_execute(a.decode('utf8'))
|
||||
|
||||
|
||||
class VerticaAdapter(MSSQLAdapter):
    """Adapter for HP Vertica, connected through pyodbc."""
    drivers = ('pyodbc',)
    # Separator between date and time in datetime literals.
    T_SEP = ' '

    # DAL field type -> Vertica column-type template.
    types = {
        'boolean': 'BOOLEAN',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BYTEA',
        'json': 'VARCHAR(%(length)s)',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'IDENTITY',
        'reference': 'INT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BYTEA',
        'list:string': 'BYTEA',
        'list:reference': 'BYTEA',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def EXTRACT(self, first, what):
        # Vertica exposes date-part extraction via DATE_PART().
        return "DATE_PART('%s', TIMESTAMP %s)" % (what, self.expand(first))

    def _truncate(self, table, mode=''):
        """Return the statement list that empties *table*."""
        tablename = table._tablename
        return ['TRUNCATE %s %s;' % (tablename, mode or '')]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Assemble a SELECT, translating limitby into LIMIT/OFFSET."""
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % \
            (sql_s, sql_f, sql_t, sql_w, sql_o)

    def lastrowid(self,table):
        # Vertica supports LAST_INSERT_ID() for the most recent IDENTITY value.
        # `long` is a Python 2 builtin, consistent with the rest of this file.
        self.execute('SELECT LAST_INSERT_ID();')
        return long(self.cursor.fetchone()[0])

    def execute(self, a):
        # No pre-processing needed; pass straight to the logging executor.
        return self.log_execute(a)
|
||||
|
||||
|
||||
class SybaseAdapter(MSSQLAdapter):
    """Adapter for Sybase ASE via the `Sybase` driver."""
    drivers = ('Sybase',)

    # DAL field type -> Sybase column-type template.
    types = {
        'boolean': 'BIT',
        'string': 'CHAR VARYING(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR VARYING(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'CHAR VARYING(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        """Parse the DAL URI and prepare (optionally open) the connection.

        Accepts either a raw DSN (no '@' in the URI) or a
        user:password@host:port/db form.
        """
        self.db = db
        self.dbengine = "sybase"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()
        # ## read: http://bytes.com/groups/python/460325-cx_oracle-utf8
        ruri = uri.split('://',1)[1]
        if '@' not in ruri:
            # DSN-style URI: extract the DSN directly.
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Parsing uri string(%s) has no result' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('NdGpatch error')
                raise e
        else:
            # Credentials-style URI: user:password@host:port/db.
            m = self.REGEX_URI.match(uri)
            if not m:
                raise SyntaxError(
                    "Invalid URI string in DAL: %s" % self.uri)
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError('User required')
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError('Host name required')
            db = m.group('db')
            if not db:
                raise SyntaxError('Database name required')
            port = m.group('port') or '1433'

            dsn = 'sybase:host=%s:%s;dbname=%s' % (host,port,db)

            # NOTE(review): user/password were already passed through
            # credential_decoder above, so they are decoded twice here.
            # Harmless with the IDENTITY default decoder — confirm intent.
            driver_args.update(user = credential_decoder(user),
                               password = credential_decoder(password))

        def connector(dsn=dsn,driver_args=driver_args):
            # Late-bound factory so the pool can reopen connections.
            return self.driver.connect(dsn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
|
||||
|
||||
@@ -0,0 +1,140 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from ..helpers.methods import varquote_aux
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class MySQLAdapter(BaseAdapter):
    """Adapter for MySQL through MySQLdb, pymysql or mysql-connector."""
    drivers = ('MySQLdb','pymysql', 'mysqlconnector')

    # MySQL auto-commits DDL, so the DAL must commit around ALTER TABLE.
    commit_on_alter_table = True
    # XA two-phase commit is available (see the XA methods below).
    support_distributed_transaction = True
    # DAL field type -> MySQL column-type template.
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'json': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        'big-id': 'BIGINT AUTO_INCREMENT NOT NULL',
        'big-reference': 'BIGINT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT `FK_%(constraint_name)s` FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    # MySQL quotes identifiers with backticks.
    QUOTE_TEMPLATE = "`%s`"

    def varquote(self,name):
        # Backtick-quote only when the identifier requires it.
        return varquote_aux(name,'`%s`')

    def RANDOM(self):
        return 'RAND()'

    def SUBSTRING(self,field,parameters):
        # parameters = (start, length), 1-based as in SQL.
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field),
                                        parameters[0], parameters[1])

    def EPOCH(self, first):
        return "UNIX_TIMESTAMP(%s)" % self.expand(first)

    def CONCAT(self, *items):
        return 'CONCAT(%s)' % ','.join(self.expand(x,'string') for x in items)

    def REGEXP(self,first,second):
        return '(%s REGEXP %s)' % (self.expand(first),
                                   self.expand(second,'string'))

    def CAST(self, first, second):
        # MySQL's CAST cannot target LONGTEXT; CHAR is the closest type.
        if second=='LONGTEXT': second = 'CHAR'
        return 'CAST(%s AS %s)' % (first, second)

    def _drop(self,table,mode):
        # breaks db integrity but without this mysql does not drop table
        table_rname = table.sqlsafe
        return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table_rname,
                'SET FOREIGN_KEY_CHECKS=1;']

    def _insert_empty(self, table):
        # Insert a row consisting only of defaults (used for empty inserts).
        return 'INSERT INTO %s VALUES (DEFAULT);' % (table.sqlsafe)

    # --- XA distributed-transaction hooks -------------------------------
    def distributed_transaction_begin(self,key):
        self.execute('XA START;')

    def prepare(self,key):
        self.execute("XA END;")
        self.execute("XA PREPARE;")

    def commit_prepared(self,key):
        self.execute("XA COMMIT;")

    def rollback_prepared(self,key):
        self.execute("XA ROLLBACK;")

    # user:password@host:port/db?set_encoding=charset
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Parse the DAL URI into driver keyword args and (optionally)
        open the first connection."""
        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = int(m.group('port') or '3306')
        charset = m.group('charset') or 'utf8'
        # NOTE(review): user/password already went through
        # credential_decoder above, so they are decoded twice here.
        driver_args.update(db=db,
                           user=credential_decoder(user),
                           passwd=credential_decoder(password),
                           host=host,
                           port=port,
                           charset=charset)

        def connector(driver_args=driver_args):
            # Late-bound factory so the pool can reopen connections.
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # Enforce referential integrity and disable backslash escapes so
        # the DAL's standard quoting rules apply.
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def lastrowid(self,table):
        self.execute('select last_insert_id();')
        return int(self.cursor.fetchone()[0])
|
||||
@@ -0,0 +1,191 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import base64
|
||||
import datetime
|
||||
import re
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from .._load import cx_Oracle
|
||||
from .base import BaseAdapter
|
||||
|
||||
class OracleAdapter(BaseAdapter):
    """Adapter for Oracle via cx_Oracle.

    Emulates auto-increment ids with a per-table sequence + trigger and
    rewrites CLOB literals into bind variables (see execute()).
    """
    drivers = ('cx_Oracle',)

    commit_on_alter_table = False
    # DAL field type -> Oracle column-type template.
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'bigint': 'NUMBER',
        'float': 'FLOAT',
        'double': 'BINARY_DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'NUMBER PRIMARY KEY',
        'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def trigger_name(self,tablename):
        # Name of the id-assignment trigger for *tablename*.
        return '%s_trigger' % tablename

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'dbms_random.value'

    def NOT_NULL(self,default,field_type):
        # Oracle requires DEFAULT before NOT NULL.
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def REGEXP(self, first, second):
        return 'REGEXP_LIKE(%s, %s)' % (self.expand(first),
                                        self.expand(second, 'string'))

    def _drop(self,table,mode):
        # Drop the table and its id-generating sequence together.
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table.sqlsafe, mode), 'DROP SEQUENCE %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Assemble a SELECT, emulating limit/offset with nested ROWNUM
        subqueries (pre-12c Oracle has no native OFFSET/FETCH)."""
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def constraint_name(self, tablename, fieldname):
        # Oracle identifiers are limited to 30 characters; shorten if needed.
        constraint_name = BaseAdapter.constraint_name(self, tablename, fieldname)
        if len(constraint_name)>30:
            constraint_name = '%s_%s__constraint' % (tablename[:10], fieldname[:7])
        return constraint_name

    def represent_exceptions(self, obj, fieldtype):
        """Oracle-specific literal rendering; returns None to fall back
        to the generic representation."""
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            # :CLOB('...') is a placeholder rewritten into a bind
            # variable by execute() below.
            return ":CLOB('%s')" % obj
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T',' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Store connection settings and (optionally) open the connection.
        The part of the URI after 'oracle://' is passed verbatim to
        cx_Oracle.connect()."""
        self.db = db
        self.dbengine = "oracle"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        if not 'threaded' in driver_args:
            driver_args['threaded']=True
        def connector(uri=ruri,driver_args=driver_args):
            return self.driver.connect(uri,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # Pin the session date formats so to_date()-free literals parse.
        self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
        self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")

    # Matches the next :CLOB('...') placeholder outside quoted strings.
    oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")

    def execute(self, command, args=None):
        """Execute *command*, first rewriting every :CLOB('...') literal
        into a numbered bind variable with its payload in *args*."""
        args = args or []
        i = 1
        while True:
            m = self.oracle_fix.match(command)
            if not m:
                break
            # Replace the placeholder with :<i> and collect the payload,
            # undoing the '' escaping used inside the literal.
            command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
            args.append(m.group('clob')[6:-2].replace("''", "'"))
            i += 1
        # cx_Oracle rejects a trailing semicolon on single statements.
        if command[-1:]==';':
            command = command[:-1]
        return self.log_execute(command, args)

    def create_sequence_and_triggers(self, query, table, **args):
        """Run *query* (a CREATE TABLE) then install the sequence and
        BEFORE INSERT trigger that emulate an auto-increment id."""
        tablename = table._rname or table._tablename
        id_name = table._id.name
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        self.execute(query)
        self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE MINVALUE -1;' % sequence_name)
        # The trigger also resynchronises the sequence when a caller
        # inserts an explicit id ahead of the current sequence value.
        self.execute("""
CREATE OR REPLACE TRIGGER %(trigger_name)s BEFORE INSERT ON %(tablename)s FOR EACH ROW
DECLARE
curr_val NUMBER;
diff_val NUMBER;
PRAGMA autonomous_transaction;
BEGIN
IF :NEW.%(id)s IS NOT NULL THEN
EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
diff_val := :NEW.%(id)s - curr_val - 1;
IF diff_val != 0 THEN
EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by '|| diff_val;
EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by 1';
END IF;
END IF;
SELECT %(sequence_name)s.nextval INTO :NEW.%(id)s FROM DUAL;
END;
""" % dict(trigger_name=trigger_name, tablename=tablename,
           sequence_name=sequence_name,id=id_name))

    def lastrowid(self,table):
        # currval of the table's sequence is the id just assigned.
        # `long` is a Python 2 builtin, consistent with the rest of this file.
        sequence_name = table._sequence_name
        self.execute('SELECT %s.currval FROM dual;' % sequence_name)
        return long(self.cursor.fetchone()[0])

    #def parse_value(self, value, field_type, blob_decode=True):
    #    if blob_decode and isinstance(value, cx_Oracle.LOB):
    #        try:
    #            value = value.read()
    #        except self.driver.ProgrammingError:
    #            # After a subsequent fetch the LOB value is not valid anymore
    #            pass
    #    return BaseAdapter.parse_value(self, value, field_type, blob_decode)

    def _fetchall(self):
        # LOB handles become invalid after the next fetch, so read them
        # eagerly while iterating the cursor.
        if any(x[1]==cx_Oracle.LOB for x in self.cursor.description):
            return [tuple([(c.read() if type(c) == cx_Oracle.LOB else c) \
                for c in r]) for r in self.cursor]
        else:
            return self.cursor.fetchall()

    def sqlsafe_table(self, tablename, ot=None):
        """Return the quoted table reference; *ot* is an optional
        original-table alias rendered before the name."""
        if ot is not None:
            return (self.QUOTE_TEMPLATE + ' ' \
                    + self.QUOTE_TEMPLATE) % (ot, tablename)
        return self.QUOTE_TEMPLATE % tablename
|
||||
@@ -0,0 +1,421 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
|
||||
from .._load import psycopg2_adapt
|
||||
from .._globals import IDENTITY, LOGGER
|
||||
from ..helpers.methods import varquote_aux
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class PostgreSQLAdapter(BaseAdapter):
    """Adapter for PostgreSQL via psycopg2 or pg8000.

    Uses ``INSERT ... RETURNING`` for last-insert-id retrieval, supports
    two-phase commit (PREPARE TRANSACTION) and PostGIS spatial operators.
    """
    drivers = ('psycopg2','pg8000')

    # PostgreSQL quotes identifiers with double quotes.
    QUOTE_TEMPLATE = '"%s"'

    support_distributed_transaction = True
    # DAL field type -> PostgreSQL column-type template.
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'GEOMETRY',
        'geography': 'GEOGRAPHY',
        'big-id': 'BIGSERIAL PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT "FK_%(constraint_name)s" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT "FK_%(foreign_table)s_PK" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        }

    def varquote(self,name):
        # Double-quote only when the identifier requires it.
        return varquote_aux(name,'"%s"')

    def adapt(self,obj):
        """Render *obj* as a safely-quoted SQL literal, delegating to the
        driver's own quoting when available."""
        if self.driver_name == 'psycopg2':
            return psycopg2_adapt(obj).getquoted()
        elif self.driver_name == 'pg8000':
            # pg8000 uses %-style paramstyle, so literal % must be doubled.
            return "'%s'" % str(obj).replace("%","%%").replace("'","''")
        else:
            return "'%s'" % str(obj).replace("'","''")

    def sequence_name(self,table):
        # Default sequence created by SERIAL/BIGSERIAL columns.
        return self.QUOTE_TEMPLATE % (table + '_id_seq')

    def RANDOM(self):
        return 'RANDOM()'

    def ADD(self, first, second):
        """'+' means concatenation (||) for textual types, arithmetic
        addition otherwise."""
        t = first.type
        if t in ('text','string','password', 'json', 'upload','blob'):
            return '(%s || %s)' % (self.expand(first), self.expand(second, t))
        else:
            return '(%s + %s)' % (self.expand(first), self.expand(second, t))

    # --- two-phase commit hooks -----------------------------------------
    def distributed_transaction_begin(self,key):
        # PostgreSQL needs no explicit begin for a distributed transaction.
        return

    def prepare(self,key):
        self.execute("PREPARE TRANSACTION '%s';" % key)

    def commit_prepared(self,key):
        self.execute("COMMIT PREPARED '%s';" % key)

    def rollback_prepared(self,key):
        self.execute("ROLLBACK PREPARED '%s';" % key)

    def create_sequence_and_triggers(self, query, table, **args):
        # following lines should only be executed if table._sequence_name does not exist
        # self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
        # self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
        #              % (table._tablename, table._fieldname, table._sequence_name))
        self.execute(query)

    # user:password@host:port/db?sslmode=...
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        """Parse the DAL URI into a libpq-style connection string and
        (optionally) open the first connection."""
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()
        self._last_insert = None # for INSERT ... RETURNING ID
        self.TRUE_exp = 'TRUE'
        self.FALSE_exp = 'FALSE'
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '5432'
        sslmode = m.group('sslmode')
        # Build the libpq key=value connection string.
        if sslmode:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s' sslmode='%s'") \
                   % (db, user, host, port, password, sslmode)
        else:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s'") \
                   % (db, user, host, port, password)
        # choose diver according uri
        if self.driver:
            self.__version__ = "%s %s" % (self.driver.__name__,
                                          self.driver.__version__)
        else:
            self.__version__ = None
        def connector(msg=msg,driver_args=driver_args):
            # Late-bound factory so the pool can reopen connections.
            return self.driver.connect(msg,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        self.connection.set_client_encoding('UTF8')
        self.execute("SET standard_conforming_strings=on;")
        self.try_json()

    def _insert(self, table, fields):
        """Build the INSERT statement; when the table has an id column,
        append RETURNING so lastrowid() can fetch the new id."""
        table_rname = table.sqlsafe
        if fields:
            keys = ','.join(f.sqlsafe_name for f, v in fields)
            values = ','.join(self.expand(v, f.type) for f, v in fields)
            if table._id:
                # Remember that the next fetch yields the RETURNING value.
                self._last_insert = (table._id, 1)
                return 'INSERT INTO %s(%s) VALUES (%s) RETURNING %s;' % (
                    table_rname, keys, values, table._id.name)
            else:
                self._last_insert = None
                return 'INSERT INTO %s(%s) VALUES (%s);' % (table_rname, keys, values)
        else:
            # BUGFIX: was a bare `self._last_insert` expression (a no-op),
            # leaving a stale marker set by a previous insert; lastrowid()
            # would then try to read a RETURNING value that does not exist.
            self._last_insert = None
            return self._insert_empty(table)

    def lastrowid(self, table=None):
        """Return the id of the most recent insert: the RETURNING value
        when one is pending, otherwise the session's lastval()."""
        if self._last_insert:
            return int(self.cursor.fetchone()[0])
        else:
            self.execute("select lastval()")
            return int(self.cursor.fetchone()[0])

    def try_json(self):
        # check JSON data type support
        # (to be added to after_connection)

        # until pg8000 supports json, leave this commented
        #if self.driver_name == "pg8000":
        #    supports_json = self.connection.server_version >= "9.2.0"

        if (self.driver_name == "psycopg2" and
            self.driver.__version__ >= "2.0.12"):
            supports_json = self.connection.server_version >= 90200
        elif self.driver_name == "zxJDBC":
            supports_json = self.connection.dbversion >= "9.2.0"
        else:
            supports_json = None
        if supports_json:
            self.types["json"] = "JSON"
            if (self.driver_name == "psycopg2" and
                self.driver.__version__ >= '2.5.0'):
                # psycopg2 >= 2.5 parses json columns itself.
                self.driver_auto_json = ['loads']
        else:
            LOGGER.debug("Your database version does not support the JSON"
                         " data type (using TEXT instead)")

    def LIKE(self,first,second):
        # Non-text columns must be cast to CHAR before LIKE.
        args = (self.expand(first), self.expand(second,'string'))
        if not first.type in ('string', 'text', 'json'):
            return '(%s LIKE %s)' % (
                self.CAST(args[0], 'CHAR(%s)' % first.length), args[1])
        else:
            return '(%s LIKE %s)' % args

    def ILIKE(self,first,second):
        # Case-insensitive LIKE; falls back to plain LIKE for casted
        # non-text columns (matches the original behavior).
        args = (self.expand(first), self.expand(second,'string'))
        if not first.type in ('string', 'text', 'json'):
            return '(%s LIKE %s)' % (
                self.CAST(args[0], 'CHAR(%s)' % first.length), args[1])
        else:
            return '(%s ILIKE %s)' % args

    def REGEXP(self,first,second):
        return '(%s ~ %s)' % (self.expand(first),
                              self.expand(second,'string'))

    # GIS functions

    def ST_ASGEOJSON(self, first, second):
        """
        http://postgis.org/docs/ST_AsGeoJSON.html
        """
        return 'ST_AsGeoJSON(%s,%s,%s,%s)' %(second['version'],
            self.expand(first), second['precision'], second['options'])

    def ST_ASTEXT(self, first):
        """
        http://postgis.org/docs/ST_AsText.html
        """
        return 'ST_AsText(%s)' %(self.expand(first))

    def ST_X(self, first):
        """
        http://postgis.org/docs/ST_X.html
        """
        return 'ST_X(%s)' %(self.expand(first))

    def ST_Y(self, first):
        """
        http://postgis.org/docs/ST_Y.html
        """
        return 'ST_Y(%s)' %(self.expand(first))

    def ST_CONTAINS(self, first, second):
        """
        http://postgis.org/docs/ST_Contains.html
        """
        return 'ST_Contains(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_DISTANCE(self, first, second):
        """
        http://postgis.org/docs/ST_Distance.html
        """
        return 'ST_Distance(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_EQUALS(self, first, second):
        """
        http://postgis.org/docs/ST_Equals.html
        """
        return 'ST_Equals(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_INTERSECTS(self, first, second):
        """
        http://postgis.org/docs/ST_Intersects.html
        """
        return 'ST_Intersects(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_OVERLAPS(self, first, second):
        """
        http://postgis.org/docs/ST_Overlaps.html
        """
        return 'ST_Overlaps(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_SIMPLIFY(self, first, second):
        """
        http://postgis.org/docs/ST_Simplify.html
        """
        return 'ST_Simplify(%s,%s)' %(self.expand(first), self.expand(second, 'double'))

    def ST_TOUCHES(self, first, second):
        """
        http://postgis.org/docs/ST_Touches.html
        """
        return 'ST_Touches(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_WITHIN(self, first, second):
        """
        http://postgis.org/docs/ST_Within.html
        """
        return 'ST_Within(%s,%s)' %(self.expand(first), self.expand(second, first.type))

    def ST_DWITHIN(self, first, args):
        """
        http://postgis.org/docs/ST_DWithin.html

        *args* is a (geometry, distance) pair. The tuple is unpacked in
        the body instead of in the signature: tuple-parameter unpacking
        is Python 2-only syntax (removed by PEP 3113); callers passing a
        2-tuple are unaffected.
        """
        (second, third) = args
        return 'ST_DWithin(%s,%s,%s)' %(self.expand(first),
                                        self.expand(second, first.type),
                                        self.expand(third, 'double'))

    def represent(self, obj, fieldtype):
        """Render geometry/geography values via PostGIS constructors;
        delegate everything else to the base adapter."""
        field_is_type = fieldtype.startswith
        if field_is_type('geo'):
            srid = 4326 # postGIS default srid for geometry
            geotype, parms = fieldtype[:-1].split('(')
            parms = parms.split(',')
            if len(parms) >= 2:
                schema, srid = parms[:2]
            if field_is_type('geometry'):
                value = "ST_GeomFromText('%s',%s)" %(obj, srid)
            elif field_is_type('geography'):
                value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
#                 else:
#                     raise SyntaxError('Invalid field type %s' %fieldtype)
            return value
        return BaseAdapter.represent(self, obj, fieldtype)

    def _drop(self, table, mode='restrict'):
        """Return the DROP TABLE statement list; *mode* must be
        'restrict', 'cascade' or ''."""
        if mode not in ['restrict', 'cascade', '']:
            raise ValueError('Invalid mode: %s' % mode)
        return ['DROP TABLE ' + table.sqlsafe + ' ' + str(mode) + ';']
|
||||
|
||||
class NewPostgreSQLAdapter(PostgreSQLAdapter):
    """
    PostgreSQL adapter variant that stores ``list:*`` field types as
    native PostgreSQL arrays (BIGINT[]/TEXT[]) instead of the serialised
    text representation used by PostgreSQLAdapter.
    """
    drivers = ('psycopg2','pg8000')

    # DAL field type -> PostgreSQL column type; %(...)s placeholders are
    # filled in by the table-definition machinery
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BIGINT[]',
        'list:string': 'TEXT[]',
        'list:reference': 'BIGINT[]',
        'geometry': 'GEOMETRY',
        'geography': 'GEOGRAPHY',
        'big-id': 'BIGSERIAL PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def parse_list_integers(self, value, field_type):
        # native array columns already come back as Python lists
        return value

    def parse_list_references(self, value, field_type):
        # wrap each raw id into a reference to the target table
        # (field_type looks like 'list:reference <table>')
        return [self.parse_reference(r, field_type[5:]) for r in value]

    def parse_list_strings(self, value, field_type):
        # native array columns already come back as Python lists
        return value

    def represent(self, obj, fieldtype):
        """
        Render ``list:*`` values as PostgreSQL ``ARRAY[...]`` literals;
        every other field type is delegated to BaseAdapter.represent.
        """
        field_is_type = fieldtype.startswith
        if field_is_type('list:'):
            # normalise to a list: None/empty -> [], scalar -> [scalar]
            if not obj:
                obj = []
            elif not isinstance(obj, (list, tuple)):
                obj = [obj]
            if field_is_type('list:string'):
                obj = map(str,obj)
            else:
                obj = map(int,obj)
            return 'ARRAY[%s]' % ','.join(repr(item) for item in obj)
        return BaseAdapter.represent(self, obj, fieldtype)
|
||||
|
||||
|
||||
class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
    """PostgreSQL adapter for Jython, connecting through the zxJDBC bridge."""
    drivers = ('zxJDBC',)

    # connection URI shape: user[:password]@host[:port]/db
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None ):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # parse everything after the 'scheme://' prefix
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '5432'
        # positional args for zxJDBC: (jdbc url, user, password)
        msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
        def connector(msg=msg,driver_args=driver_args):
            # default-argument binding freezes the credentials for reconnects
            return self.driver.connect(*msg,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        """Per-connection session setup: UTF-8 client encoding, JSON probe."""
        self.connection.set_client_encoding('UTF8')
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
        self.try_json()
|
||||
@@ -0,0 +1,97 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class SAPDBAdapter(BaseAdapter):
    """Adapter for SAP DB / MaxDB through the 'sapdb' driver."""
    drivers = ('sapdb',)

    support_distributed_transaction = False
    # DAL field type -> SAP DB column type; %(...)s placeholders are
    # filled in by the table-definition machinery
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONG',
        'json': 'LONG',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONG',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'FIXED(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT PRIMARY KEY',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONG',
        'list:string': 'LONG',
        'list:reference': 'LONG',
        'big-id': 'BIGINT PRIMARY KEY',
        'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }

    def sequence_name(self,table):
        # name of the sequence backing a table's id column
        return (self.QUOTE_TEMPLATE + '_id_Seq') % table

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """
        Emulate LIMIT/OFFSET using the ROWNO pseudo-column: the inner query
        is cut at ROWNO=lmax and the outer query skips rows up to lmin.
        """
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def create_sequence_and_triggers(self, query, table, **args):
        # following lines should only be executed if table._sequence_name does not exist
        self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
        self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
                         % (table._tablename, table._id.name, table._sequence_name))
        self.execute(query)

    # connection URI shape: user[:password]@host[:port]/db[?sslmode=...]
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "sapdb"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # parse everything after the 'scheme://' prefix
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        def connector(user=user, password=password, database=db,
                      host=host, driver_args=driver_args):
            # default-argument binding freezes the credentials for reconnects
            return self.driver.Connection(user, password, database,
                                          host, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def lastrowid(self,table):
        # read the id just allocated from the table's backing sequence
        self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
        return long(self.cursor.fetchone()[0])
|
||||
@@ -0,0 +1,280 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import copy
|
||||
import datetime
|
||||
import locale
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
|
||||
from .._compat import PY2, pjoin
|
||||
from .._globals import IDENTITY
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class SQLiteAdapter(BaseAdapter):
    """
    SQLite adapter.  Registers Python helper functions (web2py_extract,
    REGEXP) as SQL functions on each new connection and implements
    CASCADE deletes and FOR UPDATE emulation on top of sqlite semantics.
    """
    drivers = ('sqlite2','sqlite3')

    can_select_for_update = None # support ourselves with BEGIN TRANSACTION

    def EXTRACT(self,field,what):
        # SQLite has no EXTRACT(); delegate to the registered
        # web2py_extract SQL function (see after_connection)
        return "web2py_extract('%s',%s)" % (what, self.expand(field))

    @staticmethod
    def web2py_extract(lookup, s):
        """
        Pull a datetime component out of an ISO 'YYYY-MM-DD HH:MM:SS'
        string by fixed-position slicing; 'epoch' parses the whole
        timestamp into a unix time.  Returns None on any failure.
        """
        table = {
            'year': (0, 4),
            'month': (5, 7),
            'day': (8, 10),
            'hour': (11, 13),
            'minute': (14, 16),
            'second': (17, 19),
            }
        try:
            if lookup != 'epoch':
                (i, j) = table[lookup]
                return int(s[i:j])
            else:
                return time.mktime(datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())
        except:
            # NOTE(review): bare except is deliberate best-effort here —
            # any malformed value maps to NULL at the SQL level
            return None

    @staticmethod
    def web2py_regexp(expression, item):
        # backs the SQL REGEXP operator with Python's re module
        return re.compile(expression).search(item) is not None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.adapter_args = adapter_args
        if do_connect: self.find_driver(adapter_args)
        # pooling is disabled regardless of the requested pool_size
        self.pool_size = 0
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() \
            or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            self.dbpath = ':memory:'
        else:
            self.dbpath = uri.split('://',1)[1]
            if self.dbpath[0] != '/':
                if PY2:
                    # re-encode the folder so the joined path is utf8 bytes
                    self.dbpath = pjoin(
                        self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
                else:
                    self.dbpath = pjoin(self.folder, self.dbpath)
        if not 'check_same_thread' in driver_args:
            driver_args['check_same_thread'] = False
        if not 'detect_types' in driver_args and do_connect:
            driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
        def connector(dbpath=self.dbpath, driver_args=driver_args):
            return self.driver.Connection(dbpath, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # register the Python helpers as SQL functions on this connection
        self.connection.create_function('web2py_extract', 2,
                                        SQLiteAdapter.web2py_extract)
        self.connection.create_function("REGEXP", 2,
                                        SQLiteAdapter.web2py_regexp)

        if self.adapter_args.get('foreign_keys',True):
            self.execute('PRAGMA foreign_keys=ON;')

    def _truncate(self, table, mode=''):
        # DELETE all rows plus reset of the AUTOINCREMENT counter
        tablename = table._tablename
        return ['DELETE FROM %s;' % tablename,
                "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]

    def lastrowid(self, table):
        return self.cursor.lastrowid

    def REGEXP(self,first,second):
        # uses the REGEXP function registered in after_connection
        return '(%s REGEXP %s)' % (self.expand(first),
                                   self.expand(second,'string'))

    def delete(self, tablename, query):
        # SQLite requires its own delete to handle CASCADE
        db = self.db
        table = db[tablename]
        # snapshot the ids before deleting so cascades can be applied
        deleted = [x[table._id.name] for x in db(query).select(table._id)]

        counter = super(SQLiteAdapter, self).delete(tablename, query)

        if counter:
            for field in table._referenced_by:
                if field.type == 'reference '+ tablename \
                        and field.ondelete == 'CASCADE':
                    db(field.belongs(deleted)).delete()

        return counter

    def select(self, query, fields, attributes):
        """
        Simulate `SELECT ... FOR UPDATE` with `BEGIN IMMEDIATE TRANSACTION`.
        Note that the entire database, rather than one record, is locked
        (it will be locked eventually anyway by the following UPDATE).
        """
        if attributes.get('for_update', False) and not 'cache' in attributes:
            self.execute('BEGIN IMMEDIATE TRANSACTION;')
        return super(SQLiteAdapter, self).select(query, fields, attributes)
|
||||
|
||||
|
||||
# shared-library name of the SpatiaLite SQLite extension,
# keyed by platform.system()
SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }
|
||||
|
||||
class SpatiaLiteAdapter(SQLiteAdapter):
    """
    SQLite adapter that loads the SpatiaLite extension for GIS support;
    exposes SpatiaLite's spatial functions (Contains, Within, ...) through
    the DAL ST_* API.
    """
    drivers = ('sqlite3','sqlite2')

    # inherit the base type map and add a GEOMETRY column type
    types = copy.copy(BaseAdapter.types)
    types.update(geometry='GEOMETRY')

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326, after_connection=None):
        self.db = db
        self.dbengine = "spatialite"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        # pooling is disabled regardless of the requested pool_size
        self.pool_size = 0
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # default SRID used by represent() when none is declared
        self.srid = srid
        path_encoding = sys.getfilesystemencoding() \
            or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('spatialite:memory'):
            self.dbpath = ':memory:'
        else:
            self.dbpath = uri.split('://',1)[1]
            if self.dbpath[0] != '/':
                self.dbpath = pjoin(
                    self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
        if not 'check_same_thread' in driver_args:
            driver_args['check_same_thread'] = False
        if not 'detect_types' in driver_args and do_connect:
            driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
        def connector(dbpath=self.dbpath, driver_args=driver_args):
            return self.driver.Connection(dbpath, **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # load the SpatiaLite shared library into this connection
        self.connection.enable_load_extension(True)
        # for Windows, rename libspatialite-2.dll to libspatialite.dll
        # Linux uses libspatialite.so
        # Mac OS X uses libspatialite.dylib
        libspatialite = SPATIALLIBS[platform.system()]
        self.execute(r'SELECT load_extension("%s");' % libspatialite)

        self.connection.create_function('web2py_extract', 2,
                                        SQLiteAdapter.web2py_extract)
        self.connection.create_function("REGEXP", 2,
                                        SQLiteAdapter.web2py_regexp)

    # GIS functions

    def ST_ASGEOJSON(self, first, second):
        # `second` carries the precision/options mapping built by the DAL layer
        return 'AsGeoJSON(%s,%s,%s)' %(self.expand(first),
                                       second['precision'], second['options'])

    def ST_ASTEXT(self, first):
        return 'AsText(%s)' %(self.expand(first))

    def ST_CONTAINS(self, first, second):
        return 'Contains(%s,%s)' %(self.expand(first),
                                   self.expand(second, first.type))

    def ST_DISTANCE(self, first, second):
        return 'Distance(%s,%s)' %(self.expand(first),
                                   self.expand(second, first.type))

    def ST_EQUALS(self, first, second):
        return 'Equals(%s,%s)' %(self.expand(first),
                                 self.expand(second, first.type))

    def ST_INTERSECTS(self, first, second):
        return 'Intersects(%s,%s)' %(self.expand(first),
                                     self.expand(second, first.type))

    def ST_OVERLAPS(self, first, second):
        return 'Overlaps(%s,%s)' %(self.expand(first),
                                   self.expand(second, first.type))

    def ST_SIMPLIFY(self, first, second):
        return 'Simplify(%s,%s)' %(self.expand(first),
                                   self.expand(second, 'double'))

    def ST_TOUCHES(self, first, second):
        return 'Touches(%s,%s)' %(self.expand(first),
                                  self.expand(second, first.type))

    def ST_WITHIN(self, first, second):
        return 'Within(%s,%s)' %(self.expand(first),
                                 self.expand(second, first.type))

    def represent(self, obj, fieldtype):
        """
        Render geometry values as ST_GeomFromText literals, parsing the
        SRID from the field type declaration (default 4326); every other
        field type is delegated to BaseAdapter.represent.
        """
        field_is_type = fieldtype.startswith
        if field_is_type('geo'):
            srid = 4326 # Spatialite default srid for geometry
            geotype, parms = fieldtype[:-1].split('(')
            parms = parms.split(',')
            if len(parms) >= 2:
                schema, srid = parms[:2]
            # if field_is_type('geometry'):
            value = "ST_GeomFromText('%s',%s)" %(obj, srid)
            # elif field_is_type('geography'):
            #     value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
            # else:
            #     raise SyntaxError, 'Invalid field type %s' %fieldtype
            return value
        return BaseAdapter.represent(self, obj, fieldtype)
|
||||
|
||||
|
||||
class JDBCSQLiteAdapter(SQLiteAdapter):
    """SQLite adapter for Jython, connecting through the zxJDBC bridge."""
    drivers = ('zxJDBC_sqlite',)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() \
            or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            self.dbpath = ':memory:'
        else:
            self.dbpath = uri.split('://',1)[1]
            if self.dbpath[0] != '/':
                # relative paths are resolved against the work folder
                self.dbpath = pjoin(
                    self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
        def connector(dbpath=self.dbpath,driver_args=driver_args):
            return self.driver.connect(
                self.driver.getConnection('jdbc:sqlite:'+dbpath),
                **driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def after_connection(self):
        # FIXME http://www.zentus.com/sqlitejdbc/custom_functions.html for UDFs
        self.connection.create_function('web2py_extract', 2,
                                        SQLiteAdapter.web2py_extract)

    def execute(self, a):
        # no REGEXP support here; simply log and run the statement
        return self.log_execute(a)
|
||||
@@ -0,0 +1,77 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from .._globals import IDENTITY
|
||||
from ..connection import ConnectionPool
|
||||
from .base import BaseAdapter
|
||||
|
||||
|
||||
class TeradataAdapter(BaseAdapter):
    """Adapter for Teradata via pyodbc; the URI body is passed verbatim
    as the ODBC connection string."""
    drivers = ('pyodbc',)

    # DAL field type -> Teradata column type
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'VARCHAR(2000)',
        'json': 'VARCHAR(4000)',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'REAL',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        # Modified Constraint syntax for Teradata.
        # Teradata does not support ON DELETE.
        'id': 'INT GENERATED ALWAYS AS IDENTITY', # Teradata Specific
        'reference': 'INT',
        'list:integer': 'VARCHAR(4000)',
        'list:string': 'VARCHAR(4000)',
        'list:reference': 'VARCHAR(4000)',
        'geometry': 'ST_GEOMETRY', # http://www.info.teradata.com/HTMLPubs/DB_TTU_14_00/index.html#page/Database_Management/B035_1094_111A/ch14.055.160.html
        'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY', # Teradata Specific
        'big-reference': 'BIGINT',
        'reference FK': ' REFERENCES %(foreign_key)s',
        'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
        }

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "teradata"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        # everything after 'scheme://' is used verbatim as the ODBC DSN
        ruri = uri.split('://', 1)[1]
        def connector(cnxn=ruri,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()

    def close(self,action='commit',really=True):
        # Teradata does not implicitly close off the cursor
        # leading to SQL_ACTIVE_STATEMENTS limit errors
        self.cursor.close()
        ConnectionPool.close(self, action, really)

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    # Similar to MSSQL, Teradata can't specify a range (for Pageby)
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        # only the upper bound is honoured, via TOP; the offset is ignored
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def _truncate(self, table, mode=''):
        # Teradata spelling of a full-table delete
        tablename = table._tablename
        return ['DELETE FROM %s ALL;' % (tablename)]
|
||||
+1095
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,116 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
|
||||
from ._compat import exists
|
||||
from ._globals import GLOBAL_LOCKER, THREAD_LOCAL
|
||||
from .helpers.classes import UseDatabaseStoredFile
|
||||
|
||||
class ConnectionPool(object):
    """
    Mixin providing per-URI connection pooling and connection lifecycle
    management (close/reconnect plus after-connection hooks) for adapters.
    """

    # idle connections keyed by connection URI, shared process-wide
    POOLS = {}
    # when True, pooled connections are probed with 'SELECT 1;' on reuse
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    # ## this allows gluon to commit/rollback all dbs in this thread

    def close(self,action='commit',really=True):
        """
        Run `action` (a method name such as 'commit'/'rollback', or a
        callable receiving the adapter), then either recycle the
        connection into the pool or really close it.
        """
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # ## if you want pools, recycle this connection
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return

    def find_or_make_work_folder(self):
        #this actually does not make the folder. it has to be there
        self.folder = getattr(THREAD_LOCAL,'folder','')

        if (os.path.isabs(self.folder) and
            isinstance(self, UseDatabaseStoredFile) and
            self.folder.startswith(os.getcwd())):
            self.folder = os.path.relpath(self.folder, os.getcwd())

        # Creating the folder if it does not exist
        # NOTE(review): the 'if False' guard makes this dead code —
        # folder creation is intentionally disabled here
        if False and self.folder and not exists(self.folder):
            os.mkdir(self.folder)

    def after_connection_hook(self):
        """Hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        #this it is supposed to be overloaded by adapters
        pass

    def reconnect(self, f=None, cursor=True):
        """
        Defines: `self.connection` and `self.cursor`
        (if cursor is True)
        if `self.pool_size>0` it will try pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not `self.pool_size` or no active connections in pool makes a new one
        """
        if getattr(self,'connection', None) is not None:
            return
        if f is None:
            f = self.connector

        # if not hasattr(self, "driver") or self.driver is None:
        #     LOGGER.debug("Skipping connection since there's no driver")
        #     return

        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            # probe the pooled connection; a stale one
                            # fails here and we loop to try the next
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()
|
||||
@@ -0,0 +1,298 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import copy
|
||||
import marshal
|
||||
import struct
|
||||
import traceback
|
||||
|
||||
from .._compat import exists, copyreg
|
||||
from .._globals import LOGGER
|
||||
|
||||
|
||||
class Reference(long):
    """
    Integer subclass representing a foreign-key id that lazily loads the
    referenced record on attribute or item access.

    Instances expect `_table` (the referenced table) and `_record`
    (the cache, initially falsy) to be assigned externally after
    construction — TODO confirm against the factory in the DAL core.
    """

    def __allocate(self):
        # lazily fetch and cache the referenced record
        if not self._record:
            self._record = self._table[long(self)]
        if not self._record:
            raise RuntimeError(
                "Using a recursive select but encountered a broken reference: %s %d"%(self._table, long(self)))

    def __getattr__(self, key, default=None):
        # `default` added so that get() below can forward its default;
        # previously get(key, default) raised TypeError because
        # __getattr__ only accepted (self, key).
        if key == 'id':
            return long(self)
        if key in self._table:
            self.__allocate()
        if self._record:
            return self._record.get(key, default) # to deal with case self.update_record()
        else:
            return default

    def get(self, key, default=None):
        """dict-style access to the referenced record's fields."""
        return self.__getattr__(key, default)

    def __setattr__(self, key, value):
        # underscore names are internal state, stored on the object itself
        if key.startswith('_'):
            long.__setattr__(self, key, value)
            return
        self.__allocate()
        self._record[key] = value

    def __getitem__(self, key):
        if key == 'id':
            return long(self)
        self.__allocate()
        return self._record.get(key, None)

    def __setitem__(self,key,value):
        self.__allocate()
        self._record[key] = value
|
||||
|
||||
def Reference_unpickler(data):
    """Unpickle helper: decode the marshalled integer id produced by
    Reference_pickler back into a plain number."""
    value = marshal.loads(data)
    return value
|
||||
|
||||
def Reference_pickler(data):
    """Pickle support: reduce a Reference to the marshalled bytes of its
    integer id (the record cache is deliberately not pickled)."""
    try:
        marshal_dump = marshal.dumps(long(data))
    except AttributeError:
        # fallback: emulate the marshalled-int format by hand
        # ('i' tag followed by a little-endian 32-bit value)
        marshal_dump = 'i%s' % struct.pack('<i', long(data))
    return (Reference_unpickler, (marshal_dump,))

# register the custom pickler so Reference instances pickle as plain ids
copyreg.pickle(Reference, Reference_pickler, Reference_unpickler)
|
||||
|
||||
|
||||
class SQLCallableList(list):
    """A list subclass that can also be invoked; calling an instance
    returns a shallow copy of it (same class, same elements)."""

    def __call__(self):
        # copy.copy preserves the SQLCallableList type of the result
        duplicate = copy.copy(self)
        return duplicate
|
||||
|
||||
|
||||
class SQLALL(object):
    """
    Helper class providing a comma-separated string having all the field names
    (prefixed by table name and '.')

    normally only called from within gluon.dal
    """

    def __init__(self, table):
        self._table = table

    def __str__(self):
        # render every field of the wrapped table, comma separated
        return ', '.join(str(field) for field in self._table)
|
||||
|
||||
|
||||
class SQLCustomType(object):
    """
    Allows defining of custom SQL types

    Args:
        type: the web2py type (default = 'string')
        native: the backend type
        encoder: how to encode the value to store it in the backend
        decoder: how to decode the value retrieved from the backend
        validator: what validators to use ( default = None, will use the
            default validator for type)

    Example::
        Define as:

            decimal = SQLCustomType(
                type ='double',
                native ='integer',
                encoder =(lambda x: int(float(x) * 100)),
                decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
            )

            db.define_table(
                'example',
                Field('value', type=decimal)
            )

    """

    def __init__(
        self,
        type='string',
        native=None,
        encoder=None,
        decoder=None,
        validator=None,
        _class=None,
        ):

        self.type = type
        self.native = native
        # encoder/decoder default to identity transforms
        self.encoder = encoder or (lambda x: x)
        self.decoder = decoder or (lambda x: x)
        self.validator = validator
        self._class = _class or type

    def startswith(self, text=None):
        # mimics str.startswith so field-type sniffing code can probe this
        # object like a type string.
        # NOTE(review): `self` (not a prefix string) is passed to
        # self.type.startswith — on a plain-string .type this raises
        # TypeError and so always returns False; it only delegates
        # meaningfully when .type is itself a SQLCustomType. Confirm
        # intent before changing.
        try:
            return self.type.startswith(self, text)
        except TypeError:
            return False

    def endswith(self, text=None):
        # same probing pattern (and the same caveat) as startswith above
        try:
            return self.type.endswith(self, text)
        except TypeError:
            return False

    def __getslice__(self, a=0, b=100):
        # slicing a custom type yields nothing (Python 2 slice protocol)
        return None

    def __getitem__(self, i):
        return None

    def __str__(self):
        return self._class
|
||||
|
||||
|
||||
class RecordUpdater(object):
    """
    Callable bound to one record (identified by table and id).  Calling
    it updates that record with the supplied fields (or re-saves the
    current column set when no fields are given) and refreshes the
    cached column set.
    """

    def __init__(self, colset, table, id):
        self.colset, self.db, self.tablename, self.id = \
            colset, table._db, table._tablename, id

    def __call__(self, **fields):
        colset, db, tablename, id = self.colset, self.db, self.tablename, self.id
        table = db[tablename]
        newfields = fields or dict(colset)
        # iterate over a snapshot of the keys: deleting from a dict while
        # iterating its live keys() view raises RuntimeError on Python 3
        for fieldname in list(newfields.keys()):
            # drop unknown fields and the id column before updating
            if not fieldname in table.fields or table[fieldname].type=='id':
                del newfields[fieldname]
        table._db(table._id==id,ignore_common_filters=True).update(**newfields)
        # keep the cached column set in sync with what was written
        colset.update(newfields)
        return colset
|
||||
|
||||
class RecordDeleter(object):
    """Callable bound to one record; invoking it deletes that record."""

    def __init__(self, table, id):
        self.db = table._db
        self.tablename = table._tablename
        self.id = id

    def __call__(self):
        # delete exactly the record this object was bound to
        query = self.db[self.tablename]._id == self.id
        return self.db(query).delete()
|
||||
|
||||
|
||||
class MethodAdder(object):
    """
    Decorator factory that attaches functions to a table instance as
    bound methods: used as ``@table.method`` (name taken from the
    function) or ``@table.method.some_name``.
    """
    def __init__(self,table):
        self.table = table
    def __call__(self):
        return self.register()
    def __getattr__(self,method_name):
        # attribute access selects the name to register under
        return self.register(method_name)
    def register(self,method_name=None):
        def _decorated(f):
            instance = self.table
            import types
            # NOTE(review): the three-argument MethodType and f.func_name
            # are Python 2 only
            method = types.MethodType(f, instance, instance.__class__)
            name = method_name or f.func_name
            setattr(instance, name, method)
            return f
        return _decorated
|
||||
|
||||
|
||||
class DatabaseStoredFile:
    """
    File-like object whose contents live in the `web2py_filesystem`
    database table (path -> content), used to keep .table metadata files
    inside the database instead of on disk.

    Only the mysql, postgres and sqlite engines are supported.
    """

    # connection URIs for which web2py_filesystem is known to exist
    web2py_filesystems = set()

    def escape(self,obj):
        return self.db._adapter.escape(obj)

    @staticmethod
    def try_create_web2py_filesystem(db):
        # create the backing table once per database URI
        if not db._uri in DatabaseStoredFile.web2py_filesystems:
            if db._adapter.dbengine == 'mysql':
                sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;"
            elif db._adapter.dbengine in ('postgres', 'sqlite'):
                sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content TEXT, PRIMARY KEY(path));"
            db.executesql(sql)
            DatabaseStoredFile.web2py_filesystems.add(db._uri)

    def __init__(self,db,filename,mode):
        if not db._adapter.dbengine in ('mysql', 'postgres', 'sqlite'):
            raise RuntimeError("only MySQL/Postgres/SQLite can store metadata .table files in database for now")
        self.db = db
        self.filename = filename
        self.mode = mode
        DatabaseStoredFile.try_create_web2py_filesystem(db)
        # read position within self.data
        self.p=0
        self.data = ''
        if mode in ('r','rw','a'):
            # NOTE(review): filename is interpolated into SQL unescaped;
            # presumably safe because callers pass internal metadata paths
            # only — confirm before exposing to user input
            query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
                % filename
            rows = self.db.executesql(query)
            if rows:
                self.data = rows[0][0]
            elif exists(filename):
                # fall back to an on-disk copy when present
                datafile = open(filename, 'r')
                try:
                    self.data = datafile.read()
                finally:
                    datafile.close()
            elif mode in ('r','rw'):
                raise RuntimeError("File %s does not exist" % filename)

    def read(self, bytes):
        # return up to `bytes` characters from the current position
        data = self.data[self.p:self.p+bytes]
        self.p += len(data)
        return data

    def readline(self):
        # return the next line including its newline, or the remaining tail
        i = self.data.find('\n',self.p)+1
        if i>0:
            data, self.p = self.data[self.p:i], i
        else:
            data, self.p = self.data[self.p:], len(self.data)
        return data

    def write(self,data):
        # append only; the buffer is flushed to the DB in close_connection()
        self.data += data

    def close_connection(self):
        # persist the buffer under the file's path (delete-then-insert)
        if self.db is not None:
            self.db.executesql(
                "DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
            query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')"\
                % (self.filename, self.data.replace("'","''"))
            self.db.executesql(query)
            self.db.commit()
            self.db = None

    def close(self):
        self.close_connection()

    @staticmethod
    def exists(db, filename):
        """True when the path exists on disk or in web2py_filesystem."""
        if exists(filename):
            return True

        DatabaseStoredFile.try_create_web2py_filesystem(db)

        query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
        try:
            if db.executesql(query):
                return True
        # NOTE(review): Python 2-only except syntax
        except Exception, e:
            if not (db._adapter.isOperationalError(e) or
                    db._adapter.isProgrammingError(e)):
                raise
            # no web2py_filesystem found?
            tb = traceback.format_exc()
            LOGGER.error("Could not retrieve %s\n%s" % (filename, tb))
        return False
|
||||
|
||||
|
||||
class UseDatabaseStoredFile:
    """Mixin redirecting an adapter's file operations to
    DatabaseStoredFile, so .table metadata lives in the database."""

    def file_exists(self, filename):
        """True if the file exists on disk or in the database."""
        return DatabaseStoredFile.exists(self.db, filename)

    def file_open(self, filename, mode='rb', lock=True):
        """Open a database-stored file; `lock` is accepted for interface
        compatibility and ignored."""
        return DatabaseStoredFile(self.db, filename, mode)

    def file_close(self, fileobj):
        """Flush and release the database-stored file."""
        fileobj.close_connection()

    def file_delete(self, filename):
        """Remove the stored file's row and commit."""
        sql = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
        self.db.executesql(sql)
        self.db.commit()
|
||||
|
||||
@@ -0,0 +1,342 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import uuid
|
||||
import re
|
||||
|
||||
from .regex import REGEX_NOPASSWD, REGEX_UNPACK, REGEX_CONST_STRING, REGEX_W
|
||||
from .classes import SQLCustomType
|
||||
#from ..objects import Field, Table
|
||||
|
||||
|
||||
# Ordered English pluralization rules: (matcher, substitution pattern,
# replacement). The first matcher that hits wins; the final catch-all
# appends 's'.
PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
]

def pluralize(singular, rules=PLURALIZE_RULES):
    """Return the English plural of *singular* via the first matching rule."""
    for matcher, pattern, replacement in rules:
        if not matcher.search(singular):
            continue
        plural = pattern.sub(replacement, singular)
        if plural:
            return plural
|
||||
|
||||
def hide_password(uri):
    """Mask the credential part of a connection URI (or of every URI in
    a list/tuple of them) with '******'."""
    if not isinstance(uri, (list, tuple)):
        return REGEX_NOPASSWD.sub('******', uri)
    return [hide_password(item) for item in uri]
|
||||
|
||||
|
||||
def cleanup(text):
    """
    Validates that the given text is clean: only contains [0-9a-zA-Z_]

    The historical alphanumeric check is disabled, so the name is
    currently passed through unchanged.
    """
    return text
|
||||
|
||||
|
||||
def list_represent(x, r=None):
    """Render a list value as a comma-separated string; None/empty
    renders as ''. The `r` (row) argument is unused but part of the
    represent-callback signature."""
    items = x or []
    return ', '.join(str(item) for item in items)
|
||||
|
||||
|
||||
def xorify(orderby):
    """Fold a sequence of orderby expressions into one using the ``|``
    operator (despite the name, this is OR-combination, not XOR).
    Returns None for an empty/falsy input."""
    if not orderby:
        return None
    combined = orderby[0]
    for expression in orderby[1:]:
        combined = combined | expression
    return combined
|
||||
|
||||
|
||||
def use_common_filters(query):
    """Whether common filters should be applied to `query`.

    Mirrors the original short-circuit chain: a falsy query is returned
    as-is, a query without the flag yields False, otherwise the negated
    flag is returned.
    """
    if not query:
        return query
    if not hasattr(query, 'ignore_common_filters'):
        return False
    return not query.ignore_common_filters
|
||||
|
||||
|
||||
def bar_escape(item):
    """Escape literal '|' characters in str(item) by doubling them."""
    text = str(item)
    return text.replace('|', '||')
|
||||
|
||||
|
||||
def bar_encode(items):
    """Pack `items` into a |-delimited string, skipping items whose
    string form is blank; each item is bar-escaped first."""
    kept = [bar_escape(item) for item in items if str(item).strip()]
    return '|%s|' % '|'.join(kept)
|
||||
|
||||
|
||||
def bar_decode_integer(value):
    """Decode a |-packed string of integers (as produced by bar_encode)
    into a list of ints.

    Accepts either a string or a file-like object: when `value` has no
    split() but has read(), its content is read first.
    """
    if not hasattr(value, 'split') and hasattr(value, 'read'):
        value = value.read()
    # int() replaces the Python-2-only long(): on Python 2, int()
    # transparently promotes large values to long, so decoded values are
    # unchanged, and the code now also runs on Python 3.
    return [int(chunk) for chunk in value.split('|') if chunk.strip()]
|
||||
|
||||
|
||||
def bar_decode_string(value):
    """Unpack a |-encoded list of strings: strip the outer bars, split on
    single (unescaped) bars, drop blanks, un-double escaped bars."""
    inner = value[1:-1]
    return [piece.replace('||', '|')
            for piece in REGEX_UNPACK.split(inner) if piece.strip()]
|
||||
|
||||
|
||||
def archive_record(qset, fs, archive_table, current_record):
    """Copy every row selected by `qset` into `archive_table`, recording
    the original row id in the `current_record` column.

    Raises RuntimeError when the query spans more than one table (a join
    cannot be archived). Always returns False so it can be used as an
    abort-nothing update callback.
    """
    tablenames = qset.db._adapter.tables(qset.query)
    if len(tablenames) != 1:
        raise RuntimeError("cannot update join")
    for row in qset.select():
        archived = archive_table._filter_fields(row)
        archived[current_record] = row.id
        archive_table.insert(**archived)
    return False
|
||||
|
||||
|
||||
def smart_query(fields, text):
    """Parse a free-form search string `text` into a DAL Query over the
    given `fields`.

    `fields` may be a Field, a Table (all of whose fields are used), or a
    list/tuple mixing both. The text supports quoted constants, symbolic
    and English operators ("greater than", "starts with", "not in", ...),
    and 'and'/'or'/'not' connectives. Raises RuntimeError on invalid
    field names, syntax, or unsupported operations.
    """
    from ..objects import Field, Table
    # --- normalize `fields` to a flat list of Field objects ---
    if not isinstance(fields, (list, tuple)):
        fields = [fields]
    new_fields = []
    for field in fields:
        if isinstance(field, Field):
            new_fields.append(field)
        elif isinstance(field, Table):
            for ofield in field:
                new_fields.append(ofield)
        else:
            raise RuntimeError("fields must be a list of fields")
    fields = new_fields
    # Map both the short name ("name") and the qualified name
    # ("table.name") to each field; first field wins on collisions.
    field_map = {}
    for field in fields:
        n = field.name.lower()
        if not n in field_map:
            field_map[n] = field
        n = str(field).lower()
        if not n in field_map:
            field_map[n] = field
    # --- replace quoted string constants by #<index> placeholders so the
    # operator rewriting below cannot touch their contents ---
    constants = {}
    i = 0
    while True:
        m = REGEX_CONST_STRING.search(text)
        if not m: break
        text = text[:m.start()]+('#%i' % i)+text[m.end():]
        constants[str(i)] = m.group()[1:-1]
        i+=1
    # --- rewrite symbolic and English operators into canonical tokens ---
    text = re.sub('\s+',' ',text).lower()
    for a,b in [('&','and'),
                ('|','or'),
                ('~','not'),
                ('==','='),
                ('<','<'),
                ('>','>'),
                ('<=','<='),
                ('>=','>='),
                ('<>','!='),
                ('=<','<='),
                ('=>','>='),
                ('=','='),
                (' less or equal than ','<='),
                (' greater or equal than ','>='),
                (' equal or less than ','<='),
                (' equal or greater than ','>='),
                (' less or equal ','<='),
                (' greater or equal ','>='),
                (' equal or less ','<='),
                (' equal or greater ','>='),
                (' not equal to ','!='),
                (' not equal ','!='),
                (' equal to ','='),
                (' equal ','='),
                (' equals ','='),
                (' less than ','<'),
                (' greater than ','>'),
                (' starts with ','startswith'),
                (' ends with ','endswith'),
                (' not in ' , 'notbelongs'),
                (' in ' , 'belongs'),
                (' is ','=')]:
        if a[0]==' ':
            # also catch "is <phrase>" variants, e.g. "is less than"
            text = text.replace(' is'+a,' %s ' % b)
        text = text.replace(a,' %s ' % b)
    text = re.sub('\s+',' ',text).lower()
    # glue split comparison operators back together ("< =" -> "<=")
    text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])','\g<a>\g<b>',text)
    # --- token scan: expect [not] [and|or] field op value triples ---
    query = field = neg = op = logic = None
    for item in text.split():
        if field is None:
            if item == 'not':
                neg = True
            elif not neg and not logic and item in ('and','or'):
                logic = item
            elif item in field_map:
                field = field_map[item]
            else:
                raise RuntimeError("Invalid syntax")
        elif not field is None and op is None:
            op = item
        elif not op is None:
            # resolve #<index> placeholders back to their constant text
            if item.startswith('#'):
                if not item[1:] in constants:
                    raise RuntimeError("Invalid syntax")
                value = constants[item[1:]]
            else:
                value = item
            # on textual fields plain equality becomes a (case-insensitive)
            # LIKE match
            if field.type in ('text', 'string', 'json'):
                if op == '=': op = 'like'
            if op == '=': new_query = field==value
            elif op == '<': new_query = field<value
            elif op == '>': new_query = field>value
            elif op == '<=': new_query = field<=value
            elif op == '>=': new_query = field>=value
            elif op == '!=': new_query = field!=value
            elif op == 'belongs': new_query = field.belongs(value.split(','))
            elif op == 'notbelongs': new_query = ~field.belongs(value.split(','))
            elif field.type in ('text', 'string', 'json'):
                if op == 'contains': new_query = field.contains(value)
                elif op == 'like': new_query = field.ilike(value)
                elif op == 'startswith': new_query = field.startswith(value)
                elif op == 'endswith': new_query = field.endswith(value)
                else: raise RuntimeError("Invalid operation")
            elif field._db._adapter.dbengine=='google:datastore' and \
                 field.type in ('list:integer', 'list:string', 'list:reference'):
                # datastore list fields only support containment tests
                if op == 'contains': new_query = field.contains(value)
                else: raise RuntimeError("Invalid operation")
            else: raise RuntimeError("Invalid operation")
            if neg: new_query = ~new_query
            # combine with the accumulated query per the pending connective
            if query is None:
                query = new_query
            elif logic == 'and':
                query &= new_query
            elif logic == 'or':
                query |= new_query
            field = op = neg = logic = None
    return query
|
||||
|
||||
|
||||
def sqlhtml_validators(field):
    """
    Field type validation, using web2py's validators mechanism.

    makes sure the content of a field is in line with the declared
    fieldtype

    Returns a validator or list of validators appropriate for
    field.type, and as a side effect may set field.represent for
    reference / list fields. Returns [] when web2py's validators are
    not importable or the type is not a plain string/SQLCustomType.
    """
    db = field.db
    try:
        from gluon import validators
    except ImportError:
        # outside a web2py environment there is nothing to validate with
        return []
    field_type, field_length = field.type, field.length
    if isinstance(field_type, SQLCustomType):
        # a custom type may carry its own validator; otherwise fall back
        # to validating against its underlying base type
        if hasattr(field_type, 'validator'):
            return field_type.validator
        else:
            field_type = field_type.type
    elif not isinstance(field_type, str):
        return []
    requires = []

    def ff(r, id):
        # Format a referenced row for display: use the table's _format
        # (string template or callable) when available, else the raw id.
        row = r(id)
        if not row:
            return str(id)
        elif hasattr(r, '_format') and isinstance(r._format, str):
            return r._format % row
        elif hasattr(r, '_format') and callable(r._format):
            return r._format(row)
        else:
            return str(id)

    if field_type in (('string', 'text', 'password')):
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'json':
        requires.append(validators.IS_EMPTY_OR(validators.IS_JSON()))
    elif field_type == 'double' or field_type == 'float':
        requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
    elif field_type == 'integer':
        requires.append(validators.IS_INT_IN_RANGE(-2**31, 2**31))
    elif field_type == 'bigint':
        requires.append(validators.IS_INT_IN_RANGE(-2**63, 2**63))
    elif field_type.startswith('decimal'):
        requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
    elif field_type == 'date':
        requires.append(validators.IS_DATE())
    elif field_type == 'time':
        requires.append(validators.IS_TIME())
    elif field_type == 'datetime':
        requires.append(validators.IS_DATETIME())
    elif db and field_type.startswith('reference') and \
            field_type.find('.') < 0 and \
            field_type[10:] in db.tables:
        # simple (non-dotted) reference to a known table
        referenced = db[field_type[10:]]
        def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
        field.represent = field.represent or repr_ref
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(db, referenced._id,
                                           referenced._format)
            if field.unique:
                requires._and = validators.IS_NOT_IN_DB(db, field)
            if field.tablename == field_type[10:]:
                # self-reference must be optional to allow the first row
                return validators.IS_EMPTY_OR(requires)
            return requires
    elif db and field_type.startswith('list:reference') and \
            field_type.find('.') < 0 and \
            field_type[15:] in db.tables:
        referenced = db[field_type[15:]]
        def list_ref_repr(ids, row=None, r=referenced, f=ff):
            if not ids:
                return None
            from ..adapters.google import GoogleDatastoreAdapter
            refs = None
            db, id = r._db, r._id
            if isinstance(db._adapter, GoogleDatastoreAdapter):
                # datastore limits belongs() to 30 values per query, so
                # fetch in chunks and merge the rowsets.
                # NOTE(review): relies on the builtin reduce, which moved
                # to functools in Python 3 — confirm before porting.
                def count(values): return db(id.belongs(values)).select(id)
                rx = range(0, len(ids), 30)
                refs = reduce(lambda a, b: a & b, [count(ids[i:i+30]) for i in rx])
            else:
                refs = db(id.belongs(ids)).select(id)
            return (refs and ', '.join(f(r, x.id) for x in refs) or '')
        field.represent = field.represent or list_ref_repr
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(db, referenced._id,
                                           referenced._format, multiple=True)
        else:
            requires = validators.IS_IN_DB(db, referenced._id,
                                           multiple=True)
        if field.unique:
            requires._and = validators.IS_NOT_IN_DB(db, field)
        if not field.notnull:
            requires = validators.IS_EMPTY_OR(requires)
        return requires
    elif field_type.startswith('list:'):
        def repr_list(values, row=None): return ', '.join(str(v) for v in (values or []))
        field.represent = field.represent or repr_list
    if field.unique:
        requires.append(validators.IS_NOT_IN_DB(db, field))
    # sff: two-letter prefixes of types allowed to be empty
    # (integer, double, date, time, decimal, boolean/bigint families)
    sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
    if field.notnull and not field_type[:2] in sff:
        requires.append(validators.IS_NOT_EMPTY())
    elif not field.notnull and field_type[:2] in sff and requires:
        requires[0] = validators.IS_EMPTY_OR(requires[0])
    return requires
|
||||
|
||||
|
||||
def varquote_aux(name, quotestr='%s'):
    """Quote `name` with `quotestr` unless it is a plain \\w+ identifier."""
    if REGEX_W.match(name):
        return name
    return quotestr % name
|
||||
|
||||
|
||||
def uuid2int(uuidv):
    """Return the 128-bit integer value of the UUID string `uuidv`."""
    parsed = uuid.UUID(uuidv)
    return parsed.int
|
||||
|
||||
|
||||
def int2uuid(n):
    """Return the canonical UUID string for the 128-bit integer `n`."""
    value = uuid.UUID(int=n)
    return str(value)
|
||||
|
||||
|
||||
# Geodal utils
|
||||
def geoPoint(x, y):
    """Return the WKT (Well-Known Text) representation of a 2D point."""
    coords = (x, y)
    return "POINT (%f %f)" % coords
|
||||
|
||||
|
||||
def geoLine(*line):
    """Return the WKT LINESTRING for a sequence of (x, y) pairs."""
    points = ','.join("%f %f" % pair for pair in line)
    return "LINESTRING (%s)" % points
|
||||
|
||||
|
||||
def geoPolygon(*line):
    """Return the WKT POLYGON (single ring) for a sequence of (x, y) pairs."""
    ring = ','.join("%f %f" % pair for pair in line)
    return "POLYGON ((%s))" % ring
|
||||
@@ -0,0 +1,22 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import re
|
||||
|
||||
# leading type name of a field-type declaration, e.g. "reference other"
REGEX_TYPE = re.compile('^([\w\_\:]+)')
# engine name (with optional :modifier parts) at the start of a db URI
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
# a plain \w+ identifier (used to decide whether quoting is needed)
REGEX_W = re.compile('^\w+$')
# "table.field" qualified name
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.([^.]+)$')
# non-greedy capture fragment spliced into other patterns (plain string)
REGEX_NO_GREEDY_ENTITY_NAME = r'(.+?)'
# upload filename layout: table.field.uuidkey[.name].ext
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)(\.(?P<name>\w+))?\.\w+$')
# characters stripped when sanitizing an uploaded filename
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
# a single '|' not doubled on either side (bar-encoding separator)
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
# Python reserved words that cannot be table/field names
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
# the alias after "AS" in a SELECT expression
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
# a double- or single-quoted string constant (non-greedy)
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
# "{table.field[.op][.not]}" search-pattern tokens
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
# anything containing a [...] subscript
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
# trailing file extension of up to 5 word characters
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
# a single-quoted SQL string literal
REGEX_QUOTES = re.compile("'[^']*'")
# identifier that starts with an alphanumeric character
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
# the password portion of a "://user:password@" URI credential
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')
# credential section of a URI, to be masked by hide_password
REGEX_NOPASSWD = re.compile('\/\/[\w\.\-]+[\:\/](.+)(?=@)') # was '(?<=[\:\/])([^:@/]+)(?=@.+)'
|
||||
File diff suppressed because it is too large
Load Diff
+5
-60
@@ -94,7 +94,7 @@ def parse_version(version):
|
||||
return version_tuple
|
||||
|
||||
def read_file(filename, mode='r'):
|
||||
"""Returns content from filename, making sure to close the file explicitly
|
||||
"""Returns content from filename, making sure to close the file explicitly
|
||||
on exit.
|
||||
"""
|
||||
f = open(filename, mode)
|
||||
@@ -105,7 +105,7 @@ def read_file(filename, mode='r'):
|
||||
|
||||
|
||||
def write_file(filename, value, mode='w'):
|
||||
"""Writes <value> to filename, making sure to close the file
|
||||
"""Writes <value> to filename, making sure to close the file
|
||||
explicitly on exit.
|
||||
"""
|
||||
f = open(filename, mode)
|
||||
@@ -193,62 +193,7 @@ def cleanpath(path):
|
||||
|
||||
|
||||
def _extractall(filename, path='.', members=None):
|
||||
# FIXME: this should be dropped because python 2.4 support was dropped
|
||||
if not hasattr(tarfile.TarFile, 'extractall'):
|
||||
from tarfile import ExtractError
|
||||
|
||||
class TarFile(tarfile.TarFile):
|
||||
|
||||
def extractall(self, path='.', members=None):
|
||||
"""Extract all members from the archive to the current working
|
||||
directory and set owner, modification time and permissions on
|
||||
directories afterwards. `path' specifies a different directory
|
||||
to extract to. `members' is optional and must be a subset of the
|
||||
list returned by getmembers().
|
||||
"""
|
||||
|
||||
directories = []
|
||||
if members is None:
|
||||
members = self
|
||||
for tarinfo in members:
|
||||
if tarinfo.isdir():
|
||||
|
||||
# Extract directory with a safe mode, so that
|
||||
# all files below can be extracted as well.
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.join(path,
|
||||
tarinfo.name), 0777)
|
||||
except EnvironmentError:
|
||||
pass
|
||||
directories.append(tarinfo)
|
||||
else:
|
||||
self.extract(tarinfo, path)
|
||||
|
||||
# Reverse sort directories.
|
||||
|
||||
directories.sort(lambda a, b: cmp(a.name, b.name))
|
||||
directories.reverse()
|
||||
|
||||
# Set correct owner, mtime and filemode on directories.
|
||||
|
||||
for tarinfo in directories:
|
||||
path = os.path.join(path, tarinfo.name)
|
||||
try:
|
||||
self.chown(tarinfo, path)
|
||||
self.utime(tarinfo, path)
|
||||
self.chmod(tarinfo, path)
|
||||
except ExtractError, e:
|
||||
if self.errorlevel > 1:
|
||||
raise
|
||||
else:
|
||||
self._dbg(1, 'tarfile: %s' % e)
|
||||
|
||||
_cls = TarFile
|
||||
else:
|
||||
_cls = tarfile.TarFile
|
||||
|
||||
tar = _cls(filename, 'r')
|
||||
tar = tarfile.TarFile(filename, 'r')
|
||||
ret = tar.extractall(path, members)
|
||||
tar.close()
|
||||
return ret
|
||||
@@ -401,7 +346,7 @@ def get_session(request, other_application='admin'):
|
||||
session_filename = os.path.join(
|
||||
up(request.folder), other_application, 'sessions', session_id)
|
||||
if not os.path.exists(session_filename):
|
||||
session_filename = generate(session_filename)
|
||||
session_filename = generate(session_filename)
|
||||
osession = storage.load_storage(session_filename)
|
||||
except Exception, e:
|
||||
osession = storage.Storage()
|
||||
@@ -493,7 +438,7 @@ from settings import global_settings # we need to import settings here because
|
||||
|
||||
|
||||
def abspath(*relpath, **base):
|
||||
"""Converts relative path to absolute path based (by default) on
|
||||
"""Converts relative path to absolute path based (by default) on
|
||||
applications_parent
|
||||
"""
|
||||
path = os.path.join(*relpath)
|
||||
|
||||
+10
-7
@@ -25,6 +25,7 @@ from gluon.serializers import json, custom_json
|
||||
import gluon.settings as settings
|
||||
from gluon.utils import web2py_uuid, secure_dumps, secure_loads
|
||||
from gluon.settings import global_settings
|
||||
from gluon.dal import Field
|
||||
from gluon import recfile
|
||||
import hashlib
|
||||
import portalocker
|
||||
@@ -197,7 +198,7 @@ class Request(Storage):
|
||||
def parse_get_vars(self):
|
||||
"""Takes the QUERY_STRING and unpacks it to get_vars
|
||||
"""
|
||||
query_string = self.env.get('QUERY_STRING', '')
|
||||
query_string = self.env.get('QUERY_STRING', '')
|
||||
dget = urlparse.parse_qs(query_string, keep_blank_values=1) # Ref: https://docs.python.org/2/library/cgi.html#cgi.parse_qs
|
||||
get_vars = self._get_vars = Storage(dget)
|
||||
for (key, value) in get_vars.iteritems():
|
||||
@@ -254,7 +255,7 @@ class Request(Storage):
|
||||
# its value else leave it alone
|
||||
|
||||
pvalue = listify([(_dpk if _dpk.filename else _dpk.value)
|
||||
for _dpk in dpk]
|
||||
for _dpk in dpk]
|
||||
if isinstance(dpk, list) else
|
||||
(dpk if dpk.filename else dpk.value))
|
||||
if len(pvalue):
|
||||
@@ -393,7 +394,7 @@ class Response(Storage):
|
||||
self._view_environment = None
|
||||
self._custom_commit = None
|
||||
self._custom_rollback = None
|
||||
self.generic_patterns = ['*']
|
||||
self.generic_patterns = ['*']
|
||||
self.delimiters = ('{{','}}')
|
||||
self.formstyle = 'table3cols'
|
||||
self.form_label_separator = ': '
|
||||
@@ -550,7 +551,7 @@ class Response(Storage):
|
||||
else:
|
||||
attname = filename
|
||||
headers["Content-Disposition"] = \
|
||||
"attachment;filename=%s" % attname
|
||||
'attachment;filename="%s"' % attname
|
||||
|
||||
if not request:
|
||||
request = current.request
|
||||
@@ -625,6 +626,8 @@ class Response(Storage):
|
||||
return self.stream(stream, chunk_size=chunk_size, request=request)
|
||||
|
||||
def json(self, data, default=None):
|
||||
if 'Content-Type' not in self.headers:
|
||||
self.headers['Content-Type'] = 'application/json'
|
||||
return json(data, default=default or custom_json)
|
||||
|
||||
def xmlrpc(self, request, methods):
|
||||
@@ -691,10 +694,10 @@ class Response(Storage):
|
||||
DIV(BEAUTIFY(current.response), backtotop,
|
||||
_class="w2p-toolbar-hidden", _id="response-%s" % u),
|
||||
DIV(BEAUTIFY(dbtables), backtotop,
|
||||
_class="w2p-toolbar-hidden",_id="db-tables-%s" % u),
|
||||
_class="w2p-toolbar-hidden",_id="db-tables-%s" % u),
|
||||
DIV(BEAUTIFY(dbstats), backtotop,
|
||||
_class="w2p-toolbar-hidden", _id="db-stats-%s" % u),
|
||||
SCRIPT("jQuery('.w2p-toolbar-hidden').hide()"),
|
||||
SCRIPT("jQuery('.w2p-toolbar-hidden').hide()"),
|
||||
_id="totop-%s" % u
|
||||
)
|
||||
|
||||
@@ -868,7 +871,7 @@ class Session(Storage):
|
||||
table_migrate = False
|
||||
tname = tablename + '_' + masterapp
|
||||
table = db.get(tname, None)
|
||||
Field = db.Field
|
||||
#Field = db.Field
|
||||
if table is None:
|
||||
db.define_table(
|
||||
tname,
|
||||
|
||||
+2
-2
@@ -850,7 +850,7 @@ class DIV(XmlComponent):
|
||||
"""
|
||||
components = []
|
||||
for c in self.components:
|
||||
if isinstance(c, allowed_parents):
|
||||
if isinstance(c, (allowed_parents,CAT)):
|
||||
pass
|
||||
elif wrap_lambda:
|
||||
c = wrap_lambda(c)
|
||||
@@ -1864,7 +1864,7 @@ class INPUT(DIV):
|
||||
break
|
||||
if not name in self.errors:
|
||||
self.vars[name] = value
|
||||
return True
|
||||
return True
|
||||
return False
|
||||
|
||||
def _postprocessing(self):
|
||||
|
||||
+3
-3
@@ -93,7 +93,7 @@ from gluon.globals import Request, Response, Session
|
||||
from gluon.compileapp import build_environment, run_models_in, \
|
||||
run_controller_in, run_view_in
|
||||
from gluon.contenttype import contenttype
|
||||
from gluon.dal import BaseAdapter
|
||||
from gluon.dal.base import BaseAdapter
|
||||
from gluon.validators import CRYPT
|
||||
from gluon.html import URL, xmlescape
|
||||
from gluon.utils import is_valid_ip_address, getipaddrinfo
|
||||
@@ -365,8 +365,8 @@ def wsgibase(environ, responder):
|
||||
client = client,
|
||||
folder = abspath('applications', app) + os.sep,
|
||||
ajax = x_req_with == 'xmlhttprequest',
|
||||
cid = env.http_web2py_component_element,
|
||||
is_local = (env.remote_addr in local_hosts and
|
||||
cid = env.http_web2py_component_element,
|
||||
is_local = (env.remote_addr in local_hosts and
|
||||
client == env.remote_addr),
|
||||
is_shell = cmd_opts and cmd_opts.shell,
|
||||
is_sheduler = cmd_opts and cmd_opts.scheduler,
|
||||
|
||||
+63
-63
@@ -1,63 +1,63 @@
|
||||
import os, uuid
|
||||
|
||||
def generate(filename, depth=2, base=512):
|
||||
if os.path.sep in filename:
|
||||
path, filename = os.path.split(filename)
|
||||
else:
|
||||
path = None
|
||||
dummyhash = sum(ord(c)*256**(i % 4) for i,c in enumerate(filename)) % base**depth
|
||||
folders = []
|
||||
for level in range(depth-1,-1,-1):
|
||||
code, dummyhash = divmod(dummyhash, base**level)
|
||||
folders.append("%03x" % code)
|
||||
folders.append(filename)
|
||||
if path:
|
||||
folders.insert(0,path)
|
||||
return os.path.join(*folders)
|
||||
|
||||
def exists(filename, path=None):
|
||||
if os.path.exists(filename):
|
||||
return True
|
||||
if path is None:
|
||||
path, filename = os.path.split(filename)
|
||||
fullfilename = os.path.join(path, generate(filename))
|
||||
if os.path.exists(fullfilename):
|
||||
return True
|
||||
return False
|
||||
|
||||
def remove(filename, path=None):
|
||||
if os.path.exists(filename):
|
||||
return os.unlink(filename)
|
||||
if path is None:
|
||||
path, filename = os.path.split(filename)
|
||||
fullfilename = os.path.join(path, generate(filename))
|
||||
if os.path.exists(fullfilename):
|
||||
return os.unlink(fullfilename)
|
||||
raise IOError
|
||||
|
||||
def open(filename, mode="r", path=None):
|
||||
if not path:
|
||||
path, filename = os.path.split(filename)
|
||||
fullfilename = None
|
||||
if not mode.startswith('w'):
|
||||
fullfilename = os.path.join(path, filename)
|
||||
if not os.path.exists(fullfilename):
|
||||
fullfilename = None
|
||||
if not fullfilename:
|
||||
fullfilename = os.path.join(path, generate(filename))
|
||||
if mode.startswith('w') and not os.path.exists(os.path.dirname(fullfilename)):
|
||||
os.makedirs(os.path.dirname(fullfilename))
|
||||
return file(fullfilename, mode)
|
||||
|
||||
def test():
|
||||
if not os.path.exists('tests'):
|
||||
os.mkdir('tests')
|
||||
for k in range(20):
|
||||
filename = os.path.join('tests',str(uuid.uuid4())+'.test')
|
||||
open(filename, "w").write('test')
|
||||
assert open(filename, "r").read()=='test'
|
||||
if exists(filename):
|
||||
remove(filename)
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
import os, uuid
|
||||
|
||||
def generate(filename, depth=2, base=512):
|
||||
if os.path.sep in filename:
|
||||
path, filename = os.path.split(filename)
|
||||
else:
|
||||
path = None
|
||||
dummyhash = sum(ord(c)*256**(i % 4) for i,c in enumerate(filename)) % base**depth
|
||||
folders = []
|
||||
for level in range(depth-1,-1,-1):
|
||||
code, dummyhash = divmod(dummyhash, base**level)
|
||||
folders.append("%03x" % code)
|
||||
folders.append(filename)
|
||||
if path:
|
||||
folders.insert(0,path)
|
||||
return os.path.join(*folders)
|
||||
|
||||
def exists(filename, path=None):
|
||||
if os.path.exists(filename):
|
||||
return True
|
||||
if path is None:
|
||||
path, filename = os.path.split(filename)
|
||||
fullfilename = os.path.join(path, generate(filename))
|
||||
if os.path.exists(fullfilename):
|
||||
return True
|
||||
return False
|
||||
|
||||
def remove(filename, path=None):
|
||||
if os.path.exists(filename):
|
||||
return os.unlink(filename)
|
||||
if path is None:
|
||||
path, filename = os.path.split(filename)
|
||||
fullfilename = os.path.join(path, generate(filename))
|
||||
if os.path.exists(fullfilename):
|
||||
return os.unlink(fullfilename)
|
||||
raise IOError
|
||||
|
||||
def open(filename, mode="r", path=None):
|
||||
if not path:
|
||||
path, filename = os.path.split(filename)
|
||||
fullfilename = None
|
||||
if not mode.startswith('w'):
|
||||
fullfilename = os.path.join(path, filename)
|
||||
if not os.path.exists(fullfilename):
|
||||
fullfilename = None
|
||||
if not fullfilename:
|
||||
fullfilename = os.path.join(path, generate(filename))
|
||||
if mode.startswith('w') and not os.path.exists(os.path.dirname(fullfilename)):
|
||||
os.makedirs(os.path.dirname(fullfilename))
|
||||
return file(fullfilename, mode)
|
||||
|
||||
def test():
|
||||
if not os.path.exists('tests'):
|
||||
os.mkdir('tests')
|
||||
for k in range(20):
|
||||
filename = os.path.join('tests',str(uuid.uuid4())+'.test')
|
||||
open(filename, "w").write('test')
|
||||
assert open(filename, "r").read()=='test'
|
||||
if exists(filename):
|
||||
remove(filename)
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
|
||||
+2
-1
@@ -878,6 +878,7 @@ class MapUrlIn(object):
|
||||
self.domain_application = None
|
||||
self.domain_controller = None
|
||||
self.domain_function = None
|
||||
self.map_hyphen = base.map_hyphen
|
||||
arg0 = self.harg0
|
||||
if not base.exclusive_domain and base.applications and arg0 in base.applications:
|
||||
self.application = arg0
|
||||
@@ -1256,9 +1257,9 @@ class MapUrlOut(object):
|
||||
"Builds a/c/f from components"
|
||||
acf = ''
|
||||
if self.map_hyphen:
|
||||
self.application = self.application.replace('_', '-')
|
||||
self.controller = self.controller.replace('_', '-')
|
||||
if self.controller != 'static' and not self.controller.startswith('static/'):
|
||||
self.application = self.application.replace('_', '-')
|
||||
self.function = self.function.replace('_', '-')
|
||||
if not self.omit_application:
|
||||
acf += '/' + self.application
|
||||
|
||||
+5
-4
@@ -1850,13 +1850,14 @@ class WSGIWorker(Worker):
|
||||
if data:
|
||||
self.write(data, sections)
|
||||
|
||||
if self.chunked:
|
||||
# If chunked, send our final chunk length
|
||||
self.conn.sendall(b('0\r\n\r\n'))
|
||||
elif not self.headers_sent:
|
||||
if not self.headers_sent:
|
||||
# Send headers if the body was empty
|
||||
self.send_headers('', sections)
|
||||
|
||||
if self.chunked and self.request_method != 'HEAD':
|
||||
# If chunked, send our final chunk length
|
||||
self.conn.sendall(b('0\r\n\r\n'))
|
||||
|
||||
# Don't capture exceptions here. The Worker class handles
|
||||
# them appropriately.
|
||||
finally:
|
||||
|
||||
+5
-14
@@ -10,8 +10,7 @@ Cross-site scripting (XSS) defense
|
||||
-----------------------------------
|
||||
"""
|
||||
|
||||
|
||||
from htmllib import HTMLParser
|
||||
from HTMLParser import HTMLParser
|
||||
from cgi import escape
|
||||
from urlparse import urlparse
|
||||
from formatter import AbstractFormatter
|
||||
@@ -48,11 +47,10 @@ class XssCleaner(HTMLParser):
|
||||
],
|
||||
allowed_attributes={'a': ['href', 'title'], 'img': ['src', 'alt'
|
||||
], 'blockquote': ['type']},
|
||||
fmt=AbstractFormatter,
|
||||
strip_disallowed=False
|
||||
):
|
||||
|
||||
HTMLParser.__init__(self, fmt)
|
||||
HTMLParser.__init__(self)
|
||||
self.result = ''
|
||||
self.open_tags = []
|
||||
self.permitted_tags = [i for i in permitted_tags if i[-1] != '/']
|
||||
@@ -77,7 +75,7 @@ class XssCleaner(HTMLParser):
|
||||
def handle_charref(self, ref):
|
||||
if self.in_disallowed:
|
||||
return
|
||||
elif len(ref) < 7 and ref.isdigit():
|
||||
elif len(ref) < 7 and (ref.isdigit() or ref == 'x27'): # x27 is a special case for apostrophe
|
||||
self.result += '&#%s;' % ref
|
||||
else:
|
||||
self.result += xssescape('&#%s' % ref)
|
||||
@@ -99,8 +97,7 @@ class XssCleaner(HTMLParser):
|
||||
def handle_starttag(
|
||||
self,
|
||||
tag,
|
||||
method,
|
||||
attrs,
|
||||
attrs
|
||||
):
|
||||
if tag not in self.permitted_tags:
|
||||
if self.strip_disallowed:
|
||||
@@ -130,7 +127,7 @@ class XssCleaner(HTMLParser):
|
||||
self.result += bt
|
||||
self.open_tags.insert(0, tag)
|
||||
|
||||
def handle_endtag(self, tag, attrs):
|
||||
def handle_endtag(self, tag):
|
||||
bracketed = '</%s>' % tag
|
||||
if tag not in self.permitted_tags:
|
||||
if self.strip_disallowed:
|
||||
@@ -141,12 +138,6 @@ class XssCleaner(HTMLParser):
|
||||
self.result += bracketed
|
||||
self.open_tags.remove(tag)
|
||||
|
||||
def unknown_starttag(self, tag, attributes):
|
||||
self.handle_starttag(tag, None, attributes)
|
||||
|
||||
def unknown_endtag(self, tag):
|
||||
self.handle_endtag(tag, None)
|
||||
|
||||
def url_is_acceptable(self, url):
|
||||
"""
|
||||
Accepts relative, absolute, and mailto urls
|
||||
|
||||
+47
-29
@@ -87,11 +87,17 @@ if 'WEB2PY_PATH' not in os.environ:
|
||||
os.environ['WEB2PY_PATH'] = path
|
||||
|
||||
try:
|
||||
from gluon.contrib.simplejson import loads, dumps
|
||||
except:
|
||||
# try external module
|
||||
from simplejson import loads, dumps
|
||||
except ImportError:
|
||||
try:
|
||||
# try stdlib (Python >= 2.6)
|
||||
from json import loads, dumps
|
||||
except:
|
||||
# fallback to pure-Python module
|
||||
from gluon.contrib.simplejson import loads, dumps
|
||||
|
||||
IDENTIFIER = "%s#%s" % (socket.gethostname(),os.getpid())
|
||||
IDENTIFIER = "%s#%s" % (socket.gethostname(), os.getpid())
|
||||
|
||||
logger = logging.getLogger('web2py.scheduler.%s' % IDENTIFIER)
|
||||
|
||||
@@ -160,6 +166,7 @@ class TaskReport(object):
|
||||
def __str__(self):
|
||||
return '<TaskReport: %s>' % self.status
|
||||
|
||||
|
||||
class JobGraph(object):
|
||||
"""Experimental: with JobGraph you can specify
|
||||
dependencies amongs tasks"""
|
||||
@@ -170,7 +177,9 @@ class JobGraph(object):
|
||||
|
||||
def add_deps(self, task_parent, task_child):
|
||||
"""Creates a dependency between task_parent and task_child"""
|
||||
self.db.scheduler_task_deps.insert(task_parent=task_parent, task_child=task_child, job_name=self.job_name)
|
||||
self.db.scheduler_task_deps.insert(task_parent=task_parent,
|
||||
task_child=task_child,
|
||||
job_name=self.job_name)
|
||||
|
||||
def validate(self, job_name):
|
||||
"""Validates if all tasks job_name can be completed, i.e. there
|
||||
@@ -195,16 +204,18 @@ class JobGraph(object):
|
||||
try:
|
||||
rtn = []
|
||||
for k, v in nested_dict.items():
|
||||
v.discard(k) # Ignore self dependencies
|
||||
v.discard(k) # Ignore self dependencies
|
||||
extra_items_in_deps = reduce(set.union, nested_dict.values()) - set(nested_dict.keys())
|
||||
nested_dict.update(dict((item, set()) for item in extra_items_in_deps))
|
||||
while True:
|
||||
ordered = set(item for item,dep in nested_dict.items() if not dep)
|
||||
ordered = set(item for item, dep in nested_dict.items() if not dep)
|
||||
if not ordered:
|
||||
break
|
||||
rtn.append(ordered)
|
||||
nested_dict = dict((item, (dep - ordered)) for item, dep in nested_dict.items()
|
||||
if item not in ordered)
|
||||
nested_dict = dict(
|
||||
(item, (dep - ordered)) for item, dep in nested_dict.items()
|
||||
if item not in ordered
|
||||
)
|
||||
assert not nested_dict, "A cyclic dependency exists amongst %r" % nested_dict
|
||||
db.commit()
|
||||
return rtn
|
||||
@@ -212,6 +223,7 @@ class JobGraph(object):
|
||||
db.rollback()
|
||||
return None
|
||||
|
||||
|
||||
def demo_function(*argv, **kwargs):
|
||||
""" test function """
|
||||
for i in range(argv[0]):
|
||||
@@ -268,7 +280,7 @@ def executor(queue, task, out):
|
||||
def write(self, data):
|
||||
self.out_queue.put(data)
|
||||
|
||||
W2P_TASK = Storage({'id' : task.task_id, 'uuid' : task.uuid})
|
||||
W2P_TASK = Storage({'id': task.task_id, 'uuid': task.uuid})
|
||||
stdout = LogOutput(out)
|
||||
try:
|
||||
if task.app:
|
||||
@@ -293,7 +305,7 @@ def executor(queue, task, out):
|
||||
raise NameError(
|
||||
"name '%s' not found in scheduler's environment" % f)
|
||||
#Inject W2P_TASK into environment
|
||||
_env.update({'W2P_TASK' : W2P_TASK})
|
||||
_env.update({'W2P_TASK': W2P_TASK})
|
||||
#Inject W2P_TASK into current
|
||||
from gluon import current
|
||||
current.W2P_TASK = W2P_TASK
|
||||
@@ -357,8 +369,7 @@ class MetaScheduler(threading.Thread):
|
||||
|
||||
start = time.time()
|
||||
|
||||
while p.is_alive() and (
|
||||
not task.timeout or time.time() - start < task.timeout):
|
||||
while p.is_alive() and (not task.timeout or time.time() - start < task.timeout):
|
||||
if tout:
|
||||
try:
|
||||
logger.debug(' partial output saved')
|
||||
@@ -568,7 +579,7 @@ class Scheduler(MetaScheduler):
|
||||
queue=0,
|
||||
distribution=None,
|
||||
workers=0)
|
||||
) #dict holding statistics
|
||||
) # dict holding statistics
|
||||
|
||||
from gluon import current
|
||||
current._scheduler = self
|
||||
@@ -598,7 +609,7 @@ class Scheduler(MetaScheduler):
|
||||
|
||||
def define_tables(self, db, migrate):
|
||||
"""Defines Scheduler tables structure"""
|
||||
from gluon.dal import DEFAULT
|
||||
from gluon.dal.base import DEFAULT
|
||||
logger.debug('defining tables (migrate=%s)', migrate)
|
||||
now = self.now
|
||||
db.define_table(
|
||||
@@ -631,7 +642,7 @@ class Scheduler(MetaScheduler):
|
||||
Field('prevent_drift', 'boolean', default=False,
|
||||
comment='Cron-like start_times between runs'),
|
||||
Field('timeout', 'integer', default=60, comment='seconds',
|
||||
requires=IS_INT_IN_RANGE(0, None)),
|
||||
requires=IS_INT_IN_RANGE(1, None)),
|
||||
Field('sync_output', 'integer', default=0,
|
||||
comment="update output every n sec: 0=never",
|
||||
requires=IS_INT_IN_RANGE(0, None)),
|
||||
@@ -740,7 +751,7 @@ class Scheduler(MetaScheduler):
|
||||
contention and retries `assign_task` after 0.5 seconds
|
||||
"""
|
||||
logger.debug('Assigning tasks...')
|
||||
db.commit() #db.commit() only for Mysql
|
||||
db.commit() # db.commit() only for Mysql
|
||||
x = 0
|
||||
while x < 10:
|
||||
try:
|
||||
@@ -761,7 +772,7 @@ class Scheduler(MetaScheduler):
|
||||
contention and retries `pop_task` after 0.5 seconds
|
||||
"""
|
||||
db = self.db
|
||||
db.commit() #another nifty db.commit() only for Mysql
|
||||
db.commit() # another nifty db.commit() only for Mysql
|
||||
x = 0
|
||||
while x < 10:
|
||||
try:
|
||||
@@ -906,7 +917,8 @@ class Scheduler(MetaScheduler):
|
||||
times_failed=0
|
||||
)
|
||||
db(st.id == task.task_id).update(**d)
|
||||
self.update_dependencies(db, task.task_id)
|
||||
if status == COMPLETED:
|
||||
self.update_dependencies(db, task.task_id)
|
||||
else:
|
||||
st_mapping = {'FAILED': 'FAILED',
|
||||
'TIMEOUT': 'TIMEOUT',
|
||||
@@ -1076,6 +1088,8 @@ class Scheduler(MetaScheduler):
|
||||
#build workers as dict of groups
|
||||
wkgroups = {}
|
||||
for w in all_workers:
|
||||
if w.worker_stats['status'] == 'RUNNING':
|
||||
continue
|
||||
group_names = w.group_names
|
||||
for gname in group_names:
|
||||
if gname not in wkgroups:
|
||||
@@ -1089,8 +1103,9 @@ class Scheduler(MetaScheduler):
|
||||
db(
|
||||
(st.status.belongs((QUEUED, ASSIGNED))) &
|
||||
(st.stop_time < now)
|
||||
).update(status=EXPIRED)
|
||||
).update(status=EXPIRED)
|
||||
|
||||
#calculate dependencies
|
||||
deps_with_no_deps = db(
|
||||
(sd.can_visit == False) &
|
||||
(~sd.task_child.belongs(
|
||||
@@ -1162,7 +1177,7 @@ class Scheduler(MetaScheduler):
|
||||
if not task.task_name:
|
||||
d['task_name'] = task.function_name
|
||||
db(
|
||||
(st.id==task.id) &
|
||||
(st.id == task.id) &
|
||||
(st.status.belongs((QUEUED, ASSIGNED)))
|
||||
).update(**d)
|
||||
wkgroups[gname]['workers'][myw]['c'] += 1
|
||||
@@ -1186,13 +1201,16 @@ class Scheduler(MetaScheduler):
|
||||
# should only sleep until next available task
|
||||
|
||||
def set_worker_status(self, group_names=None, action=ACTIVE,
|
||||
exclude=None, limit=None):
|
||||
exclude=None, limit=None, worker_name=None):
|
||||
"""Internal function to set worker's status"""
|
||||
ws = self.db.scheduler_worker
|
||||
if not group_names:
|
||||
group_names = self.group_names
|
||||
elif isinstance(group_names, str):
|
||||
group_names = [group_names]
|
||||
if worker_name:
|
||||
self.db(ws.worker_name == worker_name).update(status=action)
|
||||
return
|
||||
exclusion = exclude and exclude.append(action) or [action]
|
||||
if not limit:
|
||||
for group in group_names:
|
||||
@@ -1203,12 +1221,12 @@ class Scheduler(MetaScheduler):
|
||||
else:
|
||||
for group in group_names:
|
||||
workers = self.db(
|
||||
(ws.group_names.contains(group)) &
|
||||
(~ws.status.belongs(exclusion))
|
||||
(ws.group_names.contains(group)) &
|
||||
(~ws.status.belongs(exclusion))
|
||||
)._select(ws.id, limitby=(0,limit))
|
||||
self.db(ws.id.belongs(workers)).update(status=action)
|
||||
|
||||
def disable(self, group_names=None, limit=None):
|
||||
def disable(self, group_names=None, limit=None, worker_name=None):
|
||||
"""Sets DISABLED on the workers processing `group_names` tasks.
|
||||
A DISABLED worker will be kept alive but it won't be able to process
|
||||
any waiting tasks, essentially putting it to sleep.
|
||||
@@ -1219,7 +1237,7 @@ class Scheduler(MetaScheduler):
|
||||
exclude=[DISABLED, KILL, TERMINATE],
|
||||
limit=limit)
|
||||
|
||||
def resume(self, group_names=None, limit=None):
|
||||
def resume(self, group_names=None, limit=None, worker_name=None):
|
||||
"""Wakes a worker up (it will be able to process queued tasks)"""
|
||||
self.set_worker_status(
|
||||
group_names=group_names,
|
||||
@@ -1227,7 +1245,7 @@ class Scheduler(MetaScheduler):
|
||||
exclude=[KILL, TERMINATE],
|
||||
limit=limit)
|
||||
|
||||
def terminate(self, group_names=None, limit=None):
|
||||
def terminate(self, group_names=None, limit=None, worker_name=None):
|
||||
"""Sets TERMINATE as worker status. The worker will wait for any
|
||||
currently running tasks to be executed and then it will exit gracefully
|
||||
"""
|
||||
@@ -1237,7 +1255,7 @@ class Scheduler(MetaScheduler):
|
||||
exclude=[KILL],
|
||||
limit=limit)
|
||||
|
||||
def kill(self, group_names=None, limit=None):
|
||||
def kill(self, group_names=None, limit=None, worker_name=None):
|
||||
"""Sets KILL as worker status. The worker will be killed even if it's
|
||||
processing a task."""
|
||||
self.set_worker_status(
|
||||
@@ -1312,7 +1330,7 @@ class Scheduler(MetaScheduler):
|
||||
have all fields == None
|
||||
|
||||
"""
|
||||
from gluon.dal import Query
|
||||
from gluon.dal.objects import Query
|
||||
sr, st = self.db.scheduler_run, self.db.scheduler_task
|
||||
if isinstance(ref, (int, long)):
|
||||
q = st.id == ref
|
||||
@@ -1335,7 +1353,7 @@ class Scheduler(MetaScheduler):
|
||||
**dict(orderby=orderby,
|
||||
left=left,
|
||||
limitby=(0, 1))
|
||||
).first()
|
||||
).first()
|
||||
if row and output:
|
||||
row.result = row.scheduler_run.run_result and \
|
||||
loads(row.scheduler_run.run_result,
|
||||
|
||||
+41
-23
@@ -11,12 +11,19 @@ from gluon.languages import lazyT
|
||||
import gluon.contrib.rss2 as rss2
|
||||
|
||||
try:
|
||||
import simplejson as json_parser # try external module
|
||||
# try external module
|
||||
import simplejson as json_parser
|
||||
except ImportError:
|
||||
try:
|
||||
import json as json_parser # try stdlib (Python >= 2.6)
|
||||
# try stdlib (Python >= 2.6)
|
||||
import json as json_parser
|
||||
except:
|
||||
import gluon.contrib.simplejson as json_parser # fallback to pure-Python module
|
||||
# fallback to pure-Python module
|
||||
import gluon.contrib.simplejson as json_parser
|
||||
|
||||
# simplejson >= 2.1.3 needs use_decimal = False
|
||||
# to stringify decimals
|
||||
decimal_false_option = json_parser.__version__.split('.') >= ['2', '1', '3']
|
||||
|
||||
have_yaml = True
|
||||
try:
|
||||
@@ -24,21 +31,23 @@ try:
|
||||
except ImportError:
|
||||
have_yaml = False
|
||||
|
||||
|
||||
def cast_keys(o, cast=str, encoding="utf-8"):
|
||||
""" Builds a new object with <cast> type keys
|
||||
|
||||
Arguments:
|
||||
o is the object input
|
||||
cast (defaults to str) is an object type or function
|
||||
which supports conversion such as:
|
||||
|
||||
>>> converted = cast(o)
|
||||
|
||||
encoding (defaults to utf-8) is the encoding for unicode
|
||||
keys. This is not used for custom cast functions
|
||||
|
||||
Use this funcion if you are in Python < 2.6.5
|
||||
"""
|
||||
Builds a new object with <cast> type keys.
|
||||
Use this function if you are in Python < 2.6.5
|
||||
This avoids syntax errors when unpacking dictionary arguments.
|
||||
|
||||
Args:
|
||||
o: is the object input
|
||||
cast: (defaults to str) is an object type or function
|
||||
which supports conversion such as:
|
||||
|
||||
converted = cast(o)
|
||||
|
||||
encoding: (defaults to utf-8) is the encoding for unicode
|
||||
keys. This is not used for custom cast functions
|
||||
|
||||
"""
|
||||
|
||||
if isinstance(o, (dict, Storage)):
|
||||
@@ -65,6 +74,7 @@ def cast_keys(o, cast=str, encoding="utf-8"):
|
||||
newobj = o
|
||||
return newobj
|
||||
|
||||
|
||||
def loads_json(o, unicode_keys=True, **kwargs):
|
||||
# deserialize a json string
|
||||
result = json_parser.loads(o, **kwargs)
|
||||
@@ -74,6 +84,7 @@ def loads_json(o, unicode_keys=True, **kwargs):
|
||||
encoding=kwargs.get("encoding", "utf-8"))
|
||||
return result
|
||||
|
||||
|
||||
def custom_json(o):
|
||||
if hasattr(o, 'custom_json') and callable(o.custom_json):
|
||||
return o.custom_json()
|
||||
@@ -89,6 +100,8 @@ def custom_json(o):
|
||||
return str(o)
|
||||
elif isinstance(o, XmlComponent):
|
||||
return str(o)
|
||||
elif isinstance(o, set):
|
||||
return list(o)
|
||||
elif hasattr(o, 'as_list') and callable(o.as_list):
|
||||
return o.as_list()
|
||||
elif hasattr(o, 'as_dict') and callable(o.as_dict):
|
||||
@@ -118,12 +131,15 @@ def xml(value, encoding='UTF-8', key='document', quote=True):
|
||||
|
||||
|
||||
def json(value, default=custom_json):
|
||||
if decimal_false_option:
|
||||
value = json_parser.dumps(value, default=default, use_decimal=False)
|
||||
else:
|
||||
value = json_parser.dumps(value, default=default)
|
||||
|
||||
# replace JavaScript incompatible spacing
|
||||
# http://timelessrepo.com/json-isnt-a-javascript-subset
|
||||
return json_parser.dumps(value,
|
||||
default=default).replace(ur'\u2028',
|
||||
'\\u2028').replace(ur'\2029',
|
||||
'\\u2029')
|
||||
return value.replace(ur'\u2028', '\\u2028').replace(ur'\2029', '\\u2029')
|
||||
|
||||
|
||||
def csv(value):
|
||||
return ''
|
||||
@@ -131,7 +147,6 @@ def csv(value):
|
||||
|
||||
def ics(events, title=None, link=None, timeshift=0, calname=True,
|
||||
**ignored):
|
||||
import datetime
|
||||
title = title or '(unknown)'
|
||||
if link and not callable(link):
|
||||
link = lambda item, prefix=link: prefix.replace(
|
||||
@@ -183,9 +198,12 @@ def rss(feed):
|
||||
def yaml(data):
|
||||
if have_yaml:
|
||||
return yamlib.dump(data)
|
||||
else: raise ImportError("No YAML serializer available")
|
||||
else:
|
||||
raise ImportError("No YAML serializer available")
|
||||
|
||||
|
||||
def loads_yaml(data):
|
||||
if have_yaml:
|
||||
return yamlib.load(data)
|
||||
else: raise ImportError("No YAML serializer available")
|
||||
else:
|
||||
raise ImportError("No YAML serializer available")
|
||||
|
||||
+2
-2
@@ -28,7 +28,7 @@ from gluon.restricted import RestrictedError
|
||||
from gluon.globals import Request, Response, Session
|
||||
from gluon.storage import Storage, List
|
||||
from gluon.admin import w2p_unpack
|
||||
from gluon.dal import BaseAdapter
|
||||
from gluon.dal.base import BaseAdapter
|
||||
|
||||
logger = logging.getLogger("web2py")
|
||||
|
||||
@@ -129,7 +129,7 @@ def env(
|
||||
request.function)
|
||||
if global_settings.cmd_options:
|
||||
ip = global_settings.cmd_options.ip
|
||||
port = global_settings.cmd_options.port
|
||||
port = global_settings.cmd_options.port
|
||||
else:
|
||||
ip, port = '127.0.0.1', '8000'
|
||||
request.env.http_host = '%s:%s' % (ip,port)
|
||||
|
||||
+14
-1
@@ -12,4 +12,17 @@ Just for backward compatibility
|
||||
"""
|
||||
__all__ = ['DAL', 'Field', 'DRIVERS']
|
||||
|
||||
from dal import DAL, Field, Table, Query, Set, Expression, Row, Rows, DRIVERS, BaseAdapter, SQLField, SQLTable, SQLXorable, SQLQuery, SQLSet, SQLRows, SQLStorage, SQLDB, GQLDB, SQLALL, SQLCustomType
|
||||
from dal import DAL, Field, SQLCustomType
|
||||
from dal.adapters.base import BaseAdapter, DRIVERS
|
||||
from dal.objects import Table, Query, Set, Expression, Row, Rows
|
||||
from dal.helpers.classes import SQLALL
|
||||
|
||||
SQLDB = DAL
|
||||
GQLDB = DAL
|
||||
SQLField = Field
|
||||
SQLTable = Table
|
||||
SQLXorable = Expression
|
||||
SQLQuery = Query
|
||||
SQLSet = Set
|
||||
SQLRows = Rows
|
||||
SQLStorage = Row
|
||||
|
||||
+161
-98
@@ -14,28 +14,32 @@ Holds:
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
from gluon.http import HTTP
|
||||
from gluon.html import XmlComponent
|
||||
from gluon.html import XML, SPAN, TAG, A, DIV, CAT, UL, LI, TEXTAREA, BR, IMG, SCRIPT, P
|
||||
from gluon.html import FORM, INPUT, LABEL, OPTION, SELECT, COL, COLGROUP
|
||||
from gluon.html import TABLE, THEAD, TBODY, TR, TD, TH, STYLE, DEFAULT_PASSWORD_DISPLAY
|
||||
from gluon.html import URL, truncate_string, FIELDSET
|
||||
from gluon.dal import DAL, Field, Table, Row, CALLABLETYPES, smart_query, \
|
||||
bar_encode, Reference, Expression, SQLCustomType, sqlhtml_validators, \
|
||||
DEFAULT
|
||||
from gluon.storage import Storage
|
||||
from gluon.utils import md5_hash
|
||||
from gluon.validators import IS_EMPTY_OR, IS_NOT_EMPTY, IS_LIST_OF, IS_DATE, \
|
||||
IS_DATETIME, IS_INT_IN_RANGE, IS_FLOAT_IN_RANGE, IS_STRONG
|
||||
|
||||
import gluon.serializers as serializers
|
||||
import datetime
|
||||
import urllib
|
||||
import re
|
||||
import cStringIO
|
||||
|
||||
import os
|
||||
from gluon.http import HTTP, redirect
|
||||
from gluon.html import XmlComponent, truncate_string
|
||||
from gluon.html import XML, SPAN, TAG, A, DIV, CAT, UL, LI, TEXTAREA, BR, IMG
|
||||
from gluon.html import FORM, INPUT, LABEL, OPTION, SELECT, COL, COLGROUP
|
||||
from gluon.html import TABLE, THEAD, TBODY, TR, TD, TH, STYLE, SCRIPT
|
||||
from gluon.html import URL, FIELDSET, P, DEFAULT_PASSWORD_DISPLAY
|
||||
from gluon.dal import DAL, Field
|
||||
from gluon.dal.base import DEFAULT
|
||||
from gluon.dal.objects import Table, Row, Expression
|
||||
from gluon.dal.adapters.base import CALLABLETYPES
|
||||
from gluon.dal.helpers.methods import smart_query, bar_encode, sqlhtml_validators
|
||||
from gluon.dal.helpers.classes import Reference, SQLCustomType
|
||||
from gluon.storage import Storage
|
||||
from gluon.utils import md5_hash
|
||||
from gluon.validators import IS_EMPTY_OR, IS_NOT_EMPTY, IS_LIST_OF, IS_DATE
|
||||
from gluon.validators import IS_DATETIME, IS_INT_IN_RANGE, IS_FLOAT_IN_RANGE
|
||||
from gluon.validators import IS_STRONG
|
||||
|
||||
import gluon.serializers as serializers
|
||||
from gluon.globals import current
|
||||
from gluon.http import redirect
|
||||
|
||||
try:
|
||||
import gluon.settings as settings
|
||||
@@ -46,7 +50,7 @@ widget_class = re.compile('^\w*')
|
||||
|
||||
|
||||
def add_class(a, b):
|
||||
return a+' '+b if a else b
|
||||
return a + ' ' + b if a else b
|
||||
|
||||
|
||||
def represent(field, value, record):
|
||||
@@ -82,11 +86,11 @@ def show_if(cond):
|
||||
if not cond:
|
||||
return None
|
||||
base = "%s_%s" % (cond.first.tablename, cond.first.name)
|
||||
if ((cond.op.__name__ == 'EQ' and cond.second == True) or
|
||||
(cond.op.__name__ == 'NE' and cond.second == False)):
|
||||
if ((cond.op.__name__ == 'EQ' and cond.second is True) or
|
||||
(cond.op.__name__ == 'NE' and cond.second is False)):
|
||||
return base, ":checked"
|
||||
if ((cond.op.__name__ == 'EQ' and cond.second == False) or
|
||||
(cond.op.__name__ == 'NE' and cond.second == True)):
|
||||
if ((cond.op.__name__ == 'EQ' and cond.second is False) or
|
||||
(cond.op.__name__ == 'NE' and cond.second is True)):
|
||||
return base, ":not(:checked)"
|
||||
if cond.op.__name__ == 'EQ':
|
||||
return base, "[value='%s']" % cond.second
|
||||
@@ -302,7 +306,8 @@ class ListWidget(StringWidget):
|
||||
_class = 'string'
|
||||
requires = field.requires if isinstance(
|
||||
field.requires, (IS_NOT_EMPTY, IS_LIST_OF)) else None
|
||||
if isinstance(value, str): value = [value]
|
||||
if isinstance(value, str):
|
||||
value = [value]
|
||||
nvalue = value or ['']
|
||||
items = [LI(INPUT(_id=_id, _class=_class, _name=_name,
|
||||
value=v, hideerror=k < len(nvalue) - 1,
|
||||
@@ -348,7 +353,6 @@ class RadioWidget(OptionsWidget):
|
||||
else:
|
||||
value = str(value)
|
||||
|
||||
|
||||
attr = cls._attributes(field, {}, **attributes)
|
||||
attr['_class'] = add_class(attr.get('_class'), 'web2py_radiowidget')
|
||||
|
||||
@@ -623,7 +627,7 @@ class AutocompleteWidget(object):
|
||||
self.help_fields = help_fields or []
|
||||
self.help_string = help_string
|
||||
if self.help_fields and not self.help_string:
|
||||
self.help_string = ' '.join('%%(%s)s'%f.name
|
||||
self.help_string = ' '.join('%%(%s)s' % f.name
|
||||
for f in self.help_fields)
|
||||
|
||||
self.request = request
|
||||
@@ -897,7 +901,7 @@ def formstyle_bootstrap3_inline_factory(col_label_size=3):
|
||||
label['_for'] = None
|
||||
label.insert(0, controls)
|
||||
_controls = DIV(DIV(label, _help, _class="checkbox"),
|
||||
_class="%s %s" % (offset_class, col_class))
|
||||
_class="%s %s" % (offset_class, col_class))
|
||||
label = ''
|
||||
elif isinstance(controls, SELECT):
|
||||
controls.add_class('form-control')
|
||||
@@ -1129,7 +1133,7 @@ class SQLFORM(FORM):
|
||||
if fieldname.find('.') >= 0:
|
||||
continue
|
||||
|
||||
field = (self.table[fieldname] if fieldname in self.table.fields
|
||||
field = (self.table[fieldname] if fieldname in self.table.fields
|
||||
else self.extra_fields[fieldname])
|
||||
comment = None
|
||||
|
||||
@@ -1181,7 +1185,7 @@ class SQLFORM(FORM):
|
||||
cond = readonly or \
|
||||
(not ignore_rw and not field.writable and field.readable)
|
||||
|
||||
if default is not None and not cond:
|
||||
if default is not None and not cond:
|
||||
default = field.formatter(default)
|
||||
|
||||
dspval = default
|
||||
@@ -1189,10 +1193,6 @@ class SQLFORM(FORM):
|
||||
|
||||
if cond:
|
||||
|
||||
# ## if field.re field.requires = sqlhtml_validators(field) field.requires = sqlhtml_validators(field)present is available else
|
||||
# ## ignore blob and preview uploaded images
|
||||
# ## format everything else
|
||||
|
||||
if field.represent:
|
||||
inp = represent(field, default, record)
|
||||
elif field.type in ['blob']:
|
||||
@@ -1204,6 +1204,13 @@ class SQLFORM(FORM):
|
||||
field, default, _disabled=True)
|
||||
else:
|
||||
inp = field.formatter(default)
|
||||
if getattr(field, 'show_if', None):
|
||||
if not isinstance(inp, DIV):
|
||||
# Create a container for string represents
|
||||
inp = DIV(inp, _id='%s_%s' % (field.tablename, field.name))
|
||||
trigger, cond = show_if(field.show_if)
|
||||
inp['_data-show-trigger'] = trigger
|
||||
inp['_data-show-if'] = cond
|
||||
elif field.type == 'upload':
|
||||
if field.widget:
|
||||
inp = field.widget(field, default, upload)
|
||||
@@ -1269,7 +1276,7 @@ class SQLFORM(FORM):
|
||||
(olname.replace('.', '__') + SQLFORM.ID_ROW_SUFFIX,
|
||||
'', widget, col3.get(olname, '')))
|
||||
self.custom.linkto[olname.replace('.', '__')] = widget
|
||||
# </block>
|
||||
# </block>
|
||||
|
||||
# when deletable, add delete? checkbox
|
||||
self.custom.delete = self.custom.deletable = ''
|
||||
@@ -1288,16 +1295,15 @@ class SQLFORM(FORM):
|
||||
)
|
||||
xfields.append(
|
||||
(self.FIELDKEY_DELETE_RECORD + SQLFORM.ID_ROW_SUFFIX,
|
||||
LABEL(
|
||||
LABEL(
|
||||
T(delete_label), sep,
|
||||
_for=self.FIELDKEY_DELETE_RECORD,
|
||||
_id=self.FIELDKEY_DELETE_RECORD + \
|
||||
SQLFORM.ID_LABEL_SUFFIX),
|
||||
_id=self.FIELDKEY_DELETE_RECORD +
|
||||
SQLFORM.ID_LABEL_SUFFIX),
|
||||
widget,
|
||||
col3.get(self.FIELDKEY_DELETE_RECORD, '')))
|
||||
self.custom.delete = self.custom.deletable = widget
|
||||
|
||||
|
||||
# when writable, add submit button
|
||||
self.custom.submit = ''
|
||||
if not readonly:
|
||||
@@ -1477,7 +1483,7 @@ class SQLFORM(FORM):
|
||||
# that does not pass validation, yet it should be deleted
|
||||
for fieldname in self.fields:
|
||||
|
||||
field = (self.table[fieldname]
|
||||
field = (self.table[fieldname]
|
||||
if fieldname in self.table.fields
|
||||
else self.extra_fields[fieldname])
|
||||
### this is a workaround! widgets should always have default not None!
|
||||
@@ -1795,7 +1801,7 @@ class SQLFORM(FORM):
|
||||
# treat ftype 'decimal' as 'double'
|
||||
# (this fixes problems but needs refactoring!
|
||||
if isinstance(field.type, SQLCustomType):
|
||||
ftype = field.type.type.split(' ')[0]
|
||||
ftype = field.type.type.split(' ')[0]
|
||||
else:
|
||||
ftype = field.type.split(' ')[0]
|
||||
if ftype.startswith('decimal'): ftype = 'double'
|
||||
@@ -1807,37 +1813,38 @@ class SQLFORM(FORM):
|
||||
label = isinstance(
|
||||
field.label, str) and T(field.label) or field.label
|
||||
selectfields.append(OPTION(label, _value=str(field)))
|
||||
# At web2py level SQLCustomType field values are treated as normal web2py types
|
||||
if isinstance(field.type, SQLCustomType):
|
||||
field_type = field.type.type
|
||||
else:
|
||||
field_type = field.type
|
||||
|
||||
operators = SELECT(*[OPTION(T(option), _value=option) for option in options],_class='form-control')
|
||||
_id = "%s_%s" % (value_id, name)
|
||||
if field.type == 'boolean':
|
||||
value_input = SQLFORM.widgets.boolean.widget(field, field.default, _id=_id,_class='form-control')
|
||||
elif field.type == 'double':
|
||||
value_input = SQLFORM.widgets.double.widget(field, field.default, _id=_id,_class='form-control')
|
||||
elif field.type == 'time':
|
||||
value_input = SQLFORM.widgets.time.widget(field, field.default, _id=_id,_class='form-control')
|
||||
elif field.type == 'date':
|
||||
if field_type in ['boolean', 'double', 'time', 'integer']:
|
||||
value_input = SQLFORM.widgets[field_type].widget(field, field.default, _id=_id,_class='form-control')
|
||||
elif field_type == 'date':
|
||||
iso_format = {'_data-w2p_date_format' : '%Y-%m-%d'}
|
||||
value_input = SQLFORM.widgets.date.widget(field, field.default, _id=_id,_class='form-control', **iso_format)
|
||||
elif field.type == 'datetime':
|
||||
elif field_type == 'datetime':
|
||||
iso_format = {'_data-w2p_datetime_format' : '%Y-%m-%d %H:%M:%S'}
|
||||
value_input = SQLFORM.widgets.datetime.widget(field, field.default, _id=_id,_class='form-control', **iso_format)
|
||||
elif (field.type.startswith('reference ') or
|
||||
field.type.startswith('list:reference ')) and \
|
||||
elif (field_type.startswith('reference ') or
|
||||
field_type.startswith('list:reference ')) and \
|
||||
hasattr(field.requires, 'options'):
|
||||
value_input = SELECT(
|
||||
*[OPTION(v, _value=k)
|
||||
for k, v in field.requires.options()],
|
||||
_class='form-control',
|
||||
**dict(_id=_id))
|
||||
elif field.type == 'integer' or \
|
||||
field.type.startswith('reference ') or \
|
||||
field.type.startswith('list:integer') or \
|
||||
field.type.startswith('list:reference '):
|
||||
elif field_type.startswith('reference ') or \
|
||||
field_type.startswith('list:integer') or \
|
||||
field_type.startswith('list:reference '):
|
||||
value_input = SQLFORM.widgets.integer.widget(field, field.default, _id=_id,_class='form-control')
|
||||
else:
|
||||
value_input = INPUT(
|
||||
_type='text', _id=_id,
|
||||
_class=(field.type or '')+' form-control')
|
||||
_type='text', _id=_id,
|
||||
_class="%s %s" % ((field_type or ''), 'form-control'))
|
||||
|
||||
new_button = INPUT(
|
||||
_type="button", _value=T('New Search'), _class="btn btn-default", _title=T('Start building a new search'),
|
||||
@@ -1872,7 +1879,15 @@ class SQLFORM(FORM):
|
||||
function %(prefix)s_build_query(aggregator,a) {
|
||||
var b=a.replace('.','-');
|
||||
var option = jQuery('#%(field_id)s_'+b+' select').val();
|
||||
var value = jQuery('#%(value_id)s_'+b).val().replace('"','\\\\"');
|
||||
var value;
|
||||
var $value_item = jQuery('#%(value_id)s_'+b);
|
||||
if ($value_item.is(':checkbox')){
|
||||
if ($value_item.is(':checked'))
|
||||
value = 'True';
|
||||
else value = 'False';
|
||||
}
|
||||
else
|
||||
{ value = $value_item.val().replace('"','\\\\"')}
|
||||
var s=a+' '+option+' "'+value+'"';
|
||||
var k=jQuery('#%(keywords_id)s');
|
||||
var v=k.val();
|
||||
@@ -1972,9 +1987,9 @@ class SQLFORM(FORM):
|
||||
buttonback='icon leftarrow icon-arrow-left glyphicon glyphicon-arrow-left',
|
||||
buttonexport='icon downarrow icon-download glyphicon glyphicon-download',
|
||||
buttondelete='icon trash icon-trash glyphicon glyphicon-trash',
|
||||
buttonedit='icon pen icon-pencil glyphicon glyphicon-arrow-pencil',
|
||||
buttonedit='icon pen icon-pencil glyphicon glyphicon-pencil',
|
||||
buttontable='icon rightarrow icon-arrow-right glyphicon glyphicon-arrow-right',
|
||||
buttonview='icon magnifier icon-zoom-in glyphicon glyphicon-arrow-zoom-in',
|
||||
buttonview='icon magnifier icon-zoom-in glyphicon glyphicon-zoom-in',
|
||||
)
|
||||
elif not isinstance(ui, dict):
|
||||
raise RuntimeError('SQLFORM.grid ui argument must be a dictionary')
|
||||
@@ -1992,6 +2007,16 @@ class SQLFORM(FORM):
|
||||
details = details and not groupby
|
||||
rows = None
|
||||
|
||||
# see issue 1980. Basically we can have keywords in get_vars
|
||||
# (i.e. when the search term is propagated through page=2&keywords=abc)
|
||||
# but if there is keywords in post_vars (i.e. POSTing a search request)
|
||||
# the one in get_vars should be replaced by the new one
|
||||
keywords = ''
|
||||
if 'keywords' in request.post_vars:
|
||||
keywords = request.post_vars.keywords
|
||||
elif 'keywords' in request.get_vars:
|
||||
keywords = request.get_vars.keywords
|
||||
|
||||
def fetch_count(dbset):
|
||||
##FIXME for google:datastore cache_count is ignored
|
||||
## if it's not an integer
|
||||
@@ -2048,7 +2073,7 @@ class SQLFORM(FORM):
|
||||
'/'.join(str(a) for a in args) == '/'.join(request.args) or
|
||||
URL.verify(request, user_signature=user_signature,
|
||||
hash_vars=False) or
|
||||
(request.args(len(args))=='view' and not logged)):
|
||||
(request.args(len(args))=='view' and not logged)):
|
||||
session.flash = T('not authorized')
|
||||
redirect(referrer)
|
||||
|
||||
@@ -2093,8 +2118,8 @@ class SQLFORM(FORM):
|
||||
else:
|
||||
fields = []
|
||||
columns = []
|
||||
filter1 = lambda f:isinstance(f, Field)
|
||||
filter2 = lambda f:isinstance(f, Field) and f.readable
|
||||
filter1 = lambda f: isinstance(f, Field)
|
||||
filter2 = lambda f: isinstance(f, Field) and f.readable
|
||||
for table in tables:
|
||||
fields += filter(filter1, table)
|
||||
columns += filter(filter2, table)
|
||||
@@ -2108,15 +2133,18 @@ class SQLFORM(FORM):
|
||||
if groupby is None:
|
||||
field_id = tables[0]._id
|
||||
elif groupby and isinstance(groupby, Field):
|
||||
field_id = groupby #take the field passed as groupby
|
||||
#take the field passed as groupby
|
||||
field_id = groupby
|
||||
elif groupby and isinstance(groupby, Expression):
|
||||
field_id = groupby.first #take the first groupby field
|
||||
while not(isinstance(field_id, Field)): # Navigate to the first Field of the expression
|
||||
#take the first groupby field
|
||||
field_id = groupby.first
|
||||
while not(isinstance(field_id, Field)):
|
||||
# Navigate to the first Field of the expression
|
||||
field_id = field_id.first
|
||||
table = field_id.table
|
||||
tablename = table._tablename
|
||||
if not any(str(f) == str(field_id) for f in fields):
|
||||
fields = [f for f in fields]+[field_id]
|
||||
fields = [f for f in fields] + [field_id]
|
||||
if upload == '<default>':
|
||||
upload = lambda filename: url(args=['download', filename])
|
||||
if request.args(-2) == 'download':
|
||||
@@ -2280,8 +2308,10 @@ class SQLFORM(FORM):
|
||||
expcolumns = [str(f) for f in columns]
|
||||
selectable_columns = [str(f) for f in columns if not isinstance(f, Field.Virtual)]
|
||||
if export_type.endswith('with_hidden_cols'):
|
||||
#expcolumns = [] start with the visible columns, which includes visible virtual fields
|
||||
selectable_columns = [] #like expcolumns but excluding virtual
|
||||
# expcolumns = [] start with the visible columns, which
|
||||
# includes visible virtual fields
|
||||
selectable_columns = []
|
||||
#like expcolumns but excluding virtual
|
||||
for table in tables:
|
||||
for field in table:
|
||||
if field.readable and field.tablename in tablenames:
|
||||
@@ -2289,19 +2319,25 @@ class SQLFORM(FORM):
|
||||
expcolumns.append(str(field))
|
||||
if not(isinstance(field, Field.Virtual)):
|
||||
selectable_columns.append(str(field))
|
||||
#look for virtual fields not displayed (and virtual method fields to be added here?)
|
||||
#look for virtual fields not displayed (and virtual method
|
||||
#fields to be added here?)
|
||||
for (field_name, field) in table.iteritems():
|
||||
if isinstance(field, Field.Virtual) and not str(field) in expcolumns:
|
||||
expcolumns.append(str(field))
|
||||
|
||||
if export_type in exportManager and exportManager[export_type]:
|
||||
if request.vars.keywords:
|
||||
if keywords:
|
||||
try:
|
||||
#the query should be constructed using searchable fields but not virtual fields
|
||||
#the query should be constructed using searchable
|
||||
#fields but not virtual fields
|
||||
sfields = reduce(lambda a, b: a + b,
|
||||
[[f for f in t if f.readable and not isinstance(f, Field.Virtual)] for t in tables])
|
||||
dbset = dbset(SQLFORM.build_query(
|
||||
sfields, request.vars.get('keywords', '')))
|
||||
#use custom_query using searchable
|
||||
if callable(searchable):
|
||||
dbset = dbset(searchable(sfields, keywords))
|
||||
else:
|
||||
dbset = dbset(SQLFORM.build_query(
|
||||
sfields, keywords))
|
||||
rows = dbset.select(left=left, orderby=orderby,
|
||||
cacheable=True, *selectable_columns)
|
||||
except Exception, e:
|
||||
@@ -2313,9 +2349,12 @@ class SQLFORM(FORM):
|
||||
|
||||
value = exportManager[export_type]
|
||||
clazz = value[0] if hasattr(value, '__getitem__') else value
|
||||
rows.colnames = expcolumns # expcolumns is all cols to be exported including virtual fields
|
||||
# expcolumns is all cols to be exported including virtual fields
|
||||
rows.colnames = expcolumns
|
||||
oExp = clazz(rows)
|
||||
filename = '.'.join(('rows', oExp.file_ext))
|
||||
export_filename = \
|
||||
request.vars.get('_export_filename') or 'rows'
|
||||
filename = '.'.join((export_filename, oExp.file_ext))
|
||||
response.headers['Content-Type'] = oExp.content_type
|
||||
response.headers['Content-Disposition'] = \
|
||||
'attachment;filename=' + filename + ';'
|
||||
@@ -2327,8 +2366,7 @@ class SQLFORM(FORM):
|
||||
elif not request.vars.records:
|
||||
request.vars.records = []
|
||||
|
||||
session['_web2py_grid_referrer_' + formname] = \
|
||||
url2(vars=request.get_vars)
|
||||
session['_web2py_grid_referrer_' + formname] = url2(vars=request.get_vars)
|
||||
console = DIV(_class='web2py_console %(header)s %(cornertop)s' % ui)
|
||||
error = None
|
||||
if create:
|
||||
@@ -2353,19 +2391,22 @@ class SQLFORM(FORM):
|
||||
spanel_id = '%s_query_fields' % prefix
|
||||
sfields_id = '%s_query_panel' % prefix
|
||||
skeywords_id = '%s_keywords' % prefix
|
||||
## hidden fields to presever keywords in url after the submit
|
||||
hidden_fields = [INPUT(_type='hidden', _value=value, _name=key) for key, value in request.get_vars.items() if key not in ['keywords', 'page']]
|
||||
search_widget = lambda sfield, url: CAT(FORM(
|
||||
INPUT(_name='keywords', _value=request.vars.keywords,
|
||||
INPUT(_name='keywords', _value=keywords,
|
||||
_id=skeywords_id,_class='form-control',
|
||||
_onfocus="jQuery('#%s').change();jQuery('#%s').slideDown();" % (spanel_id, sfields_id) if advanced_search else ''
|
||||
),
|
||||
INPUT(_type='submit', _value=T('Search'), _class="btn btn-default"),
|
||||
INPUT(_type='submit', _value=T('Clear'), _class="btn btn-default",
|
||||
_onclick="jQuery('#%s').val('');" % skeywords_id),
|
||||
*hidden_fields,
|
||||
_method="GET", _action=url), search_menu)
|
||||
# TODO vars from the url should be removed, they are not used by the submit
|
||||
form = search_widget and search_widget(sfields, url()) or ''
|
||||
console.append(add)
|
||||
console.append(form)
|
||||
keywords = request.vars.get('keywords', '')
|
||||
try:
|
||||
if callable(searchable):
|
||||
subquery = searchable(sfields, keywords)
|
||||
@@ -2425,7 +2466,7 @@ class SQLFORM(FORM):
|
||||
elif key == ordermatch[1:]:
|
||||
marker = sorter_icons[1]
|
||||
header = A(header, marker, _href=url(vars=dict(
|
||||
keywords=request.vars.keywords or '',
|
||||
keywords=keywords,
|
||||
order=key)), cid=request.cid)
|
||||
headcols.append(TH(header, _class=ui.get('default')))
|
||||
|
||||
@@ -2463,17 +2504,22 @@ class SQLFORM(FORM):
|
||||
if paginate and dbset._db._adapter.dbengine == 'google:datastore':
|
||||
cursor = request.vars.cursor or True
|
||||
limitby = (0, paginate)
|
||||
try: page = int(request.vars.page or 1)-1
|
||||
except ValueError: page = 0
|
||||
elif paginate and paginate<nrows:
|
||||
try: page = int(request.vars.page or 1)-1
|
||||
except ValueError: page = 0
|
||||
limitby = (paginate*page, paginate*(page+1))
|
||||
try:
|
||||
page = int(request.vars.page or 1) - 1
|
||||
except ValueError:
|
||||
page = 0
|
||||
elif paginate and paginate < nrows:
|
||||
try:
|
||||
page = int(request.vars.page or 1) - 1
|
||||
except ValueError:
|
||||
page = 0
|
||||
limitby = (paginate * page, paginate * (page + 1))
|
||||
else:
|
||||
limitby = None
|
||||
try:
|
||||
table_fields = [field for field in fields
|
||||
if (field.tablename in tablenames and not(isinstance(field, Field.Virtual)))]
|
||||
if (field.tablename in tablenames and
|
||||
not(isinstance(field, Field.Virtual)))]
|
||||
if dbset._db._adapter.dbengine == 'google:datastore':
|
||||
rows = dbset.select(left=left, orderby=orderby,
|
||||
groupby=groupby, limitby=limitby,
|
||||
@@ -2511,8 +2557,15 @@ class SQLFORM(FORM):
|
||||
paginator.append(LI('page %s' % (page+1)))
|
||||
if next_cursor:
|
||||
d = dict(page=page+2, cursor=next_cursor)
|
||||
if order: d['order'] = order
|
||||
if request.vars.keywords: d['keywords'] = request.vars.keywords
|
||||
if order:
|
||||
d['order'] = order
|
||||
# see issue 1980, also at the top of the definition
|
||||
# if keyworkds is in request.vars, we don't need to
|
||||
# copy over the keywords parameter in the links for pagination
|
||||
if 'keywords' in request.vars and not keywords:
|
||||
d['keywords'] = ''
|
||||
elif keywords:
|
||||
d['keywords'] = keywords
|
||||
paginator.append(LI(
|
||||
A('next', _href=url(vars=d), cid=request.cid)))
|
||||
elif paginate and paginate < nrows:
|
||||
@@ -2528,8 +2581,13 @@ class SQLFORM(FORM):
|
||||
d = dict(page=p + 1)
|
||||
if order:
|
||||
d['order'] = order
|
||||
if request.vars.keywords:
|
||||
d['keywords'] = request.vars.keywords
|
||||
# see issue 1980, also at the top of the definition
|
||||
# if keyworkds is in request.vars, we don't need to
|
||||
# copy over the keywords parameter in the links for pagination
|
||||
if 'keywords' in request.vars and not keywords:
|
||||
d['keywords'] = ''
|
||||
elif keywords:
|
||||
d['keywords'] = keywords
|
||||
return A(name, _href=url(vars=d), cid=request.cid)
|
||||
NPAGES = 5 # window is 2*NPAGES
|
||||
if page > NPAGES + 1:
|
||||
@@ -2657,7 +2715,8 @@ class SQLFORM(FORM):
|
||||
_style='width:100%;overflow-x:auto;-ms-overflow-x:scroll')
|
||||
if selectable:
|
||||
if not callable(selectable):
|
||||
#now expect that selectable and related parameters are iterator (list, tuple, etc)
|
||||
#now expect that selectable and related parameters are
|
||||
#iterator (list, tuple, etc)
|
||||
inputs = []
|
||||
for i, submit_info in enumerate(selectable):
|
||||
submit_text = submit_info[0]
|
||||
@@ -2705,7 +2764,7 @@ class SQLFORM(FORM):
|
||||
link = url2(vars=dict(
|
||||
order=request.vars.order or '',
|
||||
_export_type=k,
|
||||
keywords=request.vars.keywords or ''))
|
||||
keywords=keywords or ''))
|
||||
export_links.append(A(T(label), _href=link, _title=title, _class='btn btn-default'))
|
||||
export_menu = \
|
||||
DIV(T('Export:'), _class="w2p_export_menu", *export_links)
|
||||
@@ -2794,7 +2853,7 @@ class SQLFORM(FORM):
|
||||
elif callable(table._format):
|
||||
return table._format(row)
|
||||
else:
|
||||
return '#'+str(row.id)
|
||||
return '#' + str(row.id)
|
||||
try:
|
||||
nargs = len(args) + 1
|
||||
previous_tablename, previous_fieldname, previous_id = \
|
||||
@@ -2873,7 +2932,7 @@ class SQLFORM(FORM):
|
||||
opts = [OPTION(T('References')+':', _value='')]
|
||||
linked = []
|
||||
if linked_tables:
|
||||
for item in linked_tables:
|
||||
for item in linked_tables:
|
||||
tb = None
|
||||
if isinstance(item, Table) and item._tablename in check:
|
||||
tablename = item._tablename
|
||||
@@ -2998,12 +3057,16 @@ class SQLTABLE(TABLE):
|
||||
return
|
||||
REGEX_TABLE_DOT_FIELD = sqlrows.db._adapter.REGEX_TABLE_DOT_FIELD
|
||||
if not columns:
|
||||
columns = [c for c in sqlrows.colnames if REGEX_TABLE_DOT_FIELD.match(c)]
|
||||
columns = list(sqlrows.colnames)
|
||||
if headers == 'fieldname:capitalize':
|
||||
headers = {}
|
||||
for c in columns:
|
||||
(t, f) = REGEX_TABLE_DOT_FIELD.match(c).groups()
|
||||
headers[t + '.' + f] = f.replace('_', ' ').title()
|
||||
tfmatch=REGEX_TABLE_DOT_FIELD.match(c)
|
||||
if tfmatch:
|
||||
(t, f) = REGEX_TABLE_DOT_FIELD.match(c).groups()
|
||||
headers[t + '.' + f] = f.replace('_', ' ').title()
|
||||
else:
|
||||
headers[c]=c
|
||||
elif headers == 'labels':
|
||||
headers = {}
|
||||
for c in columns:
|
||||
@@ -3022,7 +3085,7 @@ class SQLTABLE(TABLE):
|
||||
headers = {}
|
||||
else:
|
||||
for c in columns: # new implement dict
|
||||
c = '.'.join(REGEX_TABLE_DOT_FIELD.match(c).groups())
|
||||
c = str(c)
|
||||
if isinstance(headers.get(c, c), dict):
|
||||
coldict = headers.get(c, c)
|
||||
attrcol = dict()
|
||||
|
||||
+3
-3
@@ -151,10 +151,10 @@ class StorageList(Storage):
|
||||
|
||||
def __getattr__(self, key):
|
||||
if key in self:
|
||||
return getattr(self, key)
|
||||
return self.get(key)
|
||||
else:
|
||||
r = []
|
||||
setattr(self, key, r)
|
||||
self[key] = r
|
||||
return r
|
||||
|
||||
|
||||
@@ -195,7 +195,7 @@ class Messages(Settings):
|
||||
def __getattr__(self, key):
|
||||
value = self[key]
|
||||
if isinstance(value, str):
|
||||
return str(self.T(value))
|
||||
return self.T(value)
|
||||
return value
|
||||
|
||||
class FastStorage(dict):
|
||||
|
||||
+1
-1
@@ -279,7 +279,7 @@ class TemplateParser(object):
|
||||
self.context = context
|
||||
|
||||
# allow optional alternative delimiters
|
||||
|
||||
|
||||
if delimiters != self.default_delimiters:
|
||||
escaped_delimiters = (escape(delimiters[0]),
|
||||
escape(delimiters[1]))
|
||||
|
||||
+15
-10
@@ -1,27 +1,32 @@
|
||||
import os, sys
|
||||
|
||||
import os
|
||||
import sys
|
||||
from test_http import *
|
||||
from test_cache import *
|
||||
|
||||
NOSQL = any([name in (os.getenv("DB") or "")
|
||||
for name in ("datastore", "mongodb", "imap")])
|
||||
if NOSQL:
|
||||
from test_dal_nosql import *
|
||||
else:
|
||||
from test_dal import *
|
||||
|
||||
from test_contenttype import *
|
||||
from test_fileutils import *
|
||||
from test_html import *
|
||||
from test_is_url import *
|
||||
from test_languages import *
|
||||
from test_router import *
|
||||
from test_routes import *
|
||||
from test_storage import *
|
||||
from test_serializers import *
|
||||
from test_template import *
|
||||
from test_validators import *
|
||||
from test_utils import *
|
||||
from test_contribs import *
|
||||
from test_web import *
|
||||
from test_tools import *
|
||||
|
||||
|
||||
if sys.version[:3] == '2.7':
|
||||
from test_old_doctests import *
|
||||
|
||||
|
||||
NOSQL = any([name in (os.getenv("DB") or "")
|
||||
for name in ("datastore", "mongodb", "imap")])
|
||||
|
||||
if NOSQL:
|
||||
from test_dal_nosql import *
|
||||
else:
|
||||
from test_dal import *
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import sys
|
||||
|
||||
def fix_sys_path(current_path):
|
||||
"""
|
||||
logic to have always the correct sys.path
|
||||
'', web2py/gluon, web2py/site-packages, web2py/ ...
|
||||
"""
|
||||
|
||||
def add_path_first(path):
|
||||
sys.path = [path] + [p for p in sys.path if (
|
||||
not p == path and not p == (path + '/'))]
|
||||
|
||||
path = os.path.dirname(os.path.abspath(current_path))
|
||||
|
||||
if not os.path.isfile(os.path.join(path,'web2py.py')):
|
||||
i = 0
|
||||
while i<10:
|
||||
i += 1
|
||||
if os.path.exists(os.path.join(path,'web2py.py')):
|
||||
break
|
||||
path = os.path.abspath(os.path.join(path, '..'))
|
||||
|
||||
paths = [path,
|
||||
os.path.abspath(os.path.join(path, 'site-packages')),
|
||||
os.path.abspath(os.path.join(path, 'gluon')),
|
||||
'']
|
||||
[add_path_first(path) for path in paths]
|
||||
+47
-33
@@ -4,43 +4,15 @@
|
||||
"""
|
||||
Unit tests for gluon.cache
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
from fix_path import fix_sys_path
|
||||
|
||||
|
||||
def fix_sys_path():
|
||||
"""
|
||||
logic to have always the correct sys.path
|
||||
'', web2py/gluon, web2py/site-packages, web2py/ ...
|
||||
"""
|
||||
|
||||
def add_path_first(path):
|
||||
sys.path = [path] + [p for p in sys.path if (
|
||||
not p == path and not p == (path + '/'))]
|
||||
|
||||
path = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
if not os.path.isfile(os.path.join(path,'web2py.py')):
|
||||
i = 0
|
||||
while i<10:
|
||||
i += 1
|
||||
if os.path.exists(os.path.join(path,'web2py.py')):
|
||||
break
|
||||
path = os.path.abspath(os.path.join(path, '..'))
|
||||
|
||||
paths = [path,
|
||||
os.path.abspath(os.path.join(path, 'site-packages')),
|
||||
os.path.abspath(os.path.join(path, 'gluon')),
|
||||
'']
|
||||
[add_path_first(path) for path in paths]
|
||||
|
||||
fix_sys_path()
|
||||
fix_sys_path(__file__)
|
||||
|
||||
|
||||
from storage import Storage
|
||||
from cache import CacheInRam, CacheOnDisk
|
||||
from cache import CacheInRam, CacheOnDisk, Cache
|
||||
|
||||
oldcwd = None
|
||||
|
||||
@@ -65,7 +37,6 @@ class TestCache(unittest.TestCase):
|
||||
def testCacheInRam(self):
|
||||
|
||||
# defaults to mode='http'
|
||||
|
||||
cache = CacheInRam()
|
||||
self.assertEqual(cache('a', lambda: 1, 0), 1)
|
||||
self.assertEqual(cache('a', lambda: 2, 100), 1)
|
||||
@@ -76,11 +47,24 @@ class TestCache(unittest.TestCase):
|
||||
cache.clear()
|
||||
self.assertEqual(cache('a', lambda: 3, 100), 3)
|
||||
self.assertEqual(cache('a', lambda: 4, 0), 4)
|
||||
#test singleton behaviour
|
||||
cache = CacheInRam()
|
||||
cache.clear()
|
||||
self.assertEqual(cache('a', lambda: 3, 100), 3)
|
||||
self.assertEqual(cache('a', lambda: 4, 0), 4)
|
||||
#test key deletion
|
||||
cache('a', None)
|
||||
self.assertEqual(cache('a', lambda: 5, 100), 5)
|
||||
#test increment
|
||||
self.assertEqual(cache.increment('a'), 6)
|
||||
self.assertEqual(cache('a', lambda: 1, 100), 6)
|
||||
cache.increment('b')
|
||||
self.assertEqual(cache('b', lambda: 'x', 100), 1)
|
||||
|
||||
|
||||
def testCacheOnDisk(self):
|
||||
|
||||
# defaults to mode='http'
|
||||
|
||||
s = Storage({'application': 'admin',
|
||||
'folder': 'applications/admin'})
|
||||
cache = CacheOnDisk(s)
|
||||
@@ -93,7 +77,37 @@ class TestCache(unittest.TestCase):
|
||||
cache.clear()
|
||||
self.assertEqual(cache('a', lambda: 3, 100), 3)
|
||||
self.assertEqual(cache('a', lambda: 4, 0), 4)
|
||||
#test singleton behaviour
|
||||
cache = CacheOnDisk(s)
|
||||
cache.clear()
|
||||
self.assertEqual(cache('a', lambda: 3, 100), 3)
|
||||
self.assertEqual(cache('a', lambda: 4, 0), 4)
|
||||
#test key deletion
|
||||
cache('a', None)
|
||||
self.assertEqual(cache('a', lambda: 5, 100), 5)
|
||||
#test increment
|
||||
self.assertEqual(cache.increment('a'), 6)
|
||||
self.assertEqual(cache('a', lambda: 1, 100), 6)
|
||||
cache.increment('b')
|
||||
self.assertEqual(cache('b', lambda: 'x', 100), 1)
|
||||
|
||||
def testCacheWithPrefix(self):
|
||||
s = Storage({'application': 'admin',
|
||||
'folder': 'applications/admin'})
|
||||
cache = Cache(s)
|
||||
prefix = cache.with_prefix(cache.ram,'prefix')
|
||||
self.assertEqual(prefix('a', lambda: 1, 0), 1)
|
||||
self.assertEqual(prefix('a', lambda: 2, 100), 1)
|
||||
self.assertEqual(cache.ram('prefixa', lambda: 2, 100), 1)
|
||||
|
||||
def testRegex(self):
|
||||
cache = CacheInRam()
|
||||
self.assertEqual(cache('a1', lambda: 1, 0), 1)
|
||||
self.assertEqual(cache('a2', lambda: 2, 100), 2)
|
||||
cache.clear(regex=r'a*')
|
||||
self.assertEqual(cache('a1', lambda: 2, 0), 2)
|
||||
self.assertEqual(cache('a2', lambda: 3, 100), 3)
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
setUpModule() # pre-python-2.7
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user