more docstring fixes

This commit is contained in:
niphlod
2014-01-29 23:01:52 +01:00
parent ce2d958f9a
commit 41a4de081f
8 changed files with 189 additions and 109 deletions

View File

@@ -268,7 +268,7 @@ def plugin_pack(app, plugin_name, request):
Args:
app(str): application name
plugin_name(str): the name of the plugin without plugin_ prefix
plugin_name(str): the name of the plugin without `plugin_` prefix
request: the current request app
Returns:

View File

@@ -7,7 +7,7 @@
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Functions required to execute app components
--------------------------------------------
---------------------------------------------
Note:
FOR INTERNAL USE ONLY

View File

@@ -1,11 +1,17 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Support for smart import syntax for web2py applications
-------------------------------------------------------
"""
import __builtin__
import os
import sys
import threading
import traceback
from gluon import current
NATIVE_IMPORTER = __builtin__.__import__
@@ -35,10 +41,10 @@ class CustomImportException(ImportError):
def custom_importer(name, globals=None, locals=None, fromlist=None, level=-1):
"""
The web2py custom importer. Like the standard Python importer but it
tries to transform import statements as something like
web2py's custom importer. It behaves like the standard Python importer but
it tries to transform import statements as something like
"import applications.app_name.modules.x".
If the import failed, fall back on naive_importer
If the import fails, it falls back on naive_importer
"""
globals = globals or {}
@@ -102,7 +108,7 @@ def custom_importer(name, globals=None, locals=None, fromlist=None, level=-1):
class TrackImporter(object):
"""
An importer tracking the date of the module files and reloading them when
they have changed.
they are changed.
"""
THREAD_LOCAL = threading.local()
@@ -143,7 +149,7 @@ class TrackImporter(object):
def _reload_check(self, name, globals, locals, level):
"""
Update the date associated to the module and reload the module if
the file has changed.
the file changed.
"""
module = sys.modules.get(name)
file = self._get_module_file(module)

View File

@@ -2,11 +2,13 @@
# -*- coding: utf-8 -*-
"""
This file is part of the web2py Web Framework
Developed by Massimo Di Pierro <mdipierro@cs.depaul.edu>,
limodou <limodou@gmail.com> and srackham <srackham@gmail.com>.
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
| This file is part of the web2py Web Framework
| Developed by Massimo Di Pierro <mdipierro@cs.depaul.edu>,
| limodou <limodou@gmail.com> and srackham <srackham@gmail.com>.
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Debugger support classes
------------------------
"""
import logging
@@ -23,7 +25,7 @@ class Pipe(Queue.Queue):
Queue.Queue.__init__(self, *args, **kwargs)
def write(self, data):
logger.debug("debug %s writting %s" % (self.__name, data))
logger.debug("debug %s writing %s" % (self.__name, data))
self.put(data)
def flush(self):
@@ -183,9 +185,8 @@ parent_queue, child_queue = Queue.Queue(), Queue.Queue()
front_conn = qdb.QueuePipe("parent", parent_queue, child_queue)
child_conn = qdb.QueuePipe("child", child_queue, parent_queue)
web_debugger = WebDebugger(front_conn) # frontend
qdb_debugger = qdb.Qdb(
pipe=child_conn, redirect_stdio=False, skip=None) # backend
web_debugger = WebDebugger(front_conn) # frontend
qdb_debugger = qdb.Qdb(pipe=child_conn, redirect_stdio=False, skip=None) # backend
dbg = qdb_debugger
# enable getting context (stack, globals/locals) at interaction

View File

@@ -1,7 +1,8 @@
import codecs
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Caller will hand this library a buffer and ask it to either convert
"""
Caller will hand this library a buffer and ask it to either convert
it or auto-detect the type.
Based on http://code.activestate.com/recipes/52257/
@@ -9,6 +10,8 @@ Based on http://code.activestate.com/recipes/52257/
Licensed under the PSF License
"""
import codecs
# None represents a potentially variable byte. "##" in the XML spec...
autodetect_dict = { # bytepattern : ("name",
(0x00, 0x00, 0xFE, 0xFF): ("ucs4_be"),
@@ -25,15 +28,15 @@ autodetect_dict = { # bytepattern : ("name",
def autoDetectXMLEncoding(buffer):
""" buffer -> encoding_name
The buffer should be at least 4 bytes long.
Returns None if encoding cannot be detected.
Note that encoding_name might not have an installed
decoder (e.g. EBCDIC)
Returns None if encoding cannot be detected.
Note that encoding_name might not have an installed
decoder (e.g. EBCDIC)
"""
# a more efficient implementation would not decode the whole
# buffer at once but otherwise we'd have to decode a character at
# a time looking for the quote character...that's a pain
encoding = "utf_8" # according to the XML spec, this is the default
encoding = "utf_8" # according to the XML spec, this is the default
# this code successively tries to refine the default
# whenever it fails to refine, it falls back to
# the last place encoding was set.
@@ -49,7 +52,7 @@ def autoDetectXMLEncoding(buffer):
if enc_info:
encoding = enc_info # we've got a guess... these are
#the new defaults
#the new defaults
# try to find a more precise encoding using xml declaration
secret_decoder_ring = codecs.lookup(encoding)[1]

View File

@@ -2,9 +2,12 @@
# -*- coding: utf-8 -*-
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
File operations
---------------
"""
import storage
@@ -45,7 +48,15 @@ __all__ = [
def parse_semantic(version="Version 1.99.0-rc.1+timestamp.2011.09.19.08.23.26"):
"http://semver.org/"
"""Parses a version string according to http://semver.org/ rules
Args:
version(str): the SemVer string
Returns:
tuple: Major, Minor, Patch, Release, Build Date
"""
re_version = re.compile('(\d+)\.(\d+)\.(\d+)(\-(?P<pre>[^\s+]*))?(\+(?P<build>\S*))')
m = re_version.match(version.strip().split()[-1])
if not m:
@@ -58,6 +69,15 @@ def parse_semantic(version="Version 1.99.0-rc.1+timestamp.2011.09.19.08.23.26"):
return (a, b, c, pre_release, build)
def parse_legacy(version="Version 1.99.0 (2011-09-19 08:23:26)"):
"""Parses "legacy" version string
Args:
version(str): the version string
Returns:
tuple: Major, Minor, Patch, Release, Build Date
"""
re_version = re.compile('[^\d]+ (\d+)\.(\d+)\.(\d+)\s*\((?P<datetime>.+?)\)\s*(?P<type>[a-z]+)?')
m = re_version.match(version)
a, b, c = int(m.group(1)), int(m.group(2)), int(m.group(3)),
@@ -66,13 +86,17 @@ def parse_legacy(version="Version 1.99.0 (2011-09-19 08:23:26)"):
return (a, b, c, pre_release, build)
def parse_version(version):
"""Attempts to parse SemVer, fallbacks on legacy
"""
version_tuple = parse_semantic(version)
if not version_tuple:
version_tuple = parse_legacy(version)
return version_tuple
def read_file(filename, mode='r'):
"returns content from filename, making sure to close the file explicitly on exit."
"""Returns content from filename, making sure to close the file explicitly
on exit.
"""
f = open(filename, mode)
try:
return f.read()
@@ -81,7 +105,9 @@ def read_file(filename, mode='r'):
def write_file(filename, value, mode='w'):
"writes <value> to filename, making sure to close the file explicitly on exit."
"""Writes <value> to filename, making sure to close the file
explicitly on exit.
"""
f = open(filename, mode)
try:
return f.write(value)
@@ -90,7 +116,8 @@ def write_file(filename, value, mode='w'):
def readlines_file(filename, mode='r'):
"applies .split('\n') to the output of read_file()"
"""Applies .split('\n') to the output of `read_file()`
"""
return read_file(filename, mode).split('\n')
@@ -112,8 +139,8 @@ def listdir(
maxnum = None,
):
"""
like os.listdir() but you can specify a regex pattern to filter files.
if add_dirs is True, the returned items will have the full path.
Like `os.listdir()` but you can specify a regex pattern to filter files.
If `add_dirs` is True, the returned items will have the full path.
"""
if path[-1:] != os.path.sep:
path = path + os.path.sep
@@ -122,7 +149,7 @@ def listdir(
else:
n = 0
regex = re.compile(expression)
items = []
items = []
for (root, dirs, files) in os.walk(path, topdown=True):
for dir in dirs[:]:
if dir.startswith('.'):
@@ -141,6 +168,8 @@ def listdir(
def recursive_unlink(f):
"""Deletes `f`. If it's a folder, its contents will also be deleted
"""
if os.path.isdir(f):
for s in os.listdir(f):
recursive_unlink(os.path.join(f, s))
@@ -150,8 +179,7 @@ def recursive_unlink(f):
def cleanpath(path):
"""
turns any expression/path into a valid filename. replaces / with _ and
"""Turns any expression/path into a valid filename. replaces / with _ and
removes special characters.
"""
@@ -165,6 +193,7 @@ def cleanpath(path):
def _extractall(filename, path='.', members=None):
# FIXME: this should be dropped because python 2.4 support was dropped
if not hasattr(tarfile.TarFile, 'extractall'):
from tarfile import ExtractError
@@ -172,10 +201,10 @@ def _extractall(filename, path='.', members=None):
def extractall(self, path='.', members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
directories = []
@@ -226,8 +255,7 @@ def _extractall(filename, path='.', members=None):
def tar(file, dir, expression='^.+$', filenames=None):
"""
tars dir into file, only tars file that match expression
"""Tars dir into file, only tars files that match expression
"""
tar = tarfile.TarFile(file, 'w')
@@ -240,14 +268,21 @@ def tar(file, dir, expression='^.+$', filenames=None):
tar.close()
def untar(file, dir):
"""
untar file into dir
"""Untar file into dir
"""
_extractall(file, dir)
def w2p_pack(filename, path, compiled=False, filenames=None):
"""Packs a web2py application.
Args:
filename(str): path to the resulting archive
path(str): path to the application
compiled(bool): if `True` packs the compiled version
filenames(list): adds filenames to the archive
"""
filename = abspath(filename)
path = abspath(path)
tarname = filename + '.tar'
@@ -296,10 +331,12 @@ def w2p_unpack(filename, path, delete_tar=True):
def w2p_pack_plugin(filename, path, plugin_name):
"""Pack the given plugin into a w2p file.
Will match files at:
"""Packs the given plugin into a w2p file.
Will match files at::
<path>/*/plugin_[name].*
<path>/*/plugin_[name]/*
"""
filename = abspath(filename)
path = abspath(path)
@@ -329,9 +366,8 @@ def w2p_unpack_plugin(filename, path, delete_tar=True):
def tar_compiled(file, dir, expression='^.+$'):
"""
used to tar a compiled application.
the content of models, views, controllers is not stored in the tar file.
"""Used to tar a compiled application.
The content of models, views, controllers is not stored in the tar file.
"""
tar = tarfile.TarFile(file, 'w')
@@ -357,7 +393,7 @@ def up(path):
def get_session(request, other_application='admin'):
""" checks that user is authorized to access other_application"""
"""Checks that user is authorized to access other_application"""
if request.application == other_application:
raise KeyError
try:
@@ -370,7 +406,7 @@ def get_session(request, other_application='admin'):
return osession
def set_session(request, session, other_application='admin'):
""" checks that user is authorized to access other_application"""
"""Checks that user is authorized to access other_application"""
if request.application == other_application:
raise KeyError
session_id = request.cookies['session_id_' + other_application].value
@@ -380,7 +416,7 @@ def set_session(request, session, other_application='admin'):
def check_credentials(request, other_application='admin',
expiration=60 * 60, gae_login=True):
""" checks that user is authorized to access other_application"""
"""Checks that user is authorized to access other_application"""
if request.env.web2py_runtime_gae:
from google.appengine.api import users
if users.is_current_user_admin():
@@ -455,7 +491,9 @@ from settings import global_settings # we need to import settings here because
def abspath(*relpath, **base):
"convert relative path to absolute path based (by default) on applications_parent"
"""Converts relative path to absolute path based (by default) on
applications_parent
"""
path = os.path.join(*relpath)
gluon = base.get('gluon', False)
if os.path.isabs(path):

View File

@@ -2,9 +2,9 @@
# -*- coding: utf-8 -*-
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Contains the classes for the global used variables:
@@ -96,7 +96,7 @@ def sorting_dumps(obj, protocol=None):
def copystream_progress(request, chunk_size=10 ** 5):
"""
copies request.env.wsgi_input into request.body
Copies request.env.wsgi_input into request.body
and stores progress upload status in cache_ram
X-Progress-ID:length and X-Progress-ID:uploaded
"""
@@ -143,7 +143,7 @@ def copystream_progress(request, chunk_size=10 ** 5):
class Request(Storage):
"""
defines the request object and the default values of its members
Defines the request object and the default values of its members
- env: environment variables, by gluon.main.wsgibase()
- cookies
@@ -155,7 +155,10 @@ class Request(Storage):
- function
- args
- extension
- now: datetime.datetime.today()
- now: datetime.datetime.now()
- utcnow : datetime.datetime.utcnow()
- is_local
- is_https
- restful()
"""
@@ -183,6 +186,8 @@ class Request(Storage):
def parse_get_vars(self):
"""Takes the QUERY_STRING and unpacks it to get_vars
"""
query_string = self.env.get('QUERY_STRING','')
dget = cgi.parse_qs(query_string, keep_blank_values=1)
get_vars = self._get_vars = Storage(dget)
@@ -191,6 +196,9 @@ class Request(Storage):
get_vars[key] = value[0]
def parse_post_vars(self):
"""Takes the body of the request and unpacks it into
post_vars. application/json is also automatically parsed
"""
env = self.env
post_vars = self._post_vars = Storage()
body = self.body
@@ -233,10 +241,10 @@ class Request(Storage):
if key is None:
continue # not sure why cgi.FieldStorage returns None key
dpk = dpost[key]
# if an element is not a file replace it with
# if an element is not a file replace it with
# its value else leave it alone
pvalue = listify([(_dpk if _dpk.filename else _dpk.value)
pvalue = listify([(_dpk if _dpk.filename else _dpk.value)
for _dpk in dpk]
if isinstance(dpk, list) else
(dpk if dpk.filename else dpk.value))
@@ -253,6 +261,8 @@ class Request(Storage):
return self._body
def parse_all_vars(self):
"""Merges get_vars and post_vars to vars
"""
self._vars = copy.copy(self.get_vars)
for key,value in self.post_vars.iteritems():
if not key in self._vars:
@@ -264,21 +274,24 @@ class Request(Storage):
@property
def get_vars(self):
"lazily parse the query string into get_vars"
"""Lazily parses the query string into get_vars
"""
if self._get_vars is None:
self.parse_get_vars()
return self._get_vars
@property
def post_vars(self):
"lazily parse the body into post_vars"
"""Lazily parse the body into post_vars
"""
if self._post_vars is None:
self.parse_post_vars()
return self._post_vars
@property
def vars(self):
"lazily parse all get_vars and post_vars to fill vars"
"""Lazily parses all get_vars and post_vars to fill vars
"""
if self._vars is None:
self.parse_all_vars()
return self._vars
@@ -306,8 +319,8 @@ class Request(Storage):
def requires_https(self):
"""
If request comes in over HTTP, redirect it to HTTPS
and secure the session.
If request comes in over HTTP, redirects it to HTTPS
and secures the session.
"""
cmd_opts = global_settings.cmd_options
#checking if this is called within the scheduler or within the shell
@@ -349,7 +362,7 @@ class Request(Storage):
class Response(Storage):
"""
defines the response object and the default values of its members
Defines the response object and the default values of its members
response.write( ) can be used to write in the output html
"""
@@ -493,21 +506,25 @@ class Response(Storage):
filename=None
):
"""
if a controller function::
If in a controller function::
return response.stream(file, 100)
the file content will be streamed at 100 bytes at the time
Optional kwargs:
(for custom stream calls)
attachment=True # Send as attachment. Usually creates a
# pop-up download window on browsers
filename=None # The name for the attachment
Args:
stream: filename or read()able content
chunk_size(int): Buffer size
request: the request object
attachment(bool): prepares the correct headers to download the file
as an attachment. Usually creates a pop-up download window
on browsers
filename(str): the name for the attachment
Note: for using the stream name (filename) with attachments
the option must be explicitly set as function parameter(will
default to the last request argument otherwise)
Note:
for using the stream name (filename) with attachments
the option must be explicitly set as function parameter (will
default to the last request argument otherwise)
"""
headers = self.headers
@@ -559,12 +576,12 @@ class Response(Storage):
def download(self, request, db, chunk_size=DEFAULT_CHUNK_SIZE, attachment=True, download_filename=None):
"""
example of usage in controller::
Example of usage in controller::
def download():
return response.download(request, db)
downloads from http://..../download/filename
Downloads from http://..../download/filename
"""
current.session.forget(current.response)
@@ -572,8 +589,7 @@ class Response(Storage):
if not request.args:
raise HTTP(404)
name = request.args[-1]
items = re.compile('(?P<table>.*?)\.(?P<field>.*?)\..*')\
.match(name)
items = re.compile('(?P<table>.*?)\.(?P<field>.*?)\..*').match(name)
if not items:
raise HTTP(404)
(t, f) = (items.group('table'), items.group('field'))
@@ -671,34 +687,34 @@ class Response(Storage):
class Session(Storage):
"""
defines the session object and the default values of its members (None)
Defines the session object and the default values of its members (None)
response.session_storage_type : 'file', 'db', or 'cookie'
response.session_cookie_compression_level :
response.session_cookie_expires : cookie expiration
response.session_cookie_key : for encrypted sessions in cookies
response.session_id : a number or None if no session
response.session_id_name :
response.session_locked :
response.session_masterapp :
response.session_new : a new session obj is being created
response.session_hash : hash of the pickled loaded session
response.session_pickled : picked session
- session_storage_type : 'file', 'db', or 'cookie'
- session_cookie_compression_level :
- session_cookie_expires : cookie expiration
- session_cookie_key : for encrypted sessions in cookies
- session_id : a number or None if no session
- session_id_name :
- session_locked :
- session_masterapp :
- session_new : a new session obj is being created
- session_hash : hash of the pickled loaded session
- session_pickled : picked session
if session in cookie:
response.session_data_name : name of the cookie for session data
- session_data_name : name of the cookie for session data
if session in db:
response.session_db_record_id :
response.session_db_table :
response.session_db_unique_key :
- session_db_record_id
- session_db_table
- session_db_unique_key
if session in file:
response.session_file :
response.session_filename :
- session_file
- session_filename
"""
def connect(
@@ -716,9 +732,26 @@ class Session(Storage):
compression_level=None
):
"""
separate can be separate=lambda(session_name): session_name[-2:]
and it is used to determine a session prefix.
separate can be True and it is set to session_name[-2:]
Used in models, allows to customize Session handling
Args:
request: the request object
response: the response object
db: to store/retrieve sessions in db (a table is created)
tablename(str): table name
masterapp(str): points to another app's sessions. This enables a
"SSO" environment among apps
migrate: passed to the underlying db
separate: with True, creates a folder with the 2 initials of the
session id. Can also be a function, e.g. ::
separate=lambda(session_name): session_name[-2:]
check_client: if True, sessions can only come from the same ip
cookie_key(str): secret for cookie encryption
cookie_expires: sets the expiration of the cookie
compression_level(int): 0-9, sets zlib compression on the data
before the encryption
"""
request = request or current.request
response = response or current.response
@@ -736,7 +769,7 @@ class Session(Storage):
response.session_cookie_compression_level = compression_level
# check if there is a session_id in cookies
try:
try:
old_session_id = cookies[response.session_id_name].value
except KeyError:
old_session_id = None

View File

@@ -2,9 +2,9 @@
# -*- coding: utf-8 -*-
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
"""
import re
@@ -15,8 +15,7 @@ __all__ = ['highlight']
class Highlighter(object):
"""
Do syntax highlighting.
"""Does syntax highlighting.
"""
def __init__(
@@ -26,7 +25,7 @@ class Highlighter(object):
styles=None,
):
"""
Initialise highlighter:
Initialize highlighter:
mode = language (PYTHON, WEB2PY,C, CPP, HTML, HTML_PLAIN)
"""
styles = styles or {}