Merge branch 'master' of github.com:web2py/web2py
This commit is contained in:
@@ -10,6 +10,7 @@ import datetime
|
||||
import copy
|
||||
import gluon.contenttype
|
||||
import gluon.fileutils
|
||||
from gluon._compat import iteritems
|
||||
|
||||
try:
|
||||
import pygraphviz as pgv
|
||||
@@ -267,7 +268,7 @@ def select():
|
||||
else:
|
||||
rows = db(query, ignore_common_filters=True).select(
|
||||
*fields, limitby=(start, stop))
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
import traceback
|
||||
tb = traceback.format_exc()
|
||||
(rows, nrows) = ([], 0)
|
||||
@@ -286,7 +287,7 @@ def select():
|
||||
import_csv(db[request.vars.table],
|
||||
request.vars.csvfile.file)
|
||||
response.flash = T('data uploaded')
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
response.flash = DIV(T('unable to parse csv file'), PRE(str(e)))
|
||||
# end handle upload csv
|
||||
|
||||
@@ -454,7 +455,7 @@ def ccache():
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
|
||||
for key, value in cache.ram.storage.iteritems():
|
||||
for key, value in iteritems(cache.ram.storage):
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
|
||||
@@ -15,6 +15,7 @@ from gluon.utils import web2py_uuid
|
||||
from gluon.tools import Config
|
||||
from gluon.compileapp import find_exposed_functions
|
||||
from glob import glob
|
||||
from gluon._compat import iteritems, PY2
|
||||
import shutil
|
||||
import platform
|
||||
|
||||
@@ -23,7 +24,7 @@ try:
|
||||
if git.__version__ < '0.3.1':
|
||||
raise ImportError("Your version of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
|
||||
have_git = True
|
||||
except ImportError, e:
|
||||
except ImportError as e:
|
||||
have_git = False
|
||||
GIT_MISSING = 'Requires gitpython module, but not installed or incompatible version: %s' % e
|
||||
|
||||
@@ -81,7 +82,10 @@ def safe_open(a, b):
|
||||
def close(self):
|
||||
pass
|
||||
return tmp()
|
||||
return open(a, b)
|
||||
if PY2 or 'b' in b:
|
||||
return open(a, b)
|
||||
else:
|
||||
return open(a, b, encoding="utf8")
|
||||
|
||||
|
||||
def safe_read(a, b='r'):
|
||||
@@ -123,6 +127,7 @@ def index():
|
||||
redirect(send)
|
||||
elif failed_login_count() >= allowed_number_of_attempts:
|
||||
time.sleep(2 ** allowed_number_of_attempts)
|
||||
print('4033')
|
||||
raise HTTP(403)
|
||||
elif request.vars.password:
|
||||
if verify_password(request.vars.password[:1024]):
|
||||
@@ -262,7 +267,7 @@ def site():
|
||||
new_repo = git.Repo.clone_from(form_update.vars.url, target)
|
||||
session.flash = T('new application "%s" imported',
|
||||
form_update.vars.name)
|
||||
except git.GitCommandError, err:
|
||||
except git.GitCommandError as err:
|
||||
session.flash = T('Invalid git repository specified.')
|
||||
redirect(URL(r=request))
|
||||
|
||||
@@ -272,7 +277,7 @@ def site():
|
||||
f = urllib.urlopen(form_update.vars.url)
|
||||
if f.code == 404:
|
||||
raise Exception("404 file not found")
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
session.flash = \
|
||||
DIV(T('Unable to download app because:'), PRE(repr(e)))
|
||||
redirect(URL(r=request))
|
||||
@@ -313,7 +318,7 @@ def site():
|
||||
if FILTER_APPS:
|
||||
apps = [f for f in apps if f in FILTER_APPS]
|
||||
|
||||
apps = sorted(apps, lambda a, b: cmp(a.upper(), b.upper()))
|
||||
apps = sorted(apps, key=lambda a: a.upper())
|
||||
myplatform = platform.python_version()
|
||||
return dict(app=None, apps=apps, myversion=myversion, myplatform=myplatform,
|
||||
form_create=form_create, form_update=form_update)
|
||||
@@ -347,7 +352,7 @@ def pack():
|
||||
else:
|
||||
fname = 'web2py.app.%s.compiled.w2p' % app
|
||||
filename = app_pack_compiled(app, request, raise_ex=True)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
filename = None
|
||||
|
||||
if filename:
|
||||
@@ -421,7 +426,7 @@ def pack_custom():
|
||||
fname = 'web2py.app.%s.w2p' % app
|
||||
try:
|
||||
filename = app_pack(app, request, raise_ex=True, filenames=files)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
filename = None
|
||||
if filename:
|
||||
response.headers['Content-Type'] = 'application/w2p'
|
||||
@@ -732,7 +737,7 @@ def edit():
|
||||
try:
|
||||
code = request.vars.data.rstrip().replace('\r\n', '\n') + '\n'
|
||||
compile(code, path, "exec", _ast.PyCF_ONLY_AST)
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
# offset calculation is only used for textarea (start/stop)
|
||||
start = sum([len(line) + 1 for l, line
|
||||
in enumerate(request.vars.data.split("\n"))
|
||||
@@ -757,11 +762,11 @@ def edit():
|
||||
# Lets try to reload the modules
|
||||
try:
|
||||
mopath = '.'.join(request.args[2:])[:-3]
|
||||
exec 'import applications.%s.modules.%s' % (
|
||||
request.args[0], mopath)
|
||||
exec('import applications.%s.modules.%s' % (
|
||||
request.args[0], mopath))
|
||||
reload(sys.modules['applications.%s.modules.%s'
|
||||
% (request.args[0], mopath)])
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
response.flash = DIV(
|
||||
T('failed to reload module because:'), PRE(repr(e)))
|
||||
|
||||
@@ -1151,7 +1156,7 @@ def design():
|
||||
# Get all languages
|
||||
langpath = os.path.join(apath(app, r=request), 'languages')
|
||||
languages = dict([(lang, info) for lang, info
|
||||
in read_possible_languages(langpath).iteritems()
|
||||
in iteritems(read_possible_languages(langpath))
|
||||
if info[2] != 0]) # info[2] is langfile_mtime:
|
||||
# get only existed files
|
||||
|
||||
@@ -1287,7 +1292,7 @@ def plugin():
|
||||
|
||||
# Get all languages
|
||||
languages = sorted([lang + '.py' for lang, info in
|
||||
T.get_possible_languages_info().iteritems()
|
||||
iteritems(T.get_possible_languages_info())
|
||||
if info[2] != 0]) # info[2] is langfile_mtime:
|
||||
# get only existed files
|
||||
|
||||
@@ -1468,7 +1473,7 @@ def create_file():
|
||||
redirect(URL('edit',
|
||||
args=[os.path.join(request.vars.location, filename)], vars=vars))
|
||||
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
if not isinstance(e, HTTP):
|
||||
session.flash = T('cannot create file')
|
||||
|
||||
@@ -1661,7 +1666,7 @@ def errors():
|
||||
pickel=error, causer=error_causer,
|
||||
last_line=last_line, hash=hash,
|
||||
ticket=fn.ticket_id)
|
||||
except AttributeError, e:
|
||||
except AttributeError as e:
|
||||
tk_db(tk_table.id == fn.id).delete()
|
||||
tk_db.commit()
|
||||
|
||||
|
||||
@@ -700,6 +700,7 @@
|
||||
'Welcome to web2py': 'Vitejte ve Web2py aplikaci.',
|
||||
'Welcome to web2py!': 'Vítejte ve Web2py aplikaci.',
|
||||
'Which called the function %s located in the file %s': 'která zavolala funkci %s v souboru (kontroléru) %s.',
|
||||
'Working...': 'Pracuji...',
|
||||
'WSGI reference name': 'jméno WSGI reference',
|
||||
'YES': 'ANO',
|
||||
'Yes': 'Ano',
|
||||
|
||||
@@ -122,7 +122,7 @@
|
||||
{{=XML(snapshot.get('response','no response available in snapshot'))}}
|
||||
</div>
|
||||
</div>
|
||||
{{except Exception, e:}}
|
||||
{{except Exception as e:}}
|
||||
<!-- this should not happen, just in case... (cannot output normal hmtl as we don't know current open tags) -->
|
||||
{{import traceback;tb=traceback.format_exc().replace("\n","\\n") }}
|
||||
<script language='javascript'>alert("Exception during snapshot rendering: {{=tb}} ");</script>
|
||||
|
||||
@@ -117,7 +117,7 @@
|
||||
<div class="hide inspect" id="session"><h6>session</h6>{{=BEAUTIFY(snapshot['session'])}}</div>
|
||||
<div class="hide inspect" id="response"><h6>response</h6>{{=BEAUTIFY(snapshot['response'])}}</div>
|
||||
</div>
|
||||
{{except Exception, e:}}
|
||||
{{except Exception as e:}}
|
||||
<!-- this should not happen, just in case... (cannot output normal hmtl as we don't know current open tags) -->
|
||||
{{import traceback;tb=traceback.format_exc().replace("\n","\\n") }}
|
||||
<script language='javascript'>alert("Exception during snapshot rendering: {{=tb}} ");</script>
|
||||
|
||||
@@ -61,7 +61,7 @@
|
||||
{{if hasattr(T,'get_possible_languages_info'):}}
|
||||
- {{=T('Admin language')}}</span>
|
||||
<select name="adminlanguage" onchange="var date = new Date();cookieDate=date.setTime(date.getTime()+(100*24*60*60*1000));document.cookie='adminLanguage='+this.options[this.selectedIndex].id+'; expires='+cookieDate+'; path=/';window.location.reload()">
|
||||
{{for langinfo in sorted([(code,info[1]) for code,info in T.get_possible_languages_info().iteritems() if code != 'default']):}}
|
||||
{{for langinfo in sorted([(code,info[1]) for code,info in iteritems(T.get_possible_languages_info()) if code != 'default']):}}
|
||||
<option {{=T.accepted_language==langinfo[0] and 'selected' or ''}} {{='id='+langinfo[0]}} >{{=langinfo[1]}}</option>
|
||||
{{pass}}
|
||||
</select>
|
||||
|
||||
@@ -10,6 +10,7 @@ import datetime
|
||||
import copy
|
||||
import gluon.contenttype
|
||||
import gluon.fileutils
|
||||
from gluon._compat import iteritems
|
||||
|
||||
try:
|
||||
import pygraphviz as pgv
|
||||
@@ -267,7 +268,7 @@ def select():
|
||||
else:
|
||||
rows = db(query, ignore_common_filters=True).select(
|
||||
*fields, limitby=(start, stop))
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
import traceback
|
||||
tb = traceback.format_exc()
|
||||
(rows, nrows) = ([], 0)
|
||||
@@ -286,7 +287,7 @@ def select():
|
||||
import_csv(db[request.vars.table],
|
||||
request.vars.csvfile.file)
|
||||
response.flash = T('data uploaded')
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
response.flash = DIV(T('unable to parse csv file'), PRE(str(e)))
|
||||
# end handle upload csv
|
||||
|
||||
@@ -454,7 +455,7 @@ def ccache():
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
|
||||
for key, value in cache.ram.storage.iteritems():
|
||||
for key, value in iteritems(cache.ram.storage):
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
|
||||
@@ -44,7 +44,7 @@ def test_soap_sub():
|
||||
try:
|
||||
ret = client.SubIntegers(a=3, b=2)
|
||||
result = ret['SubResult']
|
||||
except SoapFault, sf:
|
||||
except SoapFault as sf:
|
||||
result = sf
|
||||
response.view = "soap_examples/generic.html"
|
||||
return dict(xml_request=client.xml_request,
|
||||
|
||||
@@ -28,7 +28,7 @@ def get_content(b=None,
|
||||
|
||||
try:
|
||||
openedfile = openfile()
|
||||
except Exception, IOError:
|
||||
except (Exception, IOError):
|
||||
l = 'en'
|
||||
openedfile = openfile()
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ import datetime
|
||||
import copy
|
||||
import gluon.contenttype
|
||||
import gluon.fileutils
|
||||
from gluon._compat import iteritems
|
||||
|
||||
try:
|
||||
import pygraphviz as pgv
|
||||
@@ -454,7 +455,7 @@ def ccache():
|
||||
except (KeyError, ZeroDivisionError):
|
||||
ram['ratio'] = 0
|
||||
|
||||
for key, value in cache.ram.storage.iteritems():
|
||||
for key, value in iteritems(cache.ram.storage):
|
||||
if hp:
|
||||
ram['bytes'] += hp.iso(value[1]).size
|
||||
ram['objects'] += hp.iso(value[1]).count
|
||||
|
||||
@@ -14,9 +14,9 @@
|
||||
'(requires internet access)': '(vyžaduje připojení k internetu)',
|
||||
'(requires internet access, experimental)': '(vyžaduje internetové připojení, experimentální)',
|
||||
'(something like "it-it")': '(například "cs-cz")',
|
||||
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '@markmin\x01(soubor **gluon/contrib/plural_rules/%s.py** nenalezen)',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': '@markmin\x01Došlo k chybě, prosím [[obnovte stránku %s]]',
|
||||
'@markmin\x01Searching: **%s** %%{file}': '@markmin\x01Hledání: **%s** %%{soubor}',
|
||||
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(soubor **gluon/contrib/plural_rules/%s.py** nenalezen)',
|
||||
'@markmin\x01An error occured, please [[reload %s]] the page': 'Došlo k chybě, prosím [[obnovte stránku %s]]',
|
||||
'@markmin\x01Searching: **%s** %%{file}': 'Hledání: **%s** %%{soubor}',
|
||||
'About': 'O programu',
|
||||
'About application': 'O aplikaci',
|
||||
'Access Control': 'Řízení přístupu',
|
||||
|
||||
@@ -14,7 +14,8 @@ names = localhost:*, 127.0.0.1:*, *:*, *
|
||||
[db]
|
||||
uri = sqlite://storage.sqlite
|
||||
migrate = true
|
||||
pool_size = 10 ; ignored for sqlite
|
||||
; ignored for sqlite
|
||||
pool_size = 10
|
||||
|
||||
; smtp address and credentials
|
||||
[smtp]
|
||||
@@ -27,4 +28,4 @@ ssl = true
|
||||
; form styling
|
||||
[forms]
|
||||
formstyle = bootstrap3_inline
|
||||
separator =
|
||||
separator =
|
||||
|
||||
@@ -30,6 +30,7 @@ if PY2:
|
||||
from string import maketrans
|
||||
from types import ClassType
|
||||
import cgi
|
||||
import cookielib
|
||||
reduce = reduce
|
||||
hashlib_md5 = hashlib.md5
|
||||
iterkeys = lambda d: d.iterkeys()
|
||||
@@ -94,6 +95,7 @@ else:
|
||||
from email.charset import Charset, add_charset, QP as charset_QP
|
||||
from urllib.request import FancyURLopener, urlopen
|
||||
from urllib.parse import quote as urllib_quote, unquote as urllib_unquote, urlencode
|
||||
from http import cookiejar as cookielib
|
||||
import html
|
||||
hashlib_md5 = lambda s: hashlib.md5(bytes(s, 'utf8'))
|
||||
iterkeys = lambda d: iter(d.keys())
|
||||
|
||||
@@ -18,7 +18,7 @@ import fnmatch
|
||||
import os
|
||||
import copy
|
||||
import random
|
||||
from gluon._compat import builtin, PY2, unicodeT, to_native, basestring
|
||||
from gluon._compat import builtin, PY2, unicodeT, to_native, to_bytes, iteritems, basestring
|
||||
from gluon.storage import Storage, List
|
||||
from gluon.template import parse_template
|
||||
from gluon.restricted import restricted, compile2
|
||||
@@ -395,6 +395,11 @@ _base_environment_['SQLField'] = Field # for backward compatibility
|
||||
_base_environment_['SQLFORM'] = SQLFORM
|
||||
_base_environment_['SQLTABLE'] = SQLTABLE
|
||||
_base_environment_['LOAD'] = LOAD
|
||||
# For an easier PY3 migration
|
||||
_base_environment_['PY2'] = PY2
|
||||
_base_environment_['to_native'] = to_native
|
||||
_base_environment_['to_bytes'] = to_bytes
|
||||
_base_environment_['iteritems'] = iteritems
|
||||
|
||||
def build_environment(request, response, session, store_current=True):
|
||||
"""
|
||||
|
||||
2
gluon/contrib/pysimplesoap/__init__.py
Executable file → Normal file
2
gluon/contrib/pysimplesoap/__init__.py
Executable file → Normal file
@@ -8,7 +8,7 @@ __author__ = "Mariano Reingart"
|
||||
__author_email__ = "reingart@gmail.com"
|
||||
__copyright__ = "Copyright (C) 2013 Mariano Reingart"
|
||||
__license__ = "LGPL 3.0"
|
||||
__version__ = "1.11"
|
||||
__version__ = "1.16"
|
||||
|
||||
TIMEOUT = 60
|
||||
|
||||
|
||||
433
gluon/contrib/pysimplesoap/c14n.py
Normal file
433
gluon/contrib/pysimplesoap/c14n.py
Normal file
@@ -0,0 +1,433 @@
|
||||
#! /usr/bin/env python
|
||||
'''XML Canonicalization
|
||||
|
||||
Patches Applied to xml.dom.ext.c14n:
|
||||
http://sourceforge.net/projects/pyxml/
|
||||
|
||||
[ 1444526 ] c14n.py: http://www.w3.org/TR/xml-exc-c14n/ fix
|
||||
-- includes [ 829905 ] c14n.py fix for bug #825115,
|
||||
Date Submitted: 2003-10-24 23:43
|
||||
-- include dependent namespace declarations declared in ancestor nodes
|
||||
(checking attributes and tags),
|
||||
-- handle InclusiveNamespaces PrefixList parameter
|
||||
|
||||
This module generates canonical XML of a document or element.
|
||||
http://www.w3.org/TR/2001/REC-xml-c14n-20010315
|
||||
and includes a prototype of exclusive canonicalization
|
||||
http://www.w3.org/Signature/Drafts/xml-exc-c14n
|
||||
|
||||
Requires PyXML 0.7.0 or later.
|
||||
|
||||
Known issues if using Ft.Lib.pDomlette:
|
||||
1. Unicode
|
||||
2. does not white space normalize attributes of type NMTOKEN and ID?
|
||||
3. seems to be include "\n" after importing external entities?
|
||||
|
||||
Note, this version processes a DOM tree, and consequently it processes
|
||||
namespace nodes as attributes, not from a node's namespace axis. This
|
||||
permits simple document and element canonicalization without
|
||||
XPath. When XPath is used, the XPath result node list is passed and used to
|
||||
determine if the node is in the XPath result list, but little else.
|
||||
|
||||
Authors:
|
||||
"Joseph M. Reagle Jr." <reagle@w3.org>
|
||||
"Rich Salz" <rsalz@zolera.com>
|
||||
|
||||
$Date: 2006-03-30 23:47:16 +0000 (Thu, 30 Mar 2006) $ by $Author: boverhof $
|
||||
'''
|
||||
|
||||
_copyright = '''Copyright 2001, Zolera Systems Inc. All Rights Reserved.
|
||||
Copyright 2001, MIT. All Rights Reserved.
|
||||
|
||||
Distributed under the terms of:
|
||||
Python 2.0 License or later.
|
||||
http://www.python.org/2.0.1/license.html
|
||||
or
|
||||
W3C Software License
|
||||
http://www.w3.org/Consortium/Legal/copyright-software-19980720
|
||||
'''
|
||||
|
||||
import string
|
||||
from xml.dom import Node
|
||||
try:
|
||||
from xml.ns import XMLNS
|
||||
except:
|
||||
class XMLNS:
|
||||
BASE = "http://www.w3.org/2000/xmlns/"
|
||||
XML = "http://www.w3.org/XML/1998/namespace"
|
||||
try:
|
||||
import cStringIO
|
||||
StringIO = cStringIO
|
||||
except ImportError:
|
||||
import StringIO
|
||||
|
||||
_attrs = lambda E: (E.attributes and E.attributes.values()) or []
|
||||
_children = lambda E: E.childNodes or []
|
||||
_IN_XML_NS = lambda n: n.name.startswith("xmlns")
|
||||
_inclusive = lambda n: n.unsuppressedPrefixes == None
|
||||
|
||||
|
||||
# Does a document/PI has lesser/greater document order than the
|
||||
# first element?
|
||||
_LesserElement, _Element, _GreaterElement = range(3)
|
||||
|
||||
def _sorter(n1,n2):
|
||||
'''_sorter(n1,n2) -> int
|
||||
Sorting predicate for non-NS attributes.'''
|
||||
|
||||
i = cmp(n1.namespaceURI, n2.namespaceURI)
|
||||
if i: return i
|
||||
return cmp(n1.localName, n2.localName)
|
||||
|
||||
|
||||
def _sorter_ns(n1,n2):
|
||||
'''_sorter_ns((n,v),(n,v)) -> int
|
||||
"(an empty namespace URI is lexicographically least)."'''
|
||||
|
||||
if n1[0] == 'xmlns': return -1
|
||||
if n2[0] == 'xmlns': return 1
|
||||
return cmp(n1[0], n2[0])
|
||||
|
||||
def _utilized(n, node, other_attrs, unsuppressedPrefixes):
|
||||
'''_utilized(n, node, other_attrs, unsuppressedPrefixes) -> boolean
|
||||
Return true if that nodespace is utilized within the node'''
|
||||
if n.startswith('xmlns:'):
|
||||
n = n[6:]
|
||||
elif n.startswith('xmlns'):
|
||||
n = n[5:]
|
||||
if (n=="" and node.prefix in ["#default", None]) or \
|
||||
n == node.prefix or n in unsuppressedPrefixes:
|
||||
return 1
|
||||
for attr in other_attrs:
|
||||
if n == attr.prefix: return 1
|
||||
# For exclusive need to look at attributes
|
||||
if unsuppressedPrefixes is not None:
|
||||
for attr in _attrs(node):
|
||||
if n == attr.prefix: return 1
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def _inclusiveNamespacePrefixes(node, context, unsuppressedPrefixes):
|
||||
'''http://www.w3.org/TR/xml-exc-c14n/
|
||||
InclusiveNamespaces PrefixList parameter, which lists namespace prefixes that
|
||||
are handled in the manner described by the Canonical XML Recommendation'''
|
||||
inclusive = []
|
||||
if node.prefix:
|
||||
usedPrefixes = ['xmlns:%s' %node.prefix]
|
||||
else:
|
||||
usedPrefixes = ['xmlns']
|
||||
|
||||
for a in _attrs(node):
|
||||
if a.nodeName.startswith('xmlns') or not a.prefix: continue
|
||||
usedPrefixes.append('xmlns:%s' %a.prefix)
|
||||
|
||||
unused_namespace_dict = {}
|
||||
for attr in context:
|
||||
n = attr.nodeName
|
||||
if n in unsuppressedPrefixes:
|
||||
inclusive.append(attr)
|
||||
elif n.startswith('xmlns:') and n[6:] in unsuppressedPrefixes:
|
||||
inclusive.append(attr)
|
||||
elif n.startswith('xmlns') and n[5:] in unsuppressedPrefixes:
|
||||
inclusive.append(attr)
|
||||
elif attr.nodeName in usedPrefixes:
|
||||
inclusive.append(attr)
|
||||
elif n.startswith('xmlns:'):
|
||||
unused_namespace_dict[n] = attr.value
|
||||
|
||||
return inclusive, unused_namespace_dict
|
||||
|
||||
#_in_subset = lambda subset, node: not subset or node in subset
|
||||
_in_subset = lambda subset, node: subset is None or node in subset # rich's tweak
|
||||
|
||||
|
||||
class _implementation:
|
||||
'''Implementation class for C14N. This accompanies a node during it's
|
||||
processing and includes the parameters and processing state.'''
|
||||
|
||||
# Handler for each node type; populated during module instantiation.
|
||||
handlers = {}
|
||||
|
||||
def __init__(self, node, write, **kw):
|
||||
'''Create and run the implementation.'''
|
||||
self.write = write
|
||||
self.subset = kw.get('subset')
|
||||
self.comments = kw.get('comments', 0)
|
||||
self.unsuppressedPrefixes = kw.get('unsuppressedPrefixes')
|
||||
nsdict = kw.get('nsdict', { 'xml': XMLNS.XML, 'xmlns': XMLNS.BASE })
|
||||
|
||||
# Processing state.
|
||||
self.state = (nsdict, {'xml':''}, {}, {}) #0422
|
||||
|
||||
if node.nodeType == Node.DOCUMENT_NODE:
|
||||
self._do_document(node)
|
||||
elif node.nodeType == Node.ELEMENT_NODE:
|
||||
self.documentOrder = _Element # At document element
|
||||
if not _inclusive(self):
|
||||
inherited,unused = _inclusiveNamespacePrefixes(node, self._inherit_context(node),
|
||||
self.unsuppressedPrefixes)
|
||||
self._do_element(node, inherited, unused=unused)
|
||||
else:
|
||||
inherited = self._inherit_context(node)
|
||||
self._do_element(node, inherited)
|
||||
elif node.nodeType == Node.DOCUMENT_TYPE_NODE:
|
||||
pass
|
||||
else:
|
||||
raise TypeError(str(node))
|
||||
|
||||
|
||||
def _inherit_context(self, node):
|
||||
'''_inherit_context(self, node) -> list
|
||||
Scan ancestors of attribute and namespace context. Used only
|
||||
for single element node canonicalization, not for subset
|
||||
canonicalization.'''
|
||||
|
||||
# Collect the initial list of xml:foo attributes.
|
||||
xmlattrs = filter(_IN_XML_NS, _attrs(node))
|
||||
|
||||
# Walk up and get all xml:XXX attributes we inherit.
|
||||
inherited, parent = [], node.parentNode
|
||||
while parent and parent.nodeType == Node.ELEMENT_NODE:
|
||||
for a in filter(_IN_XML_NS, _attrs(parent)):
|
||||
n = a.localName
|
||||
if n not in xmlattrs:
|
||||
xmlattrs.append(n)
|
||||
inherited.append(a)
|
||||
parent = parent.parentNode
|
||||
return inherited
|
||||
|
||||
|
||||
def _do_document(self, node):
|
||||
'''_do_document(self, node) -> None
|
||||
Process a document node. documentOrder holds whether the document
|
||||
element has been encountered such that PIs/comments can be written
|
||||
as specified.'''
|
||||
|
||||
self.documentOrder = _LesserElement
|
||||
for child in node.childNodes:
|
||||
if child.nodeType == Node.ELEMENT_NODE:
|
||||
self.documentOrder = _Element # At document element
|
||||
self._do_element(child)
|
||||
self.documentOrder = _GreaterElement # After document element
|
||||
elif child.nodeType == Node.PROCESSING_INSTRUCTION_NODE:
|
||||
self._do_pi(child)
|
||||
elif child.nodeType == Node.COMMENT_NODE:
|
||||
self._do_comment(child)
|
||||
elif child.nodeType == Node.DOCUMENT_TYPE_NODE:
|
||||
pass
|
||||
else:
|
||||
raise TypeError(str(child))
|
||||
handlers[Node.DOCUMENT_NODE] = _do_document
|
||||
|
||||
|
||||
def _do_text(self, node):
|
||||
'''_do_text(self, node) -> None
|
||||
Process a text or CDATA node. Render various special characters
|
||||
as their C14N entity representations.'''
|
||||
if not _in_subset(self.subset, node): return
|
||||
s = string.replace(node.data, "&", "&")
|
||||
s = string.replace(s, "<", "<")
|
||||
s = string.replace(s, ">", ">")
|
||||
s = string.replace(s, "\015", "
")
|
||||
if s: self.write(s)
|
||||
handlers[Node.TEXT_NODE] = _do_text
|
||||
handlers[Node.CDATA_SECTION_NODE] = _do_text
|
||||
|
||||
|
||||
def _do_pi(self, node):
|
||||
'''_do_pi(self, node) -> None
|
||||
Process a PI node. Render a leading or trailing #xA if the
|
||||
document order of the PI is greater or lesser (respectively)
|
||||
than the document element.
|
||||
'''
|
||||
if not _in_subset(self.subset, node): return
|
||||
W = self.write
|
||||
if self.documentOrder == _GreaterElement: W('\n')
|
||||
W('<?')
|
||||
W(node.nodeName)
|
||||
s = node.data
|
||||
if s:
|
||||
W(' ')
|
||||
W(s)
|
||||
W('?>')
|
||||
if self.documentOrder == _LesserElement: W('\n')
|
||||
handlers[Node.PROCESSING_INSTRUCTION_NODE] = _do_pi
|
||||
|
||||
|
||||
def _do_comment(self, node):
|
||||
'''_do_comment(self, node) -> None
|
||||
Process a comment node. Render a leading or trailing #xA if the
|
||||
document order of the comment is greater or lesser (respectively)
|
||||
than the document element.
|
||||
'''
|
||||
if not _in_subset(self.subset, node): return
|
||||
if self.comments:
|
||||
W = self.write
|
||||
if self.documentOrder == _GreaterElement: W('\n')
|
||||
W('<!--')
|
||||
W(node.data)
|
||||
W('-->')
|
||||
if self.documentOrder == _LesserElement: W('\n')
|
||||
handlers[Node.COMMENT_NODE] = _do_comment
|
||||
|
||||
|
||||
def _do_attr(self, n, value):
|
||||
''''_do_attr(self, node) -> None
|
||||
Process an attribute.'''
|
||||
|
||||
W = self.write
|
||||
W(' ')
|
||||
W(n)
|
||||
W('="')
|
||||
s = string.replace(value, "&", "&")
|
||||
s = string.replace(s, "<", "<")
|
||||
s = string.replace(s, '"', '"')
|
||||
s = string.replace(s, '\011', '	')
|
||||
s = string.replace(s, '\012', '
')
|
||||
s = string.replace(s, '\015', '
')
|
||||
W(s)
|
||||
W('"')
|
||||
|
||||
|
||||
def _do_element(self, node, initial_other_attrs = [], unused = None):
|
||||
'''_do_element(self, node, initial_other_attrs = [], unused = {}) -> None
|
||||
Process an element (and its children).'''
|
||||
|
||||
# Get state (from the stack) make local copies.
|
||||
# ns_parent -- NS declarations in parent
|
||||
# ns_rendered -- NS nodes rendered by ancestors
|
||||
# ns_local -- NS declarations relevant to this element
|
||||
# xml_attrs -- Attributes in XML namespace from parent
|
||||
# xml_attrs_local -- Local attributes in XML namespace.
|
||||
# ns_unused_inherited -- not rendered namespaces, used for exclusive
|
||||
ns_parent, ns_rendered, xml_attrs = \
|
||||
self.state[0], self.state[1].copy(), self.state[2].copy() #0422
|
||||
|
||||
ns_unused_inherited = unused
|
||||
if unused is None:
|
||||
ns_unused_inherited = self.state[3].copy()
|
||||
|
||||
ns_local = ns_parent.copy()
|
||||
inclusive = _inclusive(self)
|
||||
xml_attrs_local = {}
|
||||
|
||||
# Divide attributes into NS, XML, and others.
|
||||
other_attrs = []
|
||||
in_subset = _in_subset(self.subset, node)
|
||||
for a in initial_other_attrs + _attrs(node):
|
||||
if a.namespaceURI == XMLNS.BASE:
|
||||
n = a.nodeName
|
||||
if n == "xmlns:": n = "xmlns" # DOM bug workaround
|
||||
ns_local[n] = a.nodeValue
|
||||
elif a.namespaceURI == XMLNS.XML:
|
||||
if inclusive or (in_subset and _in_subset(self.subset, a)): #020925 Test to see if attribute node in subset
|
||||
xml_attrs_local[a.nodeName] = a #0426
|
||||
else:
|
||||
if _in_subset(self.subset, a): #020925 Test to see if attribute node in subset
|
||||
other_attrs.append(a)
|
||||
|
||||
# # TODO: exclusive, might need to define xmlns:prefix here
|
||||
# if not inclusive and a.prefix is not None and not ns_rendered.has_key('xmlns:%s' %a.prefix):
|
||||
# ns_local['xmlns:%s' %a.prefix] = ??
|
||||
|
||||
#add local xml:foo attributes to ancestor's xml:foo attributes
|
||||
xml_attrs.update(xml_attrs_local)
|
||||
|
||||
# Render the node
|
||||
W, name = self.write, None
|
||||
if in_subset:
|
||||
name = node.nodeName
|
||||
if not inclusive:
|
||||
if node.prefix is not None:
|
||||
prefix = 'xmlns:%s' %node.prefix
|
||||
else:
|
||||
prefix = 'xmlns'
|
||||
|
||||
if not ns_rendered.has_key(prefix) and not ns_local.has_key(prefix):
|
||||
if not ns_unused_inherited.has_key(prefix):
|
||||
raise RuntimeError(\
|
||||
'For exclusive c14n, unable to map prefix "%s" in %s' %(
|
||||
prefix, node))
|
||||
|
||||
ns_local[prefix] = ns_unused_inherited[prefix]
|
||||
del ns_unused_inherited[prefix]
|
||||
|
||||
W('<')
|
||||
W(name)
|
||||
|
||||
# Create list of NS attributes to render.
|
||||
ns_to_render = []
|
||||
for n,v in ns_local.items():
|
||||
|
||||
# If default namespace is XMLNS.BASE or empty,
|
||||
# and if an ancestor was the same
|
||||
if n == "xmlns" and v in [ XMLNS.BASE, '' ] \
|
||||
and ns_rendered.get('xmlns') in [ XMLNS.BASE, '', None ]:
|
||||
continue
|
||||
|
||||
# "omit namespace node with local name xml, which defines
|
||||
# the xml prefix, if its string value is
|
||||
# http://www.w3.org/XML/1998/namespace."
|
||||
if n in ["xmlns:xml", "xml"] \
|
||||
and v in [ 'http://www.w3.org/XML/1998/namespace' ]:
|
||||
continue
|
||||
|
||||
|
||||
# If not previously rendered
|
||||
# and it's inclusive or utilized
|
||||
if (n,v) not in ns_rendered.items():
|
||||
if inclusive or _utilized(n, node, other_attrs, self.unsuppressedPrefixes):
|
||||
ns_to_render.append((n, v))
|
||||
elif not inclusive:
|
||||
ns_unused_inherited[n] = v
|
||||
|
||||
# Sort and render the ns, marking what was rendered.
|
||||
ns_to_render.sort(_sorter_ns)
|
||||
for n,v in ns_to_render:
|
||||
self._do_attr(n, v)
|
||||
ns_rendered[n]=v #0417
|
||||
|
||||
# If exclusive or the parent is in the subset, add the local xml attributes
|
||||
# Else, add all local and ancestor xml attributes
|
||||
# Sort and render the attributes.
|
||||
if not inclusive or _in_subset(self.subset,node.parentNode): #0426
|
||||
other_attrs.extend(xml_attrs_local.values())
|
||||
else:
|
||||
other_attrs.extend(xml_attrs.values())
|
||||
other_attrs.sort(_sorter)
|
||||
for a in other_attrs:
|
||||
self._do_attr(a.nodeName, a.value)
|
||||
W('>')
|
||||
|
||||
# Push state, recurse, pop state.
|
||||
state, self.state = self.state, (ns_local, ns_rendered, xml_attrs, ns_unused_inherited)
|
||||
for c in _children(node):
|
||||
_implementation.handlers[c.nodeType](self, c)
|
||||
self.state = state
|
||||
|
||||
if name: W('</%s>' % name)
|
||||
handlers[Node.ELEMENT_NODE] = _do_element
|
||||
|
||||
|
||||
def Canonicalize(node, output=None, **kw):
|
||||
'''Canonicalize(node, output=None, **kw) -> UTF-8
|
||||
|
||||
Canonicalize a DOM document/element node and all descendents.
|
||||
Return the text; if output is specified then output.write will
|
||||
be called to output the text and None will be returned
|
||||
Keyword parameters:
|
||||
nsdict: a dictionary of prefix:uri namespace entries
|
||||
assumed to exist in the surrounding context
|
||||
comments: keep comments if non-zero (default is 0)
|
||||
subset: Canonical XML subsetting resulting from XPath
|
||||
(default is [])
|
||||
unsuppressedPrefixes: do exclusive C14N, and this specifies the
|
||||
prefixes that should be inherited.
|
||||
'''
|
||||
if output:
|
||||
apply(_implementation, (node, output.write), kw)
|
||||
else:
|
||||
s = StringIO.StringIO()
|
||||
apply(_implementation, (node, s.write), kw)
|
||||
return s.getvalue()
|
||||
@@ -6,7 +6,7 @@
|
||||
# version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# for more details.
|
||||
|
||||
@@ -21,28 +21,30 @@ try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
import copy
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import warnings
|
||||
|
||||
from . import __author__, __copyright__, __license__, __version__, TIMEOUT
|
||||
from .simplexml import SimpleXMLElement, TYPE_MAP, REVERSE_TYPE_MAP, OrderedDict
|
||||
from .simplexml import SimpleXMLElement, TYPE_MAP, REVERSE_TYPE_MAP, Struct
|
||||
from .transport import get_http_wrapper, set_http_wrapper, get_Http
|
||||
# Utility functions used throughout wsdl_parse, moved aside for readability
|
||||
from .helpers import fetch, sort_dict, make_key, process_element, \
|
||||
from .helpers import Alias, fetch, sort_dict, make_key, process_element, \
|
||||
postprocess_element, get_message, preprocess_schema, \
|
||||
get_local_name, get_namespace_prefix, TYPE_MAP, urlsplit
|
||||
|
||||
from .wsse import UsernameToken
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SoapFault(RuntimeError):
|
||||
def __init__(self, faultcode, faultstring):
|
||||
def __init__(self, faultcode, faultstring, detail=None):
|
||||
self.faultcode = faultcode
|
||||
self.faultstring = faultstring
|
||||
RuntimeError.__init__(self, faultcode, faultstring)
|
||||
self.detail = detail
|
||||
RuntimeError.__init__(self, faultcode, faultstring, detail)
|
||||
|
||||
def __unicode__(self):
|
||||
return '%s: %s' % (self.faultcode, self.faultstring)
|
||||
@@ -54,8 +56,9 @@ class SoapFault(RuntimeError):
|
||||
return self.__unicode__().encode('ascii', 'ignore')
|
||||
|
||||
def __repr__(self):
|
||||
return "SoapFault(%s, %s)" % (repr(self.faultcode),
|
||||
repr(self.faultstring))
|
||||
return "SoapFault(faultcode = %s, faultstring %s, detail = %s)" % (repr(self.faultcode),
|
||||
repr(self.faultstring),
|
||||
repr(self.detail))
|
||||
|
||||
|
||||
# soap protocol specification & namespace
|
||||
@@ -76,30 +79,38 @@ class SoapClient(object):
|
||||
sessions=False, soap_server=None, timeout=TIMEOUT,
|
||||
http_headers=None, trace=False,
|
||||
username=None, password=None,
|
||||
key_file=None, plugins=None, strict=True,
|
||||
):
|
||||
"""
|
||||
:param http_headers: Additional HTTP Headers; example: {'Host': 'ipsec.example.com'}
|
||||
"""
|
||||
self.certssl = cert
|
||||
self.keyssl = None
|
||||
self.keyssl = key_file
|
||||
self.location = location # server location (url)
|
||||
self.action = action # SOAP base action
|
||||
self.namespace = namespace # message
|
||||
self.exceptions = exceptions # lanzar execpiones? (Soap Faults)
|
||||
self.xml_request = self.xml_response = ''
|
||||
self.http_headers = http_headers or {}
|
||||
self.plugins = plugins or []
|
||||
self.strict = strict
|
||||
# extract the base directory / url for wsdl relative imports:
|
||||
if wsdl and wsdl_basedir == '':
|
||||
# parse the wsdl url, strip the scheme and filename
|
||||
url_scheme, netloc, path, query, fragment = urlsplit(wsdl)
|
||||
wsdl_basedir = os.path.dirname(netloc + path)
|
||||
|
||||
|
||||
self.wsdl_basedir = wsdl_basedir
|
||||
|
||||
|
||||
# shortcut to print all debugging info and sent / received xml messages
|
||||
if trace:
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
if trace is True:
|
||||
level = logging.DEBUG # default logging level
|
||||
else:
|
||||
level = trace # use the provided level
|
||||
logging.basicConfig(level=level)
|
||||
log.setLevel(level)
|
||||
|
||||
if not soap_ns and not ns:
|
||||
self.__soap_ns = 'soap' # 1.1
|
||||
elif not soap_ns and ns:
|
||||
@@ -112,7 +123,7 @@ class SoapClient(object):
|
||||
|
||||
# SOAP Header support
|
||||
self.__headers = {} # general headers
|
||||
self.__call_headers = None # OrderedDict to be marshalled for RPC Call
|
||||
self.__call_headers = None # Struct to be marshalled for RPC Call
|
||||
|
||||
# check if the Certification Authority Cert is a string and store it
|
||||
if cacert and cacert.startswith('-----BEGIN CERTIFICATE-----'):
|
||||
@@ -130,7 +141,10 @@ class SoapClient(object):
|
||||
if username and password:
|
||||
if hasattr(self.http, 'add_credentials'):
|
||||
self.http.add_credentials(username, password)
|
||||
|
||||
if cert and key_file:
|
||||
if hasattr(self.http, 'add_certificate'):
|
||||
self.http.add_certificate(key=key_file, cert=cert, domain='')
|
||||
|
||||
|
||||
# namespace prefix, None to use xmlns attribute or False to not use it:
|
||||
self.__ns = ns
|
||||
@@ -149,11 +163,7 @@ class SoapClient(object):
|
||||
self.__xml = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<%(soap_ns)s:Envelope xmlns:%(soap_ns)s="%(soap_uri)s" xmlns:%(ns)s="%(namespace)s">
|
||||
<%(soap_ns)s:Header/>
|
||||
<%(soap_ns)s:Body>
|
||||
<%(ns)s:%(method)s>
|
||||
</%(ns)s:%(method)s>
|
||||
</%(soap_ns)s:Body>
|
||||
</%(soap_ns)s:Envelope>"""
|
||||
<%(soap_ns)s:Body><%(ns)s:%(method)s></%(ns)s:%(method)s></%(soap_ns)s:Body></%(soap_ns)s:Envelope>"""
|
||||
|
||||
# parse wsdl url
|
||||
self.services = wsdl and self.wsdl_parse(wsdl, cache=cache)
|
||||
@@ -162,7 +172,7 @@ class SoapClient(object):
|
||||
def __getattr__(self, attr):
|
||||
"""Return a pseudo-method that can be called"""
|
||||
if not self.services: # not using WSDL?
|
||||
return lambda self=self, *args, **kwargs: self.call(attr, *args, **kwargs)
|
||||
return lambda *args, **kwargs: self.call(attr, *args, **kwargs)
|
||||
else: # using WSDL:
|
||||
return lambda *args, **kwargs: self.wsdl_call(attr, *args, **kwargs)
|
||||
|
||||
@@ -175,12 +185,13 @@ class SoapClient(object):
|
||||
"""
|
||||
#TODO: method != input_message
|
||||
# Basic SOAP request:
|
||||
soap_uri = soap_namespaces[self.__soap_ns]
|
||||
xml = self.__xml % dict(method=method, # method tag name
|
||||
namespace=self.namespace, # method ns uri
|
||||
ns=self.__ns, # method ns prefix
|
||||
soap_ns=self.__soap_ns, # soap prefix & uri
|
||||
soap_uri=soap_namespaces[self.__soap_ns])
|
||||
request = SimpleXMLElement(xml, namespace=self.__ns and self.namespace,
|
||||
soap_uri=soap_uri)
|
||||
request = SimpleXMLElement(xml, namespace=self.__ns and self.namespace,
|
||||
prefix=self.__ns)
|
||||
|
||||
request_headers = kwargs.pop('headers', None)
|
||||
@@ -191,18 +202,21 @@ class SoapClient(object):
|
||||
else:
|
||||
parameters = args
|
||||
if parameters and isinstance(parameters[0], SimpleXMLElement):
|
||||
body = request('Body', ns=list(soap_namespaces.values()),)
|
||||
# remove default body parameter (method name)
|
||||
delattr(body, method)
|
||||
# merge xmlelement parameter ("raw" - already marshalled)
|
||||
if parameters[0].children() is not None:
|
||||
for param in parameters[0].children():
|
||||
getattr(request, method).import_node(param)
|
||||
for k,v in parameters[0].attributes().items():
|
||||
getattr(request, method)[k] = v
|
||||
body.import_node(parameters[0])
|
||||
elif parameters:
|
||||
# marshall parameters:
|
||||
use_ns = None if (self.__soap_server == "jetty" or self.qualified is False) else True
|
||||
for k, v in parameters: # dict: tag=valor
|
||||
getattr(request, method).marshall(k, v, ns=use_ns)
|
||||
elif not self.__soap_server in ('oracle',) or self.__soap_server in ('jbossas6',):
|
||||
if hasattr(v, "namespaces") and use_ns:
|
||||
ns = v.namespaces.get(None, True)
|
||||
else:
|
||||
ns = use_ns
|
||||
getattr(request, method).marshall(k, v, ns=ns)
|
||||
elif self.__soap_server in ('jbossas6',):
|
||||
# JBossAS-6 requires no empty method parameters!
|
||||
delattr(request("Body", ns=list(soap_namespaces.values()),), method)
|
||||
|
||||
@@ -210,15 +224,12 @@ class SoapClient(object):
|
||||
if self.__headers and not self.services:
|
||||
self.__call_headers = dict([(k, v) for k, v in self.__headers.items()
|
||||
if not k.startswith('wsse:')])
|
||||
# always extract WS Security header and send it
|
||||
if 'wsse:Security' in self.__headers:
|
||||
#TODO: namespaces too hardwired, clean-up...
|
||||
header = request('Header', ns=list(soap_namespaces.values()),)
|
||||
k = 'wsse:Security'
|
||||
v = self.__headers[k]
|
||||
header.marshall(k, v, ns=False, add_children_ns=False)
|
||||
header(k)['xmlns:wsse'] = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd'
|
||||
#<wsse:UsernameToken xmlns:wsu='http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'>
|
||||
# always extract WS Security header and send it (backward compatible)
|
||||
if 'wsse:Security' in self.__headers and not self.plugins:
|
||||
warnings.warn("Replace wsse:Security with UsernameToken plugin",
|
||||
DeprecationWarning)
|
||||
self.plugins.append(UsernameToken())
|
||||
|
||||
if self.__call_headers:
|
||||
header = request('Header', ns=list(soap_namespaces.values()),)
|
||||
for k, v in self.__call_headers.items():
|
||||
@@ -236,18 +247,46 @@ class SoapClient(object):
|
||||
for subheader in request_headers.children():
|
||||
header.import_node(subheader)
|
||||
|
||||
# do pre-processing using plugins (i.e. WSSE signing)
|
||||
for plugin in self.plugins:
|
||||
plugin.preprocess(self, request, method, args, kwargs,
|
||||
self.__headers, soap_uri)
|
||||
|
||||
self.xml_request = request.as_xml()
|
||||
self.xml_response = self.send(method, self.xml_request)
|
||||
response = SimpleXMLElement(self.xml_response, namespace=self.namespace,
|
||||
jetty=self.__soap_server in ('jetty',))
|
||||
if self.exceptions and response("Fault", ns=list(soap_namespaces.values()), error=False):
|
||||
raise SoapFault(unicode(response.faultcode), unicode(response.faultstring))
|
||||
detailXml = response("detail", ns=list(soap_namespaces.values()), error=False)
|
||||
detail = None
|
||||
|
||||
if detailXml and detailXml.children():
|
||||
if self.services is not None:
|
||||
operation = self.get_operation(method)
|
||||
fault_name = detailXml.children()[0].get_name()
|
||||
# if fault not defined in WSDL, it could be an axis or other
|
||||
# standard type (i.e. "hostname"), try to convert it to string
|
||||
fault = operation['faults'].get(fault_name) or unicode
|
||||
detail = detailXml.children()[0].unmarshall(fault, strict=False)
|
||||
else:
|
||||
detail = repr(detailXml.children())
|
||||
|
||||
raise SoapFault(unicode(response.faultcode),
|
||||
unicode(response.faultstring),
|
||||
detail)
|
||||
|
||||
# do post-processing using plugins (i.e. WSSE signature verification)
|
||||
for plugin in self.plugins:
|
||||
plugin.postprocess(self, response, method, args, kwargs,
|
||||
self.__headers, soap_uri)
|
||||
|
||||
return response
|
||||
|
||||
def send(self, method, xml):
|
||||
"""Send SOAP request using HTTP"""
|
||||
if self.location == 'test': return
|
||||
# location = '%s' % self.location #?op=%s" % (self.location, method)
|
||||
http_method = str('POST')
|
||||
location = str(self.location)
|
||||
|
||||
if self.services:
|
||||
@@ -258,15 +297,25 @@ class SoapClient(object):
|
||||
headers = {
|
||||
'Content-type': 'text/xml; charset="UTF-8"',
|
||||
'Content-length': str(len(xml)),
|
||||
'SOAPAction': '"%s"' % soap_action
|
||||
}
|
||||
|
||||
if self.action is not None:
|
||||
headers['SOAPAction'] = soap_action
|
||||
|
||||
headers.update(self.http_headers)
|
||||
log.info("POST %s" % location)
|
||||
log.debug('\n'.join(["%s: %s" % (k, v) for k, v in headers.items()]))
|
||||
log.debug(xml)
|
||||
|
||||
if sys.version < '3':
|
||||
# Ensure http_method, location and all headers are binary to prevent
|
||||
# UnicodeError inside httplib.HTTPConnection._send_output.
|
||||
|
||||
# httplib in python3 do the same inside itself, don't need to convert it here
|
||||
headers = dict((str(k), str(v)) for k, v in headers.items())
|
||||
|
||||
response, content = self.http.request(
|
||||
location, 'POST', body=xml, headers=headers)
|
||||
location, http_method, body=xml, headers=headers)
|
||||
self.response = response
|
||||
self.content = content
|
||||
|
||||
@@ -298,6 +347,10 @@ class SoapClient(object):
|
||||
return operation
|
||||
|
||||
def wsdl_call(self, method, *args, **kwargs):
|
||||
"""Pre and post process SOAP call, input and output parameters using WSDL"""
|
||||
return self.wsdl_call_with_args(method, args, kwargs)
|
||||
|
||||
def wsdl_call_with_args(self, method, args, kwargs):
|
||||
"""Pre and post process SOAP call, input and output parameters using WSDL"""
|
||||
soap_uri = soap_namespaces[self.__soap_ns]
|
||||
operation = self.get_operation(method)
|
||||
@@ -308,23 +361,23 @@ class SoapClient(object):
|
||||
header = operation.get('header')
|
||||
if 'action' in operation:
|
||||
self.action = operation['action']
|
||||
|
||||
|
||||
if 'namespace' in operation:
|
||||
self.namespace = operation['namespace'] or ''
|
||||
self.qualified = operation['qualified']
|
||||
self.qualified = operation['qualified']
|
||||
|
||||
# construct header and parameters
|
||||
if header:
|
||||
self.__call_headers = sort_dict(header, self.__headers)
|
||||
method, params = self.wsdl_call_get_params(method, input, *args, **kwargs)
|
||||
method, params = self.wsdl_call_get_params(method, input, args, kwargs)
|
||||
|
||||
# call remote procedure
|
||||
response = self.call(method, *params)
|
||||
# parse results:
|
||||
resp = response('Body', ns=soap_uri).children().unmarshall(output)
|
||||
resp = response('Body', ns=soap_uri).children().unmarshall(output, strict=self.strict)
|
||||
return resp and list(resp.values())[0] # pass Response tag children
|
||||
|
||||
def wsdl_call_get_params(self, method, input, *args, **kwargs):
|
||||
def wsdl_call_get_params(self, method, input, args, kwargs):
|
||||
"""Build params from input and args/kwargs"""
|
||||
params = inputname = inputargs = None
|
||||
all_args = {}
|
||||
@@ -338,10 +391,9 @@ class SoapClient(object):
|
||||
for idx, arg in enumerate(args):
|
||||
key = list(inputargs.keys())[idx]
|
||||
if isinstance(arg, dict):
|
||||
if key in arg:
|
||||
d[key] = arg[key]
|
||||
else:
|
||||
raise KeyError('Unhandled key %s. use client.help(method)')
|
||||
if key not in arg:
|
||||
raise KeyError('Unhandled key %s. use client.help(method)' % key)
|
||||
d[key] = arg[key]
|
||||
else:
|
||||
d[key] = arg
|
||||
all_args.update({inputname: d})
|
||||
@@ -352,7 +404,17 @@ class SoapClient(object):
|
||||
valid, errors, warnings = self.wsdl_validate_params(input, all_args)
|
||||
if not valid:
|
||||
raise ValueError('Invalid Args Structure. Errors: %s' % errors)
|
||||
params = list(sort_dict(input, all_args).values())[0].items()
|
||||
# sort and filter parameters according to wsdl input structure
|
||||
tree = sort_dict(input, all_args)
|
||||
root = list(tree.values())[0]
|
||||
params = []
|
||||
# make a params tuple list suitable for self.call(method, *params)
|
||||
for k, v in root.items():
|
||||
# fix referenced namespaces as info is lost when calling call
|
||||
root_ns = root.namespaces[k]
|
||||
if not root.references[k] and isinstance(v, Struct):
|
||||
v.namespaces[None] = root_ns
|
||||
params.append((k, v))
|
||||
# TODO: check style and document attributes
|
||||
if self.__soap_server in ('axis', ):
|
||||
# use the operation name
|
||||
@@ -368,7 +430,7 @@ class SoapClient(object):
|
||||
return (method, params)
|
||||
|
||||
def wsdl_validate_params(self, struct, value):
|
||||
"""Validate the arguments (actual values) for the parameters structure.
|
||||
"""Validate the arguments (actual values) for the parameters structure.
|
||||
Fail for any invalid arguments or type mismatches."""
|
||||
errors = []
|
||||
warnings = []
|
||||
@@ -378,15 +440,15 @@ class SoapClient(object):
|
||||
if type(struct) == type(value):
|
||||
typematch = True
|
||||
if not isinstance(struct, dict) and isinstance(value, dict):
|
||||
typematch = True # struct can be an OrderedDict
|
||||
typematch = True # struct can be a dict or derived (Struct)
|
||||
else:
|
||||
typematch = False
|
||||
|
||||
if struct == str:
|
||||
struct = unicode # fix for py2 vs py3 string handling
|
||||
|
||||
|
||||
if not isinstance(struct, (list, dict, tuple)) and struct in TYPE_MAP.keys():
|
||||
if not type(value) == struct:
|
||||
if not type(value) == struct and value is not None:
|
||||
try:
|
||||
struct(value) # attempt to cast input to parameter type
|
||||
except:
|
||||
@@ -459,6 +521,313 @@ class SoapClient(object):
|
||||
headers,
|
||||
)
|
||||
|
||||
soap_ns_uris = {
|
||||
'http://schemas.xmlsoap.org/wsdl/soap/': 'soap11',
|
||||
'http://schemas.xmlsoap.org/wsdl/soap12/': 'soap12',
|
||||
}
|
||||
wsdl_uri = 'http://schemas.xmlsoap.org/wsdl/'
|
||||
xsd_uri = 'http://www.w3.org/2001/XMLSchema'
|
||||
xsi_uri = 'http://www.w3.org/2001/XMLSchema-instance'
|
||||
|
||||
def _url_to_xml_tree(self, url, cache, force_download):
|
||||
"""Unmarshall the WSDL at the given url into a tree of SimpleXMLElement nodes"""
|
||||
# Open uri and read xml:
|
||||
xml = fetch(url, self.http, cache, force_download, self.wsdl_basedir, self.http_headers)
|
||||
# Parse WSDL XML:
|
||||
wsdl = SimpleXMLElement(xml, namespace=self.wsdl_uri)
|
||||
|
||||
# Extract useful data:
|
||||
self.namespace = ""
|
||||
self.documentation = unicode(wsdl('documentation', error=False)) or ''
|
||||
|
||||
# some wsdl are split down in several files, join them:
|
||||
imported_wsdls = {}
|
||||
for element in wsdl.children() or []:
|
||||
if element.get_local_name() in ('import'):
|
||||
wsdl_namespace = element['namespace']
|
||||
wsdl_location = element['location']
|
||||
if wsdl_location is None:
|
||||
log.warning('WSDL location not provided for %s!' % wsdl_namespace)
|
||||
continue
|
||||
if wsdl_location in imported_wsdls:
|
||||
log.warning('WSDL %s already imported!' % wsdl_location)
|
||||
continue
|
||||
imported_wsdls[wsdl_location] = wsdl_namespace
|
||||
log.debug('Importing wsdl %s from %s' % (wsdl_namespace, wsdl_location))
|
||||
# Open uri and read xml:
|
||||
xml = fetch(wsdl_location, self.http, cache, force_download, self.wsdl_basedir, self.http_headers)
|
||||
# Parse imported XML schema (recursively):
|
||||
imported_wsdl = SimpleXMLElement(xml, namespace=self.xsd_uri)
|
||||
# merge the imported wsdl into the main document:
|
||||
wsdl.import_node(imported_wsdl)
|
||||
# warning: do not process schemas to avoid infinite recursion!
|
||||
|
||||
return wsdl
|
||||
|
||||
def _xml_tree_to_services(self, wsdl, cache, force_download):
|
||||
"""Convert SimpleXMLElement tree representation of the WSDL into pythonic objects"""
|
||||
# detect soap prefix and uri (xmlns attributes of <definitions>)
|
||||
xsd_ns = None
|
||||
soap_uris = {}
|
||||
for k, v in wsdl[:]:
|
||||
if v in self.soap_ns_uris and k.startswith('xmlns:'):
|
||||
soap_uris[get_local_name(k)] = v
|
||||
if v == self.xsd_uri and k.startswith('xmlns:'):
|
||||
xsd_ns = get_local_name(k)
|
||||
|
||||
elements = {} # element: type def
|
||||
messages = {} # message: element
|
||||
port_types = {} # port_type_name: port_type
|
||||
bindings = {} # binding_name: binding
|
||||
services = {} # service_name: service
|
||||
|
||||
# check axis2 namespace at schema types attributes (europa.eu checkVat)
|
||||
if "http://xml.apache.org/xml-soap" in dict(wsdl[:]).values():
|
||||
# get the sub-namespace in the first schema element (see issue 8)
|
||||
if wsdl('types', error=False):
|
||||
schema = wsdl.types('schema', ns=self.xsd_uri)
|
||||
attrs = dict(schema[:])
|
||||
self.namespace = attrs.get('targetNamespace', self.namespace)
|
||||
if not self.namespace or self.namespace == "urn:DefaultNamespace":
|
||||
self.namespace = wsdl['targetNamespace'] or self.namespace
|
||||
|
||||
imported_schemas = {}
|
||||
global_namespaces = {None: self.namespace}
|
||||
|
||||
# process current wsdl schema (if any, or many if imported):
|
||||
# <wsdl:definitions>
|
||||
# <wsdl:types>
|
||||
# <xs:schema>
|
||||
# <xs:element>
|
||||
# <xs:complexType>...</xs:complexType>
|
||||
# or
|
||||
# <xs:.../>
|
||||
# </xs:element>
|
||||
# </xs:schema>
|
||||
# </wsdl:types>
|
||||
# </wsdl:definitions>
|
||||
|
||||
for types in wsdl('types', error=False) or []:
|
||||
# avoid issue if schema is not given in the main WSDL file
|
||||
schemas = types('schema', ns=self.xsd_uri, error=False)
|
||||
for schema in schemas or []:
|
||||
preprocess_schema(schema, imported_schemas, elements, self.xsd_uri,
|
||||
self.__soap_server, self.http, cache,
|
||||
force_download, self.wsdl_basedir,
|
||||
global_namespaces=global_namespaces)
|
||||
|
||||
# 2nd phase: alias, postdefined elements, extend bases, convert lists
|
||||
postprocess_element(elements, [])
|
||||
|
||||
for message in wsdl.message:
|
||||
for part in message('part', error=False) or []:
|
||||
element = {}
|
||||
element_name = part['element']
|
||||
if not element_name:
|
||||
# some implementations (axis) uses type instead
|
||||
element_name = part['type']
|
||||
type_ns = get_namespace_prefix(element_name)
|
||||
type_uri = part.get_namespace_uri(type_ns)
|
||||
part_name = part['name'] or None
|
||||
if type_uri == self.xsd_uri:
|
||||
element_name = get_local_name(element_name)
|
||||
fn = REVERSE_TYPE_MAP.get(element_name, None)
|
||||
element = {part_name: fn}
|
||||
# emulate a true Element (complexType) for rpc style
|
||||
if (message['name'], part_name) not in messages:
|
||||
od = Struct()
|
||||
od.namespaces[None] = type_uri
|
||||
messages[(message['name'], part_name)] = {message['name']: od}
|
||||
else:
|
||||
od = messages[(message['name'], part_name)].values()[0]
|
||||
od.namespaces[part_name] = type_uri
|
||||
od.references[part_name] = False
|
||||
od.update(element)
|
||||
else:
|
||||
element_name = get_local_name(element_name)
|
||||
fn = elements.get(make_key(element_name, 'element', type_uri))
|
||||
if not fn:
|
||||
# some axis servers uses complexType for part messages (rpc)
|
||||
fn = elements.get(make_key(element_name, 'complexType', type_uri))
|
||||
od = Struct()
|
||||
od[part_name] = fn
|
||||
od.namespaces[None] = type_uri
|
||||
od.namespaces[part_name] = type_uri
|
||||
od.references[part_name] = False
|
||||
element = {message['name']: od}
|
||||
else:
|
||||
element = {element_name: fn}
|
||||
messages[(message['name'], part_name)] = element
|
||||
|
||||
for port_type_node in wsdl.portType:
|
||||
port_type_name = port_type_node['name']
|
||||
port_type = port_types[port_type_name] = {}
|
||||
operations = port_type['operations'] = {}
|
||||
|
||||
for operation_node in port_type_node.operation:
|
||||
op_name = operation_node['name']
|
||||
op = operations[op_name] = {}
|
||||
op['style'] = operation_node['style']
|
||||
op['parameter_order'] = (operation_node['parameterOrder'] or "").split(" ")
|
||||
op['documentation'] = unicode(operation_node('documentation', error=False)) or ''
|
||||
|
||||
if operation_node('input', error=False):
|
||||
op['input_msg'] = get_local_name(operation_node.input['message'])
|
||||
ns = get_namespace_prefix(operation_node.input['message'])
|
||||
op['namespace'] = operation_node.get_namespace_uri(ns)
|
||||
|
||||
if operation_node('output', error=False):
|
||||
op['output_msg'] = get_local_name(operation_node.output['message'])
|
||||
|
||||
#Get all fault message types this operation may return
|
||||
fault_msgs = op['fault_msgs'] = {}
|
||||
faults = operation_node('fault', error=False)
|
||||
if faults is not None:
|
||||
for fault in operation_node('fault', error=False):
|
||||
fault_msgs[fault['name']] = get_local_name(fault['message'])
|
||||
|
||||
for binding_node in wsdl.binding:
|
||||
port_type_name = get_local_name(binding_node['type'])
|
||||
if port_type_name not in port_types:
|
||||
# Invalid port type
|
||||
continue
|
||||
port_type = port_types[port_type_name]
|
||||
binding_name = binding_node['name']
|
||||
soap_binding = binding_node('binding', ns=list(soap_uris.values()), error=False)
|
||||
transport = soap_binding and soap_binding['transport'] or None
|
||||
style = soap_binding and soap_binding['style'] or None # rpc
|
||||
|
||||
binding = bindings[binding_name] = {
|
||||
'name': binding_name,
|
||||
'operations': copy.deepcopy(port_type['operations']),
|
||||
'port_type_name': port_type_name,
|
||||
'transport': transport,
|
||||
'style': style,
|
||||
}
|
||||
|
||||
for operation_node in binding_node.operation:
|
||||
op_name = operation_node['name']
|
||||
op_op = operation_node('operation', ns=list(soap_uris.values()), error=False)
|
||||
action = op_op and op_op['soapAction']
|
||||
|
||||
op = binding['operations'].setdefault(op_name, {})
|
||||
op['name'] = op_name
|
||||
op['style'] = op.get('style', style)
|
||||
if action is not None:
|
||||
op['action'] = action
|
||||
|
||||
# input and/or output can be not present!
|
||||
input = operation_node('input', error=False)
|
||||
body = input and input('body', ns=list(soap_uris.values()), error=False)
|
||||
parts_input_body = body and body['parts'] or None
|
||||
|
||||
# parse optional header messages (some implementations use more than one!)
|
||||
parts_input_headers = []
|
||||
headers = input and input('header', ns=list(soap_uris.values()), error=False)
|
||||
for header in headers or []:
|
||||
hdr = {'message': header['message'], 'part': header['part']}
|
||||
parts_input_headers.append(hdr)
|
||||
|
||||
if 'input_msg' in op:
|
||||
headers = {} # base header message structure
|
||||
for input_header in parts_input_headers:
|
||||
header_msg = get_local_name(input_header.get('message'))
|
||||
header_part = get_local_name(input_header.get('part'))
|
||||
# warning: some implementations use a separate message!
|
||||
hdr = get_message(messages, header_msg or op['input_msg'], header_part)
|
||||
if hdr:
|
||||
headers.update(hdr)
|
||||
else:
|
||||
pass # not enough info to search the header message:
|
||||
op['input'] = get_message(messages, op['input_msg'], parts_input_body, op['parameter_order'])
|
||||
op['header'] = headers
|
||||
|
||||
try:
|
||||
element = list(op['input'].values())[0]
|
||||
ns_uri = element.namespaces[None]
|
||||
qualified = element.qualified
|
||||
except (AttributeError, KeyError) as e:
|
||||
# TODO: fix if no parameters parsed or "variants"
|
||||
ns_uri = op['namespace']
|
||||
qualified = None
|
||||
if ns_uri:
|
||||
op['namespace'] = ns_uri
|
||||
op['qualified'] = qualified
|
||||
|
||||
# Remove temporary property
|
||||
del op['input_msg']
|
||||
|
||||
else:
|
||||
op['input'] = None
|
||||
op['header'] = None
|
||||
|
||||
output = operation_node('output', error=False)
|
||||
body = output and output('body', ns=list(soap_uris.values()), error=False)
|
||||
parts_output_body = body and body['parts'] or None
|
||||
if 'output_msg' in op:
|
||||
op['output'] = get_message(messages, op['output_msg'], parts_output_body)
|
||||
# Remove temporary property
|
||||
del op['output_msg']
|
||||
else:
|
||||
op['output'] = None
|
||||
|
||||
if 'fault_msgs' in op:
|
||||
faults = op['faults'] = {}
|
||||
for msg in op['fault_msgs'].values():
|
||||
msg_obj = get_message(messages, msg, parts_output_body)
|
||||
tag_name = list(msg_obj)[0]
|
||||
faults[tag_name] = msg_obj
|
||||
|
||||
# useless? never used
|
||||
parts_output_headers = []
|
||||
headers = output and output('header', ns=list(soap_uris.values()), error=False)
|
||||
for header in headers or []:
|
||||
hdr = {'message': header['message'], 'part': header['part']}
|
||||
parts_output_headers.append(hdr)
|
||||
|
||||
|
||||
|
||||
|
||||
for service in wsdl("service", error=False) or []:
|
||||
service_name = service['name']
|
||||
if not service_name:
|
||||
continue # empty service?
|
||||
|
||||
serv = services.setdefault(service_name, {})
|
||||
ports = serv['ports'] = {}
|
||||
serv['documentation'] = service['documentation'] or ''
|
||||
for port in service.port:
|
||||
binding_name = get_local_name(port['binding'])
|
||||
|
||||
if not binding_name in bindings:
|
||||
continue # unknown binding
|
||||
|
||||
binding = ports[port['name']] = copy.deepcopy(bindings[binding_name])
|
||||
address = port('address', ns=list(soap_uris.values()), error=False)
|
||||
location = address and address['location'] or None
|
||||
soap_uri = address and soap_uris.get(address.get_prefix())
|
||||
soap_ver = soap_uri and self.soap_ns_uris.get(soap_uri)
|
||||
|
||||
binding.update({
|
||||
'location': location,
|
||||
'service_name': service_name,
|
||||
'soap_uri': soap_uri,
|
||||
'soap_ver': soap_ver,
|
||||
})
|
||||
|
||||
# create an default service if none is given in the wsdl:
|
||||
if not services:
|
||||
services[''] = {'ports': {'': None}}
|
||||
|
||||
elements = list(e for e in elements.values() if type(e) is type) + sorted(e for e in elements.values() if not(type(e) is type))
|
||||
e = None
|
||||
self.elements = []
|
||||
for element in elements:
|
||||
if e!= element: self.elements.append(element)
|
||||
e = element
|
||||
|
||||
return services
|
||||
|
||||
def wsdl_parse(self, url, cache=False):
|
||||
"""Parse Web Service Description v1.1"""
|
||||
|
||||
@@ -477,7 +846,6 @@ class SoapClient(object):
|
||||
f.close()
|
||||
# sanity check:
|
||||
if pkl['version'][:-1] != __version__.split(' ')[0][:-1] or pkl['url'] != url:
|
||||
import warnings
|
||||
warnings.warn('version or url mismatch! discarding cached wsdl', RuntimeWarning)
|
||||
log.debug('Version: %s %s' % (pkl['version'], __version__))
|
||||
log.debug('URL: %s %s' % (pkl['url'], url))
|
||||
@@ -487,211 +855,11 @@ class SoapClient(object):
|
||||
self.documentation = pkl['documentation']
|
||||
return pkl['services']
|
||||
|
||||
soap_ns = {
|
||||
'http://schemas.xmlsoap.org/wsdl/soap/': 'soap11',
|
||||
'http://schemas.xmlsoap.org/wsdl/soap12/': 'soap12',
|
||||
}
|
||||
wsdl_uri = 'http://schemas.xmlsoap.org/wsdl/'
|
||||
xsd_uri = 'http://www.w3.org/2001/XMLSchema'
|
||||
xsi_uri = 'http://www.w3.org/2001/XMLSchema-instance'
|
||||
|
||||
# always return an unicode object:
|
||||
REVERSE_TYPE_MAP['string'] = str
|
||||
|
||||
# Open uri and read xml:
|
||||
xml = fetch(url, self.http, cache, force_download, self.wsdl_basedir)
|
||||
# Parse WSDL XML:
|
||||
wsdl = SimpleXMLElement(xml, namespace=wsdl_uri)
|
||||
|
||||
# Extract useful data:
|
||||
self.namespace = ""
|
||||
self.documentation = unicode(wsdl('documentation', error=False)) or ''
|
||||
|
||||
# some wsdl are splitted down in several files, join them:
|
||||
imported_wsdls = {}
|
||||
for element in wsdl.children() or []:
|
||||
if element.get_local_name() in ('import'):
|
||||
wsdl_namespace = element['namespace']
|
||||
wsdl_location = element['location']
|
||||
if wsdl_location is None:
|
||||
log.warning('WSDL location not provided for %s!' % wsdl_namespace)
|
||||
continue
|
||||
if wsdl_location in imported_wsdls:
|
||||
log.warning('WSDL %s already imported!' % wsdl_location)
|
||||
continue
|
||||
imported_wsdls[wsdl_location] = wsdl_namespace
|
||||
log.debug('Importing wsdl %s from %s' % (wsdl_namespace, wsdl_location))
|
||||
# Open uri and read xml:
|
||||
xml = fetch(wsdl_location, self.http, cache, force_download, self.wsdl_basedir)
|
||||
# Parse imported XML schema (recursively):
|
||||
imported_wsdl = SimpleXMLElement(xml, namespace=xsd_uri)
|
||||
# merge the imported wsdl into the main document:
|
||||
wsdl.import_node(imported_wsdl)
|
||||
# warning: do not process schemas to avoid infinite recursion!
|
||||
|
||||
|
||||
# detect soap prefix and uri (xmlns attributes of <definitions>)
|
||||
xsd_ns = None
|
||||
soap_uris = {}
|
||||
for k, v in wsdl[:]:
|
||||
if v in soap_ns and k.startswith('xmlns:'):
|
||||
soap_uris[get_local_name(k)] = v
|
||||
if v == xsd_uri and k.startswith('xmlns:'):
|
||||
xsd_ns = get_local_name(k)
|
||||
|
||||
services = {}
|
||||
bindings = {} # binding_name: binding
|
||||
operations = {} # operation_name: operation
|
||||
port_type_bindings = {} # port_type_name: binding
|
||||
messages = {} # message: element
|
||||
elements = {} # element: type def
|
||||
|
||||
for service in wsdl.service:
|
||||
service_name = service['name']
|
||||
if not service_name:
|
||||
continue # empty service?
|
||||
serv = services.setdefault(service_name, {'ports': {}})
|
||||
serv['documentation'] = service['documentation'] or ''
|
||||
for port in service.port:
|
||||
binding_name = get_local_name(port['binding'])
|
||||
operations[binding_name] = {}
|
||||
address = port('address', ns=list(soap_uris.values()), error=False)
|
||||
location = address and address['location'] or None
|
||||
soap_uri = address and soap_uris.get(address.get_prefix())
|
||||
soap_ver = soap_uri and soap_ns.get(soap_uri)
|
||||
bindings[binding_name] = {'name': binding_name,
|
||||
'service_name': service_name,
|
||||
'location': location,
|
||||
'soap_uri': soap_uri,
|
||||
'soap_ver': soap_ver, }
|
||||
serv['ports'][port['name']] = bindings[binding_name]
|
||||
|
||||
for binding in wsdl.binding:
|
||||
binding_name = binding['name']
|
||||
soap_binding = binding('binding', ns=list(soap_uris.values()), error=False)
|
||||
transport = soap_binding and soap_binding['transport'] or None
|
||||
port_type_name = get_local_name(binding['type'])
|
||||
bindings[binding_name].update({
|
||||
'port_type_name': port_type_name,
|
||||
'transport': transport, 'operations': {},
|
||||
})
|
||||
if port_type_name not in port_type_bindings:
|
||||
port_type_bindings[port_type_name] = []
|
||||
port_type_bindings[port_type_name].append(bindings[binding_name])
|
||||
for operation in binding.operation:
|
||||
op_name = operation['name']
|
||||
op = operation('operation', ns=list(soap_uris.values()), error=False)
|
||||
action = op and op['soapAction']
|
||||
d = operations[binding_name].setdefault(op_name, {})
|
||||
bindings[binding_name]['operations'][op_name] = d
|
||||
d.update({'name': op_name})
|
||||
d['parts'] = {}
|
||||
# input and/or ouput can be not present!
|
||||
input = operation('input', error=False)
|
||||
body = input and input('body', ns=list(soap_uris.values()), error=False)
|
||||
d['parts']['input_body'] = body and body['parts'] or None
|
||||
output = operation('output', error=False)
|
||||
body = output and output('body', ns=list(soap_uris.values()), error=False)
|
||||
d['parts']['output_body'] = body and body['parts'] or None
|
||||
header = input and input('header', ns=list(soap_uris.values()), error=False)
|
||||
d['parts']['input_header'] = header and {'message': header['message'], 'part': header['part']} or None
|
||||
header = output and output('header', ns=list(soap_uris.values()), error=False)
|
||||
d['parts']['output_header'] = header and {'message': header['message'], 'part': header['part']} or None
|
||||
if action:
|
||||
d['action'] = action
|
||||
|
||||
# check axis2 namespace at schema types attributes (europa.eu checkVat)
|
||||
if "http://xml.apache.org/xml-soap" in dict(wsdl[:]).values():
|
||||
# get the sub-namespace in the first schema element (see issue 8)
|
||||
if wsdl('types', error=False):
|
||||
schema = wsdl.types('schema', ns=xsd_uri)
|
||||
attrs = dict(schema[:])
|
||||
self.namespace = attrs.get('targetNamespace', self.namespace)
|
||||
if not self.namespace or self.namespace == "urn:DefaultNamespace":
|
||||
self.namespace = wsdl['targetNamespace'] or self.namespace
|
||||
|
||||
imported_schemas = {}
|
||||
global_namespaces = {None: self.namespace}
|
||||
|
||||
# process current wsdl schema (if any):
|
||||
if wsdl('types', error=False):
|
||||
for schema in wsdl.types('schema', ns=xsd_uri):
|
||||
preprocess_schema(schema, imported_schemas, elements, xsd_uri,
|
||||
self.__soap_server, self.http, cache,
|
||||
force_download, self.wsdl_basedir,
|
||||
global_namespaces=global_namespaces)
|
||||
|
||||
# 2nd phase: alias, postdefined elements, extend bases, convert lists
|
||||
postprocess_element(elements, [])
|
||||
|
||||
for message in wsdl.message:
|
||||
for part in message('part', error=False) or []:
|
||||
element = {}
|
||||
element_name = part['element']
|
||||
if not element_name:
|
||||
# some implementations (axis) uses type instead
|
||||
element_name = part['type']
|
||||
type_ns = get_namespace_prefix(element_name)
|
||||
type_uri = wsdl.get_namespace_uri(type_ns)
|
||||
if type_uri == xsd_uri:
|
||||
element_name = get_local_name(element_name)
|
||||
fn = REVERSE_TYPE_MAP.get(element_name, None)
|
||||
element = {part['name']: fn}
|
||||
# emulate a true Element (complexType)
|
||||
list(messages.setdefault((message['name'], None), {message['name']: OrderedDict()}).values())[0].update(element)
|
||||
else:
|
||||
element_name = get_local_name(element_name)
|
||||
fn = elements.get(make_key(element_name, 'element', type_uri))
|
||||
if not fn:
|
||||
# some axis servers uses complexType for part messages
|
||||
fn = elements.get(make_key(element_name, 'complexType', type_uri))
|
||||
element = {message['name']: {part['name']: fn}}
|
||||
else:
|
||||
element = {element_name: fn}
|
||||
messages[(message['name'], part['name'])] = element
|
||||
|
||||
for port_type in wsdl.portType:
|
||||
port_type_name = port_type['name']
|
||||
|
||||
for binding in port_type_bindings.get(port_type_name, []):
|
||||
for operation in port_type.operation:
|
||||
op_name = operation['name']
|
||||
op = operations[binding['name']][op_name]
|
||||
op['documentation'] = unicode(operation('documentation', error=False)) or ''
|
||||
if binding['soap_ver']:
|
||||
#TODO: separe operation_binding from operation (non SOAP?)
|
||||
if operation('input', error=False):
|
||||
input_msg = get_local_name(operation.input['message'])
|
||||
input_header = op['parts'].get('input_header')
|
||||
if input_header:
|
||||
header_msg = get_local_name(input_header.get('message'))
|
||||
header_part = get_local_name(input_header.get('part'))
|
||||
# warning: some implementations use a separate message!
|
||||
header = get_message(messages, header_msg or input_msg, header_part)
|
||||
else:
|
||||
header = None # not enought info to search the header message:
|
||||
op['input'] = get_message(messages, input_msg, op['parts'].get('input_body'))
|
||||
op['header'] = header
|
||||
try:
|
||||
element = list(op['input'].values())[0]
|
||||
ns_uri = element.namespace
|
||||
qualified = element.qualified
|
||||
except AttributeError:
|
||||
# TODO: fix if no parameters parsed or "variants"
|
||||
ns = get_namespace_prefix(operation.input['message'])
|
||||
ns_uri = operation.get_namespace_uri(ns)
|
||||
qualified = None
|
||||
if ns_uri:
|
||||
op['namespace'] = ns_uri
|
||||
op['qualified'] = qualified
|
||||
else:
|
||||
op['input'] = None
|
||||
op['header'] = None
|
||||
if operation('output', error=False):
|
||||
output_msg = get_local_name(operation.output['message'])
|
||||
op['output'] = get_message(messages, output_msg, op['parts'].get('output_body'))
|
||||
else:
|
||||
op['output'] = None
|
||||
wsdl = self._url_to_xml_tree(url, cache, force_download)
|
||||
services = self._xml_tree_to_services(wsdl, cache, force_download)
|
||||
|
||||
# dump the full service/port/operation map
|
||||
#log.debug(pprint.pformat(services))
|
||||
@@ -722,6 +890,56 @@ class SoapClient(object):
|
||||
log.debug('removing %s' % self.cacert)
|
||||
os.unlink(self.cacert)
|
||||
|
||||
def __repr__(self):
|
||||
s = 'SOAP CLIENT'
|
||||
s += '\n ELEMENTS'
|
||||
for e in self.elements:
|
||||
if isinstance(e, type):
|
||||
e = e.__name__
|
||||
elif isinstance(e, Alias):
|
||||
e = e.xml_type
|
||||
elif isinstance(e, Struct) and e.key[1]=='element':
|
||||
e = repr(e)
|
||||
else:
|
||||
continue
|
||||
s += '\n %s' % e
|
||||
for service in self.services:
|
||||
s += '\n SERVICE (%s)' % service
|
||||
ports = self.services[service]['ports']
|
||||
for port in ports:
|
||||
port = ports[port]
|
||||
if port['soap_ver'] == None: continue
|
||||
s += '\n PORT (%s)' % port['name']
|
||||
s += '\n Location: %s' % port['location']
|
||||
s += '\n Soap ver: %s' % port['soap_ver']
|
||||
s += '\n Soap URI: %s' % port['soap_uri']
|
||||
s += '\n OPERATIONS'
|
||||
operations = port['operations']
|
||||
for operation in sorted(operations):
|
||||
operation = self.get_operation(operation)
|
||||
input = operation.get('input')
|
||||
input = input and input.values() and list(input.values())[0]
|
||||
input_str = ''
|
||||
if isinstance(input, dict):
|
||||
if 'parameters' not in input or input['parameters']!=None:
|
||||
for k, v in input.items():
|
||||
if isinstance(v, type):
|
||||
v = v.__name__
|
||||
elif isinstance(v, Alias):
|
||||
v = v.xml_type
|
||||
elif isinstance(v, Struct):
|
||||
v = v.key[0]
|
||||
input_str += '%s: %s, ' % (k, v)
|
||||
output = operation.get('output')
|
||||
if output:
|
||||
output = list(operation['output'].values())[0]
|
||||
s += '\n %s(%s)' % (
|
||||
operation['name'],
|
||||
input_str[:-2]
|
||||
)
|
||||
s += '\n > %s' % output
|
||||
|
||||
return s
|
||||
|
||||
def parse_proxy(proxy_str):
|
||||
"""Parses proxy address user:pass@host:port into a dict suitable for httplib2"""
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation; either version 3, or (at your option) any later
|
||||
# version.
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# for more details.
|
||||
|
||||
@@ -38,7 +38,7 @@ from . import __author__, __copyright__, __license__, __version__
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def fetch(url, http, cache=False, force_download=False, wsdl_basedir=''):
|
||||
def fetch(url, http, cache=False, force_download=False, wsdl_basedir='', headers={}):
|
||||
"""Download a document from a URL, save it locally if cache enabled"""
|
||||
|
||||
# check / append a valid schema if not given:
|
||||
@@ -46,12 +46,13 @@ def fetch(url, http, cache=False, force_download=False, wsdl_basedir=''):
|
||||
if not url_scheme in ('http', 'https', 'file'):
|
||||
for scheme in ('http', 'https', 'file'):
|
||||
try:
|
||||
path = os.path.normpath(os.path.join(wsdl_basedir, url))
|
||||
if not url.startswith("/") and scheme in ('http', 'https'):
|
||||
tmp_url = "%s://%s" % (scheme, os.path.join(wsdl_basedir, url))
|
||||
tmp_url = "%s://%s" % (scheme, path)
|
||||
else:
|
||||
tmp_url = "%s:%s" % (scheme, os.path.join(wsdl_basedir, url))
|
||||
tmp_url = "%s:%s" % (scheme, path)
|
||||
log.debug('Scheme not found, trying %s' % scheme)
|
||||
return fetch(tmp_url, http, cache, force_download, wsdl_basedir)
|
||||
return fetch(tmp_url, http, cache, force_download, wsdl_basedir, headers)
|
||||
except Exception as e:
|
||||
log.error(e)
|
||||
raise RuntimeError('No scheme given for url: %s' % url)
|
||||
@@ -72,7 +73,7 @@ def fetch(url, http, cache=False, force_download=False, wsdl_basedir=''):
|
||||
xml = f.read()
|
||||
else:
|
||||
log.info('GET %s using %s' % (url, http._wrapper_version))
|
||||
response, xml = http.request(url, 'GET', None, {})
|
||||
response, xml = http.request(url, 'GET', None, headers)
|
||||
if cache:
|
||||
log.info('Writing file %s' % filename)
|
||||
if not os.path.isdir(cache):
|
||||
@@ -86,7 +87,7 @@ def fetch(url, http, cache=False, force_download=False, wsdl_basedir=''):
|
||||
def sort_dict(od, d):
|
||||
"""Sort parameters (same order as xsd:sequence)"""
|
||||
if isinstance(od, dict):
|
||||
ret = OrderedDict()
|
||||
ret = Struct()
|
||||
for k in od.keys():
|
||||
v = d.get(k)
|
||||
# don't append null tags!
|
||||
@@ -96,8 +97,9 @@ def sort_dict(od, d):
|
||||
elif isinstance(v, list):
|
||||
v = [sort_dict(od[k][0], v1) for v1 in v]
|
||||
ret[k] = v
|
||||
if hasattr(od, 'namespace'):
|
||||
ret.namespace = od.namespace
|
||||
if hasattr(od, 'namespaces'):
|
||||
ret.namespaces.update(od.namespaces)
|
||||
ret.references.update(od.references)
|
||||
ret.qualified = od.qualified
|
||||
return ret
|
||||
else:
|
||||
@@ -116,15 +118,21 @@ def make_key(element_name, element_type, namespace):
|
||||
return (element_name, eltype, namespace)
|
||||
|
||||
|
||||
def process_element(elements, element_name, node, element_type, xsd_uri, dialect, namespace, qualified=None,
|
||||
soapenc_uri = 'http://schemas.xmlsoap.org/soap/encoding/'):
|
||||
"""Parse and define simple element types"""
|
||||
def process_element(elements, element_name, node, element_type, xsd_uri,
|
||||
dialect, namespace, qualified=None,
|
||||
soapenc_uri='http://schemas.xmlsoap.org/soap/encoding/',
|
||||
struct=None):
|
||||
"""Parse and define simple element types as Struct objects"""
|
||||
|
||||
log.debug('Processing element %s %s' % (element_name, element_type))
|
||||
|
||||
# iterate over inner tags of the element definition:
|
||||
for tag in node:
|
||||
|
||||
# sanity checks (skip superfluous xml tags, resolve aliases, etc.):
|
||||
if tag.get_local_name() in ('annotation', 'documentation'):
|
||||
continue
|
||||
elif tag.get_local_name() in ('element', 'restriction'):
|
||||
elif tag.get_local_name() in ('element', 'restriction', 'list'):
|
||||
log.debug('%s has no children! %s' % (element_name, tag))
|
||||
children = tag # element "alias"?
|
||||
alias = True
|
||||
@@ -134,11 +142,21 @@ def process_element(elements, element_name, node, element_type, xsd_uri, dialect
|
||||
else:
|
||||
log.debug('%s has no children! %s' % (element_name, tag))
|
||||
continue # TODO: abstract?
|
||||
d = OrderedDict()
|
||||
d.namespace = namespace
|
||||
d.qualified = qualified
|
||||
|
||||
# check if extending a previous processed element ("extension"):
|
||||
new_struct = struct is None
|
||||
if new_struct:
|
||||
struct = Struct()
|
||||
struct.namespaces[None] = namespace # set the default namespace
|
||||
struct.qualified = qualified
|
||||
|
||||
# iterate over the element's components (sub-elements):
|
||||
for e in children:
|
||||
|
||||
# extract type information from xml attributes / children:
|
||||
t = e['type']
|
||||
if not t:
|
||||
t = e['itemType'] # xs:list
|
||||
if not t:
|
||||
t = e['base'] # complexContent (extension)!
|
||||
if not t:
|
||||
@@ -147,37 +165,58 @@ def process_element(elements, element_name, node, element_type, xsd_uri, dialect
|
||||
# "anonymous" elements had no type attribute but children
|
||||
if e['name'] and e.children():
|
||||
# create a type name to process the children
|
||||
t = "%s_%s" % (element_name, e['name'])
|
||||
t = "%s_%s" % (element_name, e['name'])
|
||||
c = e.children()
|
||||
et = c.get_local_name()
|
||||
c = c.children()
|
||||
process_element(elements, t, c, et, xsd_uri, dialect, namespace, qualified)
|
||||
process_element(elements, t, c, et, xsd_uri, dialect,
|
||||
namespace, qualified)
|
||||
else:
|
||||
t = 'anyType' # no type given!
|
||||
|
||||
# extract namespace uri and type from xml attribute:
|
||||
t = t.split(":")
|
||||
if len(t) > 1:
|
||||
ns, type_name = t
|
||||
else:
|
||||
ns, type_name = None, t[0]
|
||||
if element_name == type_name and not alias and len(children) > 1:
|
||||
continue # abort to prevent infinite recursion
|
||||
uri = ns and e.get_namespace_uri(ns) or xsd_uri
|
||||
|
||||
# look for the conversion function (python type)
|
||||
if uri in (xsd_uri, soapenc_uri) and type_name != 'Array':
|
||||
# look for the type, None == any
|
||||
fn = REVERSE_TYPE_MAP.get(type_name, None)
|
||||
elif uri == soapenc_uri and type_name == 'Array':
|
||||
if tag.get_local_name() == 'list':
|
||||
# simple list type (values separated by spaces)
|
||||
fn = lambda s: [fn(v) for v in s.split(" ")]
|
||||
elif (uri == soapenc_uri and type_name == 'Array'):
|
||||
# arrays of simple types (look at the attribute tags):
|
||||
fn = []
|
||||
for a in e.children():
|
||||
for k, v in a[:]:
|
||||
if k.endswith(":arrayType"):
|
||||
type_name = v
|
||||
fn_namespace = None
|
||||
if ":" in type_name:
|
||||
type_name = type_name[type_name.index(":")+1:]
|
||||
fn_uri, type_name = type_name.split(":")
|
||||
fn_namespace = e.get_namespace_uri(fn_uri)
|
||||
if "[]" in type_name:
|
||||
type_name = type_name[:type_name.index("[]")]
|
||||
fn.append(REVERSE_TYPE_MAP.get(type_name, None))
|
||||
type_name = type_name[:type_name.index("[]")]
|
||||
# get the scalar conversion function (if any)
|
||||
fn_array = REVERSE_TYPE_MAP.get(type_name, None)
|
||||
if fn_array is None and type_name != "anyType" and fn_namespace:
|
||||
# get the complext element:
|
||||
ref_type = "complexType"
|
||||
key = make_key(type_name, ref_type, fn_namespace)
|
||||
fn_complex = elements.setdefault(key, Struct(key))
|
||||
# create an indirect struct {type_name: ...}:
|
||||
fn_array = Struct(key)
|
||||
fn_array[type_name] = fn_complex
|
||||
fn_array.namespaces[None] = fn_namespace # set the default namespace
|
||||
fn_array.qualified = qualified
|
||||
fn.append(fn_array)
|
||||
else:
|
||||
# not a simple python type / conversion function not available
|
||||
fn = None
|
||||
|
||||
if not fn:
|
||||
@@ -189,87 +228,149 @@ def process_element(elements, element_name, node, element_type, xsd_uri, dialect
|
||||
for k, v in e[:]:
|
||||
if k.startswith("xmlns:"):
|
||||
# get the namespace uri from the element
|
||||
fn_namespace = v
|
||||
fn = elements.setdefault(make_key(type_name, 'complexType', fn_namespace), OrderedDict())
|
||||
fn_namespace = v
|
||||
# create and store an empty python element (dict) filled later
|
||||
if not e['ref']:
|
||||
ref_type = "complexType"
|
||||
else:
|
||||
ref_type = "element"
|
||||
key = make_key(type_name, ref_type, fn_namespace)
|
||||
fn = elements.setdefault(key, Struct(key))
|
||||
|
||||
if e['maxOccurs'] == 'unbounded' or (uri == soapenc_uri and type_name == 'Array'):
|
||||
# it's an array... TODO: compound arrays? and check ns uri!
|
||||
if isinstance(fn, OrderedDict):
|
||||
if len(children) > 1 and dialect in ('jetty',):
|
||||
if isinstance(fn, Struct):
|
||||
if len(children) > 1 or (dialect in ('jetty', )):
|
||||
# Jetty style support
|
||||
# {'ClassName': [{'attr1': val1, 'attr2': val2}]
|
||||
fn.array = True
|
||||
else:
|
||||
# .NET style support (backward compatibility)
|
||||
# [{'ClassName': {'attr1': val1, 'attr2': val2}]
|
||||
d.array = True
|
||||
# .NET style now matches Jetty style
|
||||
# {'ClassName': [{'attr1': val1, 'attr2': val2}]
|
||||
#fn.array = True
|
||||
#struct.array = True
|
||||
fn = [fn]
|
||||
else:
|
||||
if dialect in ('jetty',):
|
||||
# scalar support [{'attr1': [val1]}]
|
||||
if len(children) > 1 or dialect in ('jetty',):
|
||||
# Jetty style support
|
||||
# scalar array support {'attr1': [val1]}
|
||||
fn = [fn]
|
||||
else:
|
||||
d.array = True
|
||||
# Jetty.NET style support (backward compatibility)
|
||||
# scalar array support [{'attr1': val1}]
|
||||
struct.array = True
|
||||
|
||||
# store the sub-element python type (function) in the element dict
|
||||
if (e['name'] is not None and not alias) or e['ref']:
|
||||
e_name = e['name'] or type_name # for refs, use the type name
|
||||
d[e_name] = fn
|
||||
struct[e_name] = fn
|
||||
struct.references[e_name] = e['ref']
|
||||
struct.namespaces[e_name] = namespace # set the element namespace
|
||||
else:
|
||||
log.debug('complexContent/simpleType/element %s = %s' % (element_name, type_name))
|
||||
d[None] = fn
|
||||
# use None to point this is a complex element reference
|
||||
struct.refers_to = fn
|
||||
if e is not None and e.get_local_name() == 'extension' and e.children():
|
||||
# extend base element (if ComplexContent only!):
|
||||
if isinstance(fn, Struct) and fn.refers_to:
|
||||
base_struct = fn.refers_to
|
||||
else:
|
||||
# TODO: check if this actually works for SimpleContent
|
||||
base_struct = None
|
||||
# extend base element:
|
||||
process_element(elements, element_name, e.children(), element_type, xsd_uri, dialect, namespace, qualified)
|
||||
elements.setdefault(make_key(element_name, element_type, namespace), OrderedDict()).update(d)
|
||||
process_element(elements, element_name, e.children(),
|
||||
element_type, xsd_uri, dialect, namespace,
|
||||
qualified, struct=base_struct)
|
||||
|
||||
# add the processed element to the main dictionary (if not extension):
|
||||
if new_struct:
|
||||
key = make_key(element_name, element_type, namespace)
|
||||
elements.setdefault(key, Struct(key)).update(struct)
|
||||
|
||||
|
||||
def postprocess_element(elements, processed):
|
||||
"""Fix unresolved references (elements referenced before its definition, thanks .net)"""
|
||||
|
||||
"""Fix unresolved references"""
|
||||
#elements variable contains all eelements and complexTypes defined in http://www.w3.org/2001/XMLSchema
|
||||
|
||||
# (elements referenced before its definition, thanks .net)
|
||||
# avoid already processed elements:
|
||||
if elements in processed:
|
||||
return
|
||||
processed.append(elements)
|
||||
|
||||
|
||||
for k, v in elements.items():
|
||||
if isinstance(v, OrderedDict):
|
||||
if isinstance(v, Struct):
|
||||
if v != elements: # TODO: fix recursive elements
|
||||
postprocess_element(v, processed)
|
||||
if None in v and v[None]: # extension base?
|
||||
if isinstance(v[None], dict):
|
||||
for i, kk in enumerate(v[None]):
|
||||
# extend base -keep orginal order-
|
||||
if v[None] is not None:
|
||||
elements[k].insert(kk, v[None][kk], i)
|
||||
del v[None]
|
||||
try:
|
||||
postprocess_element(v, processed)
|
||||
except RuntimeError as e: # maximum recursion depth exceeded
|
||||
warnings.warn(unicode(e), RuntimeWarning)
|
||||
if v.refers_to: # extension base?
|
||||
if isinstance(v.refers_to, dict):
|
||||
extend_element(v, v.refers_to)
|
||||
# clean the reference:
|
||||
v.refers_to = None
|
||||
else: # "alias", just replace
|
||||
log.debug('Replacing %s = %s' % (k, v[None]))
|
||||
elements[k] = v[None]
|
||||
#break
|
||||
##log.debug('Replacing %s = %s' % (k, v.refers_to))
|
||||
elements[k] = v.refers_to
|
||||
if v.array:
|
||||
elements[k] = [v] # convert arrays to python lists
|
||||
if isinstance(v, list):
|
||||
for n in v: # recurse list
|
||||
if isinstance(n, (OrderedDict, list)):
|
||||
if isinstance(n, (Struct, list)):
|
||||
#if n != elements: # TODO: fix recursive elements
|
||||
postprocess_element(n, processed)
|
||||
|
||||
def extend_element(element, base):
|
||||
''' Recursively extend the elemnet if it has an extension base.'''
|
||||
''' Recursion is needed if the extension base itself extends another element.'''
|
||||
if isinstance(base, dict):
|
||||
for i, kk in enumerate(base):
|
||||
# extend base -keep orignal order-
|
||||
if isinstance(base, Struct):
|
||||
element.insert(kk, base[kk], i)
|
||||
# update namespace (avoid ArrayOfKeyValueOfanyTypeanyType)
|
||||
if isinstance(base, Struct) and base.namespaces and kk:
|
||||
element.namespaces[kk] = base.namespaces[kk]
|
||||
element.references[kk] = base.references[kk]
|
||||
if base.refers_to:
|
||||
extend_element(element, base.refers_to)
|
||||
|
||||
def get_message(messages, message_name, part_name):
|
||||
def get_message(messages, message_name, part_name, parameter_order=None):
|
||||
if part_name:
|
||||
# get the specific part of the message:
|
||||
return messages.get((message_name, part_name))
|
||||
else:
|
||||
# get the first part for the specified message:
|
||||
parts = {}
|
||||
for (message_name_key, part_name_key), message in messages.items():
|
||||
if message_name_key == message_name:
|
||||
return message
|
||||
parts[part_name_key] = message
|
||||
if len(parts)>1:
|
||||
# merge (sorted by parameter_order for rpc style)
|
||||
new_msg = None
|
||||
for part_name_key in parameter_order:
|
||||
part = parts.get(part_name_key)
|
||||
if not part:
|
||||
log.error('Part %s not found for %s' % (part_name_key, message_name))
|
||||
elif not new_msg:
|
||||
new_msg = part.copy()
|
||||
else:
|
||||
new_msg[message_name].update(part[message_name])
|
||||
return new_msg
|
||||
elif parts:
|
||||
return list(parts.values())[0]
|
||||
#return parts.values()[0]
|
||||
|
||||
|
||||
|
||||
get_local_name = lambda s: s and str((':' in s) and s.split(':')[1] or s)
|
||||
get_namespace_prefix = lambda s: s and str((':' in s) and s.split(':')[0] or None)
|
||||
|
||||
|
||||
def preprocess_schema(schema, imported_schemas, elements, xsd_uri, dialect, http, cache, force_download, wsdl_basedir, global_namespaces=None, qualified=False):
|
||||
def preprocess_schema(schema, imported_schemas, elements, xsd_uri, dialect,
|
||||
http, cache, force_download, wsdl_basedir,
|
||||
global_namespaces=None, qualified=False):
|
||||
"""Find schema elements and complex types"""
|
||||
|
||||
from .simplexml import SimpleXMLElement # here to avoid recursive imports
|
||||
@@ -290,7 +391,7 @@ def preprocess_schema(schema, imported_schemas, elements, xsd_uri, dialect, http
|
||||
for ns in local_namespaces.values():
|
||||
if ns not in global_namespaces:
|
||||
global_namespaces[ns] = 'ns%s' % len(global_namespaces)
|
||||
|
||||
|
||||
for element in schema.children() or []:
|
||||
if element.get_local_name() in ('import', 'include',):
|
||||
schema_namespace = element['namespace']
|
||||
@@ -306,9 +407,15 @@ def preprocess_schema(schema, imported_schemas, elements, xsd_uri, dialect, http
|
||||
# Open uri and read xml:
|
||||
xml = fetch(schema_location, http, cache, force_download, wsdl_basedir)
|
||||
|
||||
# recalculate base path for relative schema locations
|
||||
path = os.path.normpath(os.path.join(wsdl_basedir, schema_location))
|
||||
path = os.path.dirname(path)
|
||||
|
||||
# Parse imported XML schema (recursively):
|
||||
imported_schema = SimpleXMLElement(xml, namespace=xsd_uri)
|
||||
preprocess_schema(imported_schema, imported_schemas, elements, xsd_uri, dialect, http, cache, force_download, wsdl_basedir, global_namespaces, qualified)
|
||||
preprocess_schema(imported_schema, imported_schemas, elements,
|
||||
xsd_uri, dialect, http, cache, force_download,
|
||||
path, global_namespaces, qualified)
|
||||
|
||||
element_type = element.get_local_name()
|
||||
if element_type in ('element', 'complexType', "simpleType"):
|
||||
@@ -319,7 +426,9 @@ def preprocess_schema(schema, imported_schemas, elements, xsd_uri, dialect, http
|
||||
if element.get_local_name() == 'complexType':
|
||||
children = element.children()
|
||||
elif element.get_local_name() == 'simpleType':
|
||||
children = element('restriction', ns=xsd_uri)
|
||||
children = element('restriction', ns=xsd_uri, error=False)
|
||||
if not children:
|
||||
children = element.children() # xs:list
|
||||
elif element.get_local_name() == 'element' and element['type']:
|
||||
children = element
|
||||
else:
|
||||
@@ -329,7 +438,8 @@ def preprocess_schema(schema, imported_schemas, elements, xsd_uri, dialect, http
|
||||
elif element.get_local_name() == 'element':
|
||||
children = element
|
||||
if children:
|
||||
process_element(elements, element_name, children, element_type, xsd_uri, dialect, namespace, qualified)
|
||||
process_element(elements, element_name, children, element_type,
|
||||
xsd_uri, dialect, namespace, qualified)
|
||||
|
||||
|
||||
# simplexml utilities:
|
||||
@@ -347,10 +457,28 @@ def datetime_u(s):
|
||||
return _strptime(s, fmt)
|
||||
except ValueError:
|
||||
try:
|
||||
# strip utc offset
|
||||
if s[-3] == ":" and s[-6] in (' ', '-', '+'):
|
||||
warnings.warn('removing unsupported UTC offset', RuntimeWarning)
|
||||
s = s[:-6]
|
||||
# strip zulu timezone suffix or utc offset
|
||||
if s[-1] == "Z" or (s[-3] == ":" and s[-6] in (' ', '-', '+')):
|
||||
try:
|
||||
import iso8601
|
||||
return iso8601.parse_date(s)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import isodate
|
||||
return isodate.parse_datetime(s)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import dateutil.parser
|
||||
return dateutil.parser.parse(s)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
warnings.warn('removing unsupported "Z" suffix or UTC offset. Install `iso8601`, `isodate` or `python-dateutil` package to support it', RuntimeWarning)
|
||||
s = s[:-1] if s[-1] == "Z" else s[:-6]
|
||||
# parse microseconds
|
||||
try:
|
||||
return _strptime(s, fmt + ".%f")
|
||||
@@ -363,6 +491,7 @@ def datetime_u(s):
|
||||
s = s[:s.index(".")]
|
||||
return _strptime(s, fmt)
|
||||
|
||||
|
||||
datetime_m = lambda dt: dt.isoformat()
|
||||
date_u = lambda s: _strptime(s[0:10], "%Y-%m-%d").date()
|
||||
date_m = lambda d: d.strftime("%Y-%m-%d")
|
||||
@@ -370,7 +499,8 @@ time_u = lambda s: _strptime(s, "%H:%M:%S").time()
|
||||
time_m = lambda d: d.strftime("%H%M%S")
|
||||
bool_u = lambda s: {'0': False, 'false': False, '1': True, 'true': True}[s]
|
||||
bool_m = lambda s: {False: 'false', True: 'true'}[s]
|
||||
|
||||
decimal_m = lambda d: '{0:f}'.format(d)
|
||||
float_m = lambda f: '{0:.10f}'.format(f)
|
||||
|
||||
# aliases:
|
||||
class Alias(object):
|
||||
@@ -383,6 +513,31 @@ class Alias(object):
|
||||
def __repr__(self):
|
||||
return "<alias '%s' for '%s'>" % (self.xml_type, self.py_type)
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, Alias) and self.xml_type == other.xml_type
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __gt__(self, other):
|
||||
if isinstance(other, Alias): return self.xml_type > other.xml_type
|
||||
if isinstance(other, Struct): return False
|
||||
return True
|
||||
|
||||
def __lt__(self, other):
|
||||
if isinstance(other, Alias): return self.xml_type < other.xml_type
|
||||
if isinstance(other, Struct): return True
|
||||
return False
|
||||
|
||||
def __ge__(self, other):
|
||||
return self.__gt__(other) or self.__eq__(other)
|
||||
|
||||
def __le__(self, other):
|
||||
return self.__gt__(other) or self.__eq__(other)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.xml_type)
|
||||
|
||||
if sys.version > '3':
|
||||
long = Alias(int, 'long')
|
||||
byte = Alias(str, 'byte')
|
||||
@@ -392,8 +547,10 @@ integer = Alias(long, 'integer')
|
||||
DateTime = datetime.datetime
|
||||
Date = datetime.date
|
||||
Time = datetime.time
|
||||
duration = Alias(str, 'duration')
|
||||
any_uri = Alias(str, 'anyURI')
|
||||
|
||||
# Define convertion function (python type): xml schema type
|
||||
# Define conversion function (python type): xml schema type
|
||||
TYPE_MAP = {
|
||||
unicode: 'string',
|
||||
bool: 'boolean',
|
||||
@@ -407,15 +564,22 @@ TYPE_MAP = {
|
||||
Decimal: 'decimal',
|
||||
datetime.datetime: 'dateTime',
|
||||
datetime.date: 'date',
|
||||
datetime.time: 'time',
|
||||
duration: 'duration',
|
||||
any_uri: 'anyURI',
|
||||
}
|
||||
TYPE_MARSHAL_FN = {
|
||||
datetime.datetime: datetime_m,
|
||||
datetime.date: date_m,
|
||||
bool: bool_m
|
||||
datetime.time: time_m,
|
||||
float: float_m,
|
||||
Decimal: decimal_m,
|
||||
bool: bool_m,
|
||||
}
|
||||
TYPE_UNMARSHAL_FN = {
|
||||
datetime.datetime: datetime_u,
|
||||
datetime.date: date_u,
|
||||
datetime.time: time_u,
|
||||
bool: bool_u,
|
||||
str: unicode,
|
||||
}
|
||||
@@ -424,19 +588,27 @@ REVERSE_TYPE_MAP = dict([(v, k) for k, v in TYPE_MAP.items()])
|
||||
|
||||
REVERSE_TYPE_MAP.update({
|
||||
'base64Binary': str,
|
||||
'unsignedByte': byte,
|
||||
'unsignedInt': int,
|
||||
'unsignedLong': long,
|
||||
'unsignedShort': short
|
||||
})
|
||||
|
||||
# insert str here to avoid collision in REVERSE_TYPE_MAP (i.e. decoding errors)
|
||||
if str not in TYPE_MAP:
|
||||
TYPE_MAP[str] = 'string'
|
||||
TYPE_MAP[str] = 'string'
|
||||
|
||||
|
||||
class OrderedDict(dict):
|
||||
"""Minimal ordered dictionary for xsd:sequences"""
|
||||
def __init__(self):
|
||||
class Struct(dict):
|
||||
"""Minimal ordered dictionary to represent elements (i.e. xsd:sequences)"""
|
||||
|
||||
def __init__(self, key=None):
|
||||
self.key = key
|
||||
self.__keys = []
|
||||
self.array = False
|
||||
self.namespace = None
|
||||
self.namespaces = {} # key: element, value: namespace URI
|
||||
self.references = {} # key: element, value: reference name
|
||||
self.refers_to = None # "symbolic linked" struct
|
||||
self.qualified = None
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
@@ -464,26 +636,71 @@ class OrderedDict(dict):
|
||||
return [(key, self[key]) for key in self.__keys]
|
||||
|
||||
def update(self, other):
|
||||
if isinstance(other, Struct) and other.key:
|
||||
self.key = other.key
|
||||
for k, v in other.items():
|
||||
self[k] = v
|
||||
# do not change if we are an array but the other is not:
|
||||
if isinstance(other, OrderedDict) and not self.array:
|
||||
if isinstance(other, Struct) and not self.array:
|
||||
self.array = other.array
|
||||
if isinstance(other, OrderedDict) and not self.namespace:
|
||||
self.namespace = other.namespace
|
||||
if isinstance(other, Struct):
|
||||
# TODO: check replacing default ns is a regression
|
||||
self.namespaces.update(other.namespaces)
|
||||
self.references.update(other.references)
|
||||
self.qualified = other.qualified
|
||||
self.refers_to = other.refers_to
|
||||
|
||||
def copy(self):
|
||||
"Make a duplicate"
|
||||
new = OrderedDict()
|
||||
new = Struct(self.key)
|
||||
new.update(self)
|
||||
return new
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, Struct) and self.key == other.key and self.key != None
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __gt__(self, other):
|
||||
if isinstance(other, Struct): return (self.key[2], self.key[0], self.key[1]) > (other.key[2], other.key[0], other.key[1])
|
||||
return True
|
||||
|
||||
def __lt__(self, other):
|
||||
if isinstance(other, Struct): return (self.key[2], self.key[0], self.key[1]) < (other.key[2], other.key[0], other.key[1])
|
||||
return False
|
||||
|
||||
def __ge__(self, other):
|
||||
return self.__gt__(other) or self.__eq__(other)
|
||||
|
||||
def __le__(self, other):
|
||||
return self.__gt__(other) or self.__eq__(other)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.key)
|
||||
|
||||
def __str__(self):
|
||||
return "%s" % dict.__str__(self)
|
||||
|
||||
def __repr__(self):
|
||||
s = "{%s}" % ", ".join(['%s: %s' % (repr(k), repr(v)) for k, v in self.items()])
|
||||
if self.array and False:
|
||||
s = "[%s]" % s
|
||||
if not self.key: return str(self.keys())
|
||||
s = '%s' % self.key[0]
|
||||
if self.keys():
|
||||
s += ' {'
|
||||
for k, t in self.items():
|
||||
is_list = False
|
||||
if isinstance(t, list):
|
||||
is_list = True
|
||||
t = t[0]
|
||||
if isinstance(t, type):
|
||||
t = t.__name__
|
||||
pass
|
||||
elif isinstance(t, Alias):
|
||||
t = t.xml_type
|
||||
elif isinstance(t, Struct):
|
||||
t = t.key[0]
|
||||
if is_list:
|
||||
t = [t]
|
||||
s += '%s: %s, ' % (k, t)
|
||||
s = s[:-2]+'}'
|
||||
return s
|
||||
|
||||
39
gluon/contrib/pysimplesoap/plugins.py
Normal file
39
gluon/contrib/pysimplesoap/plugins.py
Normal file
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
|
||||
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# for more details.
|
||||
|
||||
"""Pythonic simple SOAP Client plugins"""
|
||||
|
||||
|
||||
from __future__ import unicode_literals
|
||||
import sys
|
||||
if sys.version > '3':
|
||||
basestring = unicode = str
|
||||
|
||||
import datetime
|
||||
from decimal import Decimal
|
||||
import os
|
||||
import logging
|
||||
import hashlib
|
||||
import warnings
|
||||
|
||||
|
||||
from . import __author__, __copyright__, __license__, __version__
|
||||
|
||||
|
||||
class WSSE:
|
||||
|
||||
def preprocess(self, request):
|
||||
header = request('Header')
|
||||
|
||||
|
||||
def postprocess(response):
|
||||
return response
|
||||
42
gluon/contrib/pysimplesoap/server.py
Executable file → Normal file
42
gluon/contrib/pysimplesoap/server.py
Executable file → Normal file
@@ -6,7 +6,7 @@
|
||||
# version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# for more details.
|
||||
|
||||
@@ -39,6 +39,13 @@ log = logging.getLogger(__name__)
|
||||
NS_RX = re.compile(r'xmlns:(\w+)="(.+?)"')
|
||||
|
||||
|
||||
class SoapFault(Exception):
|
||||
def __init__(self, faultcode=None, faultstring=None, detail=None):
|
||||
self.faultcode = faultcode or self.__class__.__name__
|
||||
self.faultstring = faultstring or ''
|
||||
self.detail = detail
|
||||
|
||||
|
||||
class SoapDispatcher(object):
|
||||
"""Simple Dispatcher for SOAP Server"""
|
||||
|
||||
@@ -115,6 +122,9 @@ class SoapDispatcher(object):
|
||||
def register_function(self, name, fn, returns=None, args=None, doc=None):
|
||||
self.methods[name] = fn, returns, args, doc or getattr(fn, "__doc__", "")
|
||||
|
||||
def response_element_name(self, method):
|
||||
return '%sResponse' % method
|
||||
|
||||
def dispatch(self, xml, action=None, fault=None):
|
||||
"""Receive and process SOAP call, returns the xml"""
|
||||
# a dict can be sent in fault to expose it to the caller
|
||||
@@ -137,7 +147,8 @@ class SoapDispatcher(object):
|
||||
# detect soap prefix and uri (xmlns attributes of Envelope)
|
||||
for k, v in request[:]:
|
||||
if v in ("http://schemas.xmlsoap.org/soap/envelope/",
|
||||
"http://www.w3.org/2003/05/soap-env",):
|
||||
"http://www.w3.org/2003/05/soap-env",
|
||||
"http://www.w3.org/2003/05/soap-envelope",):
|
||||
soap_ns = request.attributes()[k].localName
|
||||
soap_uri = request.attributes()[k].value
|
||||
|
||||
@@ -184,13 +195,20 @@ class SoapDispatcher(object):
|
||||
ret = function(**args)
|
||||
log.debug('dispathed method returns: %s', ret)
|
||||
|
||||
except SoapFault as e:
|
||||
fault.update({
|
||||
'faultcode': "%s.%s" % (soap_fault_code, e.faultcode),
|
||||
'faultstring': e.faultstring,
|
||||
'detail': e.detail
|
||||
})
|
||||
|
||||
except Exception: # This shouldn't be one huge try/except
|
||||
import sys
|
||||
etype, evalue, etb = sys.exc_info()
|
||||
log.error(traceback.format_exc())
|
||||
if self.debug:
|
||||
detail = ''.join(traceback.format_exception(etype, evalue, etb))
|
||||
detail += '\n\nXML REQUEST\n\n' + xml
|
||||
detail = u''.join(traceback.format_exception(etype, evalue, etb))
|
||||
detail += u'\n\nXML REQUEST\n\n' + xml.decode('UTF-8')
|
||||
else:
|
||||
detail = None
|
||||
fault.update({'faultcode': "%s.%s" % (soap_fault_code, etype.__name__),
|
||||
@@ -235,7 +253,7 @@ class SoapDispatcher(object):
|
||||
body.marshall("%s:Fault" % soap_ns, fault, ns=False)
|
||||
else:
|
||||
# return normal value
|
||||
res = body.add_child("%sResponse" % name, ns=self.namespace)
|
||||
res = body.add_child(self.response_element_name(name), ns=self.namespace)
|
||||
if not prefix:
|
||||
res['xmlns'] = self.namespace # add target namespace
|
||||
|
||||
@@ -251,7 +269,7 @@ class SoapDispatcher(object):
|
||||
"%s vs %s" % (str(returns_types), str(ret)))
|
||||
if not complex_type or not types_ok:
|
||||
# backward compatibility for scalar and simple types
|
||||
res.marshall(returns_types.keys()[0], ret, )
|
||||
res.marshall(list(returns_types.keys())[0], ret, )
|
||||
else:
|
||||
# new style for complex classes
|
||||
for k, v in ret.items():
|
||||
@@ -367,7 +385,7 @@ class SoapDispatcher(object):
|
||||
parse_element(n, v.items(), complex=True)
|
||||
t = "tns:%s" % n
|
||||
else:
|
||||
raise TypeError("unknonw type v for marshalling" % str(v))
|
||||
raise TypeError("unknonw type %s for marshalling" % str(v))
|
||||
e.add_attribute('type', t)
|
||||
|
||||
parse_element("%s" % method, args and args.items())
|
||||
@@ -452,7 +470,13 @@ class SOAPHandler(BaseHTTPRequestHandler):
|
||||
|
||||
def do_POST(self):
|
||||
"""SOAP POST gateway"""
|
||||
request = self.rfile.read(int(self.headers.getheader('content-length')))
|
||||
request = self.rfile.read(int(self.headers.get('content-length')))
|
||||
# convert xml request to unicode (according to request headers)
|
||||
if sys.version < '3':
|
||||
encoding = self.headers.getparam("charset")
|
||||
else:
|
||||
encoding = self.headers.get_param("charset")
|
||||
request = request.decode(encoding)
|
||||
fault = {}
|
||||
# execute the method
|
||||
response = self.server.dispatcher.dispatch(request, fault=fault)
|
||||
@@ -576,7 +600,7 @@ if __name__ == "__main__":
|
||||
namespace="http://example.com/sample.wsdl",
|
||||
soap_ns='soap',
|
||||
trace=True,
|
||||
ns=False
|
||||
ns="ns0",
|
||||
)
|
||||
p = {'a': 1, 'b': 2}
|
||||
c = [{'d': '1.20'}, {'d': '2.01'}]
|
||||
|
||||
108
gluon/contrib/pysimplesoap/simplexml.py
Executable file → Normal file
108
gluon/contrib/pysimplesoap/simplexml.py
Executable file → Normal file
@@ -6,7 +6,7 @@
|
||||
# version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# for more details.
|
||||
|
||||
@@ -28,7 +28,7 @@ from . import __author__, __copyright__, __license__, __version__
|
||||
|
||||
# Utility functions used for marshalling, moved aside for readability
|
||||
from .helpers import TYPE_MAP, TYPE_MARSHAL_FN, TYPE_UNMARSHAL_FN, \
|
||||
REVERSE_TYPE_MAP, OrderedDict, Date, Decimal
|
||||
REVERSE_TYPE_MAP, Struct, Date, Decimal
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@@ -79,7 +79,10 @@ class SimpleXMLElement(object):
|
||||
element = self.__document.createElementNS(self.__ns, name)
|
||||
# don't append null tags!
|
||||
if text is not None:
|
||||
element.appendChild(self.__document.createTextNode(text))
|
||||
if isinstance(text, xml.dom.minidom.CDATASection):
|
||||
element.appendChild(self.__document.createCDATASection(text.data))
|
||||
else:
|
||||
element.appendChild(self.__document.createTextNode(text))
|
||||
self._element.appendChild(element)
|
||||
return SimpleXMLElement(
|
||||
elements=[element],
|
||||
@@ -117,10 +120,15 @@ class SimpleXMLElement(object):
|
||||
else:
|
||||
return self.__document.toprettyxml(encoding='UTF-8')
|
||||
|
||||
def __repr__(self):
|
||||
"""Return the XML representation of this tag"""
|
||||
# NOTE: do not use self.as_xml('UTF-8') as it returns the whole xml doc
|
||||
return self._element.toxml('UTF-8')
|
||||
if sys.version > '3':
|
||||
def __repr__(self):
|
||||
"""Return the XML representation of this tag"""
|
||||
return self._element.toxml()
|
||||
else:
|
||||
def __repr__(self):
|
||||
"""Return the XML representation of this tag"""
|
||||
# NOTE: do not use self.as_xml('UTF-8') as it returns the whole xml doc
|
||||
return self._element.toxml('UTF-8')
|
||||
|
||||
def get_name(self):
|
||||
"""Return the tag name of this node"""
|
||||
@@ -182,6 +190,10 @@ class SimpleXMLElement(object):
|
||||
for k, v in value.items():
|
||||
self.add_attribute(k, v)
|
||||
|
||||
def __delitem__(self, item):
|
||||
"Remove an attribute"
|
||||
self._element.removeAttribute(item)
|
||||
|
||||
def __call__(self, tag=None, ns=None, children=False, root=False,
|
||||
error=True, ):
|
||||
"""Search (even in child nodes) and return a child tag by name"""
|
||||
@@ -284,17 +296,17 @@ class SimpleXMLElement(object):
|
||||
|
||||
def __unicode__(self):
|
||||
"""Returns the unicode text nodes of the current element"""
|
||||
if self._element.childNodes:
|
||||
rc = ""
|
||||
for node in self._element.childNodes:
|
||||
if node.nodeType == node.TEXT_NODE:
|
||||
rc = rc + node.data
|
||||
return rc
|
||||
return ''
|
||||
rc = ''
|
||||
for node in self._element.childNodes:
|
||||
if node.nodeType == node.TEXT_NODE or node.nodeType == node.CDATA_SECTION_NODE:
|
||||
rc = rc + node.data
|
||||
return rc
|
||||
|
||||
def __str__(self):
|
||||
"""Returns the str text nodes of the current element"""
|
||||
return self.__unicode__()
|
||||
if sys.version > '3':
|
||||
__str__ = __unicode__
|
||||
else:
|
||||
def __str__(self):
|
||||
return self.__unicode__().encode('utf-8')
|
||||
|
||||
def __int__(self):
|
||||
"""Returns the integer value of the current element"""
|
||||
@@ -313,7 +325,7 @@ class SimpleXMLElement(object):
|
||||
#import pdb; pdb.set_trace()
|
||||
|
||||
"""Convert to python values the current serialized xml element"""
|
||||
# types is a dict of {tag name: convertion function}
|
||||
# types is a dict of {tag name: conversion function}
|
||||
# strict=False to use default type conversion if not specified
|
||||
# example: types={'p': {'a': int,'b': int}, 'c': [{'d':str}]}
|
||||
# expected xml: <p><a>1</a><b>2</b></p><c><d>hola</d><d>chau</d>
|
||||
@@ -329,7 +341,7 @@ class SimpleXMLElement(object):
|
||||
if ref_node['id'] == href:
|
||||
node = ref_node
|
||||
ref_name_type = ref_node['xsi:type'].split(":")[1]
|
||||
break
|
||||
break
|
||||
|
||||
try:
|
||||
if isinstance(types, dict):
|
||||
@@ -341,12 +353,29 @@ class SimpleXMLElement(object):
|
||||
else:
|
||||
fn = types
|
||||
except (KeyError, ) as e:
|
||||
xmlns = node['xmlns'] or node.get_namespace_uri(node.get_prefix())
|
||||
if 'xsi:type' in node.attributes().keys():
|
||||
xsd_type = node['xsi:type'].split(":")[1]
|
||||
try:
|
||||
fn = REVERSE_TYPE_MAP[xsd_type]
|
||||
# get fn type from SOAP-ENC:arrayType="xsd:string[28]"
|
||||
if xsd_type == 'Array':
|
||||
array_type = [k for k,v in node[:] if 'arrayType' in k][0]
|
||||
xsd_type = node[array_type].split(":")[1]
|
||||
if "[" in xsd_type:
|
||||
xsd_type = xsd_type[:xsd_type.index("[")]
|
||||
fn = [REVERSE_TYPE_MAP[xsd_type]]
|
||||
else:
|
||||
fn = REVERSE_TYPE_MAP[xsd_type]
|
||||
except:
|
||||
fn = None # ignore multirefs!
|
||||
elif xmlns == "http://www.w3.org/2001/XMLSchema":
|
||||
# self-defined schema, return the SimpleXMLElement
|
||||
# TODO: parse to python types if <s:element ref="s:schema"/>
|
||||
fn = None
|
||||
elif None in types:
|
||||
# <s:any/>, return the SimpleXMLElement
|
||||
# TODO: check position of None if inside <s:sequence>
|
||||
fn = None
|
||||
elif strict:
|
||||
raise TypeError("Tag: %s invalid (type not found)" % (name,))
|
||||
else:
|
||||
@@ -356,7 +385,9 @@ class SimpleXMLElement(object):
|
||||
if isinstance(fn, list):
|
||||
# append to existing list (if any) - unnested dict arrays -
|
||||
value = d.setdefault(name, [])
|
||||
children = node.children()
|
||||
# If the node has no children then the node itself might
|
||||
# have multiple occurrences:
|
||||
children = node.children() or node
|
||||
# TODO: check if this was really needed (get first child only)
|
||||
##if len(fn[0]) == 1 and children:
|
||||
## children = children()
|
||||
@@ -365,14 +396,15 @@ class SimpleXMLElement(object):
|
||||
for child in (children or []):
|
||||
tmp_dict = child.unmarshall(fn[0], strict)
|
||||
value.extend(tmp_dict.values())
|
||||
elif (self.__jetty and len(fn[0]) > 1):
|
||||
# Jetty array style support [{k, v}]
|
||||
#elif (self.__jetty and len(fn[0]) > 1):
|
||||
elif (len(fn[0]) > 1):
|
||||
# Jetty and now all dialects use array style support [{k, v}]
|
||||
for parent in node:
|
||||
tmp_dict = {} # unmarshall each value & mix
|
||||
for child in (node.children() or []):
|
||||
tmp_dict.update(child.unmarshall(fn[0], strict))
|
||||
value.append(tmp_dict)
|
||||
else: # .Net / Java
|
||||
else: # len(fn[0]) == 0
|
||||
for child in (children or []):
|
||||
value.append(child.unmarshall(fn[0], strict))
|
||||
|
||||
@@ -446,9 +478,12 @@ class SimpleXMLElement(object):
|
||||
for k, v in value.items():
|
||||
if not add_children_ns:
|
||||
ns = False
|
||||
else:
|
||||
elif hasattr(value, 'namespaces'):
|
||||
# for children, use the wsdl element target namespace:
|
||||
ns = getattr(value, 'namespace', None)
|
||||
ns = value.namespaces.get(k)
|
||||
else:
|
||||
# simple type
|
||||
ns = None
|
||||
child.marshall(k, v, add_comments=add_comments, ns=ns)
|
||||
elif isinstance(value, tuple): # serialize tuple (<key>value</key>)
|
||||
child = add_child and self.add_child(name, ns=ns) or self
|
||||
@@ -456,15 +491,23 @@ class SimpleXMLElement(object):
|
||||
ns = False
|
||||
for k, v in value:
|
||||
getattr(self, name).marshall(k, v, add_comments=add_comments, ns=ns)
|
||||
elif isinstance(value, list): # serialize lists
|
||||
elif isinstance(value, list): # serialize lists name: [value1, value2]
|
||||
# list elements should be a dict with one element:
|
||||
# 'vats': [{'vat': {'vat_amount': 50, 'vat_percent': 5}}, {...}]
|
||||
# or an array of complex types directly (a.k.a. jetty dialect)
|
||||
# 'vat': [{'vat_amount': 100, 'vat_percent': 21.0}, {...}]
|
||||
child = self.add_child(name, ns=ns)
|
||||
if not add_children_ns:
|
||||
ns = False
|
||||
if add_comments:
|
||||
child.add_comment("Repetitive array of:")
|
||||
for t in value:
|
||||
for i, t in enumerate(value):
|
||||
child.marshall(name, t, False, add_comments=add_comments, ns=ns)
|
||||
elif isinstance(value, basestring): # do not convert strings or unicodes
|
||||
# "jetty" arrays: add new base node (if not last) -see abobe-
|
||||
# TODO: this could be an issue for some arrays of single values
|
||||
if isinstance(t, dict) and len(t) > 1 and i < len(value) - 1:
|
||||
child = self.add_child(name, ns=ns)
|
||||
elif isinstance(value, (xml.dom.minidom.CDATASection, basestring)): # do not convert strings or unicodes
|
||||
self.add_child(name, value, ns=ns)
|
||||
elif value is None: # sent a empty tag?
|
||||
self.add_child(name, ns=ns)
|
||||
@@ -480,3 +523,10 @@ class SimpleXMLElement(object):
|
||||
def import_node(self, other):
|
||||
x = self.__document.importNode(other._element, True) # deep copy
|
||||
self._element.appendChild(x)
|
||||
|
||||
def write_c14n(self, output=None, exclusive=True):
|
||||
"Generate the canonical version of the XML node"
|
||||
from . import c14n
|
||||
xml = c14n.Canonicalize(self._element, output,
|
||||
unsuppressedPrefixes=[] if exclusive else None)
|
||||
return xml
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
# version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# for more details.
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
|
||||
import logging
|
||||
import ssl
|
||||
import sys
|
||||
try:
|
||||
import urllib2
|
||||
@@ -23,7 +24,7 @@ except ImportError:
|
||||
from http.cookiejar import CookieJar
|
||||
|
||||
from . import __author__, __copyright__, __license__, __version__, TIMEOUT
|
||||
from .simplexml import SimpleXMLElement, TYPE_MAP, OrderedDict
|
||||
from .simplexml import SimpleXMLElement, TYPE_MAP, Struct
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@@ -83,14 +84,14 @@ else:
|
||||
_wrapper_name = 'httplib2'
|
||||
|
||||
def __init__(self, timeout, proxy=None, cacert=None, sessions=False):
|
||||
##httplib2.debuglevel=4
|
||||
# httplib2.debuglevel=4
|
||||
kwargs = {}
|
||||
if proxy:
|
||||
import socks
|
||||
kwargs['proxy_info'] = httplib2.ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, **proxy)
|
||||
log.info("using proxy %s" % proxy)
|
||||
|
||||
# set optional parameters according supported httplib2 version
|
||||
# set optional parameters according to supported httplib2 version
|
||||
if httplib2.__version__ >= '0.3.0':
|
||||
kwargs['timeout'] = timeout
|
||||
if httplib2.__version__ >= '0.7.0':
|
||||
@@ -121,12 +122,21 @@ class urllib2Transport(TransportBase):
|
||||
raise RuntimeError('proxy is not supported with urllib2 transport')
|
||||
if cacert:
|
||||
raise RuntimeError('cacert is not support with urllib2 transport')
|
||||
|
||||
handlers = []
|
||||
|
||||
self.request_opener = urllib2.urlopen
|
||||
if ((sys.version_info[0] == 2 and sys.version_info >= (2,7,9)) or
|
||||
(sys.version_info[0] == 3 and sys.version_info >= (3,2,0))):
|
||||
context = ssl.create_default_context()
|
||||
context.check_hostname = False
|
||||
context.verify_mode = ssl.CERT_NONE
|
||||
handlers.append(urllib2.HTTPSHandler(context=context))
|
||||
|
||||
if sessions:
|
||||
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(CookieJar()))
|
||||
self.request_opener = opener.open
|
||||
|
||||
handlers.append(urllib2.HTTPCookieProcessor(CookieJar()))
|
||||
|
||||
opener = urllib2.build_opener(*handlers)
|
||||
self.request_opener = opener.open
|
||||
self._timeout = timeout
|
||||
|
||||
def request(self, url, method="GET", body=None, headers={}):
|
||||
@@ -142,10 +152,8 @@ class urllib2Transport(TransportBase):
|
||||
_http_connectors['urllib2'] = urllib2Transport
|
||||
_http_facilities.setdefault('sessions', []).append('urllib2')
|
||||
|
||||
import sys
|
||||
if sys.version_info >= (2, 6):
|
||||
_http_facilities.setdefault('timeout', []).append('urllib2')
|
||||
del sys
|
||||
|
||||
#
|
||||
# pycurl support.
|
||||
@@ -191,7 +199,7 @@ else:
|
||||
c.setopt(c.CAINFO, self.cacert)
|
||||
c.setopt(pycurl.SSL_VERIFYPEER, self.cacert and 1 or 0)
|
||||
c.setopt(pycurl.SSL_VERIFYHOST, self.cacert and 2 or 0)
|
||||
c.setopt(pycurl.CONNECTTIMEOUT, self.timeout / 6)
|
||||
c.setopt(pycurl.CONNECTTIMEOUT, self.timeout)
|
||||
c.setopt(pycurl.TIMEOUT, self.timeout)
|
||||
if method == 'POST':
|
||||
c.setopt(pycurl.POST, 1)
|
||||
|
||||
215
gluon/contrib/pysimplesoap/wsse.py
Normal file
215
gluon/contrib/pysimplesoap/wsse.py
Normal file
@@ -0,0 +1,215 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 3, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# for more details.
|
||||
|
||||
"""Pythonic simple SOAP Client plugins for WebService Security extensions"""
|
||||
|
||||
|
||||
from __future__ import unicode_literals
|
||||
import sys
|
||||
if sys.version > '3':
|
||||
basestring = unicode = str
|
||||
|
||||
import datetime
|
||||
from decimal import Decimal
|
||||
import os
|
||||
import logging
|
||||
import hashlib
|
||||
import warnings
|
||||
|
||||
from . import __author__, __copyright__, __license__, __version__
|
||||
from .simplexml import SimpleXMLElement
|
||||
|
||||
import random
|
||||
import string
|
||||
from hashlib import sha1
|
||||
|
||||
def randombytes(N):
|
||||
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(N))
|
||||
|
||||
# Namespaces:
|
||||
|
||||
WSSE_URI = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd'
|
||||
WSU_URI = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd"
|
||||
XMLDSIG_URI = "http://www.w3.org/2000/09/xmldsig#"
|
||||
X509v3_URI = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509v3"
|
||||
Base64Binary_URI = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary"
|
||||
PasswordDigest_URI = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordDigest"
|
||||
|
||||
|
||||
class UsernameToken:
|
||||
"WebService Security extension to add a basic credentials to xml request"
|
||||
|
||||
def __init__(self, username="", password=""):
|
||||
self.token = {
|
||||
'wsse:UsernameToken': {
|
||||
'wsse:Username': username,
|
||||
'wsse:Password': password,
|
||||
}
|
||||
}
|
||||
|
||||
def preprocess(self, client, request, method, args, kwargs, headers, soap_uri):
|
||||
"Add basic credentials to outgoing message"
|
||||
# always extract WS Security header and send it
|
||||
header = request('Header', ns=soap_uri, )
|
||||
k = 'wsse:Security'
|
||||
# for backward compatibility, use header if given:
|
||||
if k in headers:
|
||||
self.token = headers[k]
|
||||
# convert the token to xml
|
||||
header.marshall(k, self.token, ns=False, add_children_ns=False)
|
||||
header(k)['xmlns:wsse'] = WSSE_URI
|
||||
#<wsse:UsernameToken xmlns:wsu='http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'>
|
||||
|
||||
def postprocess(self, client, response, method, args, kwargs, headers, soap_uri):
|
||||
"Analyze incoming credentials"
|
||||
# TODO: add some password validation callback?
|
||||
pass
|
||||
|
||||
class UsernameDigestToken(UsernameToken):
|
||||
"""
|
||||
WebService Security extension to add a http digest credentials to xml request
|
||||
drift -> time difference from the server in seconds, needed for 'Created' header
|
||||
"""
|
||||
|
||||
def __init__(self, username="", password="", drift=0):
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.drift = datetime.timedelta(seconds=drift)
|
||||
|
||||
def preprocess(self, client, request, method, args, kwargs, headers, soap_uri):
|
||||
header = request('Header', ns=soap_uri, )
|
||||
wsse = header.add_child('wsse:Security', ns=False)
|
||||
wsse['xmlns:wsse'] = WSSE_URI
|
||||
wsse['xmlns:wsu'] = WSU_URI
|
||||
|
||||
usertoken = wsse.add_child('wsse:UsernameToken', ns=False)
|
||||
usertoken.add_child('wsse:Username', self.username, ns=False)
|
||||
|
||||
created = (datetime.datetime.utcnow() + self.drift).isoformat() + 'Z'
|
||||
usertoken.add_child('wsu:Created', created, ns=False)
|
||||
|
||||
nonce = randombytes(16)
|
||||
wssenonce = usertoken.add_child('wsse:Nonce', nonce.encode('base64')[:-1], ns=False)
|
||||
wssenonce['EncodingType'] = Base64Binary_URI
|
||||
|
||||
sha1obj = sha1()
|
||||
sha1obj.update(nonce + created + self.password)
|
||||
digest = sha1obj.digest()
|
||||
password = usertoken.add_child('wsse:Password', digest.encode('base64')[:-1], ns=False)
|
||||
password['Type'] = PasswordDigest_URI
|
||||
|
||||
|
||||
BIN_TOKEN_TMPL = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<wsse:Security soapenv:mustUnderstand="1" xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:wsse="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd">
|
||||
<wsse:BinarySecurityToken EncodingType="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary" ValueType="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509v3" wsu:Id="CertId-45851B081998E431E8132880700036719" xmlns:wsu="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
|
||||
%(certificate)s</wsse:BinarySecurityToken>
|
||||
<ds:Signature Id="Signature-13" xmlns:ds="http://www.w3.org/2000/09/xmldsig#">
|
||||
%(signed_info)s
|
||||
<ds:SignatureValue>%(signature_value)s</ds:SignatureValue>
|
||||
<ds:KeyInfo Id="KeyId-45851B081998E431E8132880700036720">
|
||||
<wsse:SecurityTokenReference wsu:Id="STRId-45851B081998E431E8132880700036821" xmlns:wsu="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd">
|
||||
<wsse:Reference URI="#CertId-45851B081998E431E8132880700036719" ValueType="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509v3"/>
|
||||
</wsse:SecurityTokenReference>
|
||||
</ds:KeyInfo>
|
||||
</ds:Signature>
|
||||
</wsse:Security>
|
||||
"""
|
||||
|
||||
class BinaryTokenSignature:
    """WebService Security extension to add a basic signature to xml request.

    Outgoing requests: the SOAP Body is signed with RSA-SHA1 and the X509v3
    certificate is attached as a wsse:BinarySecurityToken (see
    BIN_TOKEN_TMPL).  Incoming responses: the signature is verified and, if
    a CA certificate was supplied, the peer certificate is validated too.
    """

    def __init__(self, certificate="", private_key="", password=None, cacert=None):
        # read the X509v3 certificate (PEM), keeping only the base64 body
        # ("-----BEGIN/END-----" delimiter lines are stripped).
        # BUGFIX: use a context manager so the file handle is closed
        # instead of being leaked until garbage collection.
        with open(certificate) as pem:
            self.certificate = ''.join([line for line in pem
                                        if not line.startswith("---")])
        self.private_key = private_key   # path to the PEM private key file
        self.password = password         # private key passphrase (or None)
        self.cacert = cacert             # CA cert for response validation (or None)

    def preprocess(self, client, request, method, args, kwargs, headers, soap_uri):
        "Sign the outgoing SOAP request"
        # get xml elements:
        body = request('Body', ns=soap_uri, )
        header = request('Header', ns=soap_uri, )
        # prepare body xml attributes to be signed (reference)
        body['wsu:Id'] = "id-14"
        body['xmlns:wsu'] = WSU_URI
        # workaround: copy namespaces so lxml can parse the xml to be signed
        for attr, value in request[:]:
            if attr.startswith("xmlns"):
                body[attr] = value
        # use the internal tag xml representation (not the full xml document)
        ref_xml = repr(body)
        # sign using RSA-SHA1 (XML Security); imported lazily to avoid a
        # circular import between wsse and xmlsec
        from . import xmlsec
        vars = xmlsec.rsa_sign(ref_xml, "#id-14",
                               self.private_key, self.password)
        vars['certificate'] = self.certificate
        # generate the xml (filling the placeholders)
        wsse = SimpleXMLElement(BIN_TOKEN_TMPL % vars)
        header.import_node(wsse)

    def postprocess(self, client, response, method, args, kwargs, headers, soap_uri):
        "Verify the signature of the incoming response"
        from . import xmlsec
        # get xml elements:
        body = response('Body', ns=soap_uri, )
        header = response('Header', ns=soap_uri, )
        wsse = header("Security", ns=WSSE_URI)
        cert = wsse("BinarySecurityToken", ns=WSSE_URI)
        # check that the cert (binary token) is coming in the correct format:
        self.__check(cert["EncodingType"], Base64Binary_URI)
        self.__check(cert["ValueType"], X509v3_URI)
        # extract the certificate (in DER to avoid new line & padding issues!)
        # NOTE(review): str.decode("base64") is Python 2 only — this module
        # appears to target py2; confirm before running under py3
        cert_der = str(cert).decode("base64")
        public_key = xmlsec.x509_extract_rsa_public_key(cert_der, binary=True)
        # validate the certificate using the certification authority:
        if not self.cacert:
            # deliberate best-effort: warn instead of failing when no CA given
            warnings.warn("No CA provided, WSSE not validating certificate")
        elif not xmlsec.x509_verify(self.cacert, cert_der, binary=True):
            raise RuntimeError("WSSE certificate validation failed")
        # check body xml attributes was signed correctly (reference)
        self.__check(body['xmlns:wsu'], WSU_URI)
        ref_uri = body['wsu:Id']
        signature = wsse("Signature", ns=XMLDSIG_URI)
        signed_info = signature("SignedInfo", ns=XMLDSIG_URI)
        signature_value = signature("SignatureValue", ns=XMLDSIG_URI)
        # TODO: these sanity checks should be moved to xmlsec?
        self.__check(signed_info("Reference", ns=XMLDSIG_URI)['URI'], "#" + ref_uri)
        self.__check(signed_info("SignatureMethod", ns=XMLDSIG_URI)['Algorithm'],
                     XMLDSIG_URI + "rsa-sha1")
        self.__check(signed_info("Reference", ns=XMLDSIG_URI)("DigestMethod", ns=XMLDSIG_URI)['Algorithm'],
                     XMLDSIG_URI + "sha1")
        # TODO: check KeyInfo uses the correct SecurityTokenReference
        # workaround: copy namespaces so lxml can parse the xml to be signed
        for attr, value in response[:]:
            if attr.startswith("xmlns"):
                body[attr] = value
        # use the internal tag xml representation (not the full xml document)
        ref_xml = xmlsec.canonicalize(repr(body))
        # verify the signed hash
        computed_hash = xmlsec.sha1_hash_digest(ref_xml)
        digest_value = str(signed_info("Reference", ns=XMLDSIG_URI)("DigestValue", ns=XMLDSIG_URI))
        if computed_hash != digest_value:
            raise RuntimeError("WSSE SHA1 hash digests mismatch")
        # workaround: prepare the signed info (assure the parent ns is present)
        signed_info['xmlns'] = XMLDSIG_URI
        xml = repr(signed_info)
        # verify the signature using RSA-SHA1 (XML Security)
        ok = xmlsec.rsa_verify(xml, str(signature_value), public_key)
        if not ok:
            raise RuntimeError("WSSE RSA-SHA1 signature verification failed")
        # TODO: remove any unsigned part from the xml?

    def __check(self, value, expected, msg="WSSE sanity check failed"):
        # raise if a WSSE attribute does not carry the expected URI/value
        if value != expected:
            raise RuntimeError(msg)
|
||||
220
gluon/contrib/pysimplesoap/xmlsec.py
Normal file
220
gluon/contrib/pysimplesoap/xmlsec.py
Normal file
@@ -0,0 +1,220 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by the
|
||||
# Free Software Foundation; either version 3, or (at your option) any later
|
||||
# version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
# for more details.
|
||||
|
||||
"""Pythonic XML Security Library implementation"""
|
||||
from __future__ import print_function
|
||||
import base64
|
||||
import hashlib
|
||||
import os
|
||||
from cStringIO import StringIO
|
||||
from M2Crypto import BIO, EVP, RSA, X509, m2
|
||||
|
||||
# if lxml is not installed, use c14n.py native implementation
|
||||
try:
|
||||
import lxml.etree
|
||||
except ImportError:
|
||||
lxml = None
|
||||
|
||||
# Features:
|
||||
# * Uses M2Crypto and lxml (libxml2) but it is independent from libxmlsec1
|
||||
# * Sign, Verify, Encrypt & Decrypt XML documents
|
||||
|
||||
# Enveloping templates ("by reference": signature is parent):
# <SignedInfo> skeleton for a signature that references an external
# fragment by URI; uses exclusive c14n and RSA-SHA1.
SIGN_REF_TMPL = """
<SignedInfo xmlns="http://www.w3.org/2000/09/xmldsig#">
    <CanonicalizationMethod Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#" />
    <SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1" />
    <Reference URI="%(ref_uri)s">
        <Transforms>
            <Transform Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#" />
        </Transforms>
        <DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1" />
        <DigestValue>%(digest_value)s</DigestValue>
    </Reference>
</SignedInfo>
"""
# full standalone signed document, filled with the dict from rsa_sign().
# NOTE(review): the template starts with a newline before the <?xml?>
# declaration, which is not well-formed XML at the document level —
# presumably tolerated by the consumers; confirm before changing.
SIGNED_TMPL = """
<?xml version="1.0" encoding="UTF-8"?>
<Signature xmlns="http://www.w3.org/2000/09/xmldsig#">
%(signed_info)s
<SignatureValue>%(signature_value)s</SignatureValue>
%(key_info)s
%(ref_xml)s
</Signature>
"""

# Enveloped templates (signature is child, the reference is the root object):
# empty Reference URI means "the whole enclosing document"; inclusive c14n.
SIGN_ENV_TMPL = """
<SignedInfo xmlns="http://www.w3.org/2000/09/xmldsig#">
    <CanonicalizationMethod Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315"/>
    <SignatureMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1"/>
    <Reference URI="">
        <Transforms>
            <Transform Algorithm="http://www.w3.org/2000/09/xmldsig#enveloped-signature"/>
            <Transform Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315"/>
        </Transforms>
        <DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"/>
        <DigestValue>%(digest_value)s</DigestValue>
    </Reference>
</SignedInfo>
"""
# <Signature> element to be embedded inside the signed document.
SIGNATURE_TMPL = """<Signature xmlns="http://www.w3.org/2000/09/xmldsig#">
%(signed_info)s
<SignatureValue>%(signature_value)s</SignatureValue>
%(key_info)s
</Signature>"""

# KeyInfo variant carrying the raw RSA public key (modulus + exponent).
KEY_INFO_RSA_TMPL = """
<KeyInfo>
    <KeyValue>
        <RSAKeyValue>
            <Modulus>%(modulus)s</Modulus>
            <Exponent>%(exponent)s</Exponent>
        </RSAKeyValue>
    </KeyValue>
</KeyInfo>
"""

# KeyInfo variant identifying the signer via X509 issuer + serial number.
KEY_INFO_X509_TMPL = """
<KeyInfo>
    <X509Data>
        <X509IssuerSerial>
            <X509IssuerName>%(issuer_name)s</X509IssuerName>
            <X509SerialNumber>%(serial_number)s</X509SerialNumber>
        </X509IssuerSerial>
    </X509Data>
</KeyInfo>
"""
|
||||
|
||||
def canonicalize(xml, c14n_exc=True):
    """Return the canonical (c14n) form of the xml document for hashing.

    Canonicalization normalizes encoding, line feeds, spacing, quoting and
    attribute ordering so the digest is computed over reproducible bytes.
    `c14n_exc` selects exclusive canonicalization.
    """
    buf = StringIO()
    if lxml is None:
        # pure-python fallback: c14n.py via SimpleXMLElement
        # (imported lazily to avoid a recursive module import)
        from .simplexml import SimpleXMLElement
        SimpleXMLElement(xml).write_c14n(buf, exclusive=c14n_exc)
    else:
        # libxml2 (through lxml) is the faster path when available
        tree = lxml.etree.parse(StringIO(xml))
        tree.write_c14n(buf, exclusive=c14n_exc)
    return buf.getvalue()
|
||||
|
||||
|
||||
def sha1_hash_digest(payload):
    """Compute the SHA1 digest of *payload* and return it base64-encoded."""
    raw_digest = hashlib.sha1(payload).digest()
    return base64.b64encode(raw_digest)
|
||||
|
||||
|
||||
def rsa_sign(xml, ref_uri, private_key, password=None, cert=None, c14n_exc=True,
             sign_template=SIGN_REF_TMPL, key_info_template=KEY_INFO_RSA_TMPL):
    """Sign an XML document using RSA (templates: enveloped -ref- or enveloping).

    Returns a dict of placeholder values ready to be interpolated into one
    of the signature templates (SIGNED_TMPL / SIGNATURE_TMPL / ...).
    """
    # canonicalize the referenced fragment so the SHA1 hash is reproducible
    canonical_ref = canonicalize(xml, c14n_exc)
    digest = sha1_hash_digest(canonical_ref)
    # build the <SignedInfo> (with the reference uri and hash), canonicalized
    info = canonicalize(
        sign_template % {'ref_uri': ref_uri, 'digest_value': digest},
        c14n_exc)
    # RSA-sign the SHA1 digest of the canonical <SignedInfo>;
    # the lambda feeds the passphrase to the key loader non-interactively
    pkey = RSA.load_key(private_key, lambda *args, **kwargs: password)
    raw_signature = pkey.sign(hashlib.sha1(info).digest())
    # mapping of placeholders for the final signed xml message
    return {
        'ref_xml': canonical_ref,
        'ref_uri': ref_uri,
        'signed_info': info,
        'signature_value': base64.b64encode(raw_signature),
        'key_info': key_info(pkey, cert, key_info_template),
    }
|
||||
|
||||
|
||||
def rsa_verify(xml, signature, key, c14n_exc=True):
    """Verify a XML document signature using RSA-SHA1, return True if valid.

    xml       -- the signed <SignedInfo> fragment (canonicalized here)
    signature -- base64-encoded RSA signature to check
    key       -- public key, either a PEM string or a filename
    c14n_exc  -- use exclusive canonicalization (must match the signer)
    """
    # load the public key (from buffer or filename)
    if key.startswith("-----BEGIN PUBLIC KEY-----"):
        bio = BIO.MemoryBuffer(key)
        rsa = RSA.load_pub_key_bio(bio)
    else:
        # BUGFIX: this branch previously called RSA.load_pub_key(certificate)
        # where `certificate` is undefined, raising NameError whenever the
        # key was passed as a filename; the intended argument is `key`.
        rsa = RSA.load_pub_key(key)
    # create the digital envelope
    pubkey = EVP.PKey()
    pubkey.assign_rsa(rsa)
    # do the cryptographic validation (using the default sha1 hash digest)
    pubkey.reset_context(md='sha1')
    pubkey.verify_init()
    # normalize and feed the signed xml to be verified
    pubkey.verify_update(canonicalize(xml, c14n_exc))
    ret = pubkey.verify_final(base64.b64decode(signature))
    # verify_final returns 1 on success
    return ret == 1
|
||||
|
||||
|
||||
def key_info(pkey, cert, key_info_template):
    "Convert private key (PEM) to XML Signature format (RSAKeyValue/X509Data)"
    # base64-encode the public exponent; the first 4 bytes of pkey.e are
    # skipped (presumably an MPI length prefix — confirm against M2Crypto docs)
    exponent = base64.b64encode(pkey.e[4:])
    # modulus: BN -> hex -> raw bytes -> base64
    # NOTE(review): str.decode("hex") / .encode("base64") are Python 2 only
    modulus = m2.bn_to_hex(m2.mpi_to_bn(pkey.n)).decode("hex").encode("base64")
    # only parse the X509 certificate when one was supplied
    # (issuer/serial fields fall back to empty strings otherwise)
    x509 = x509_parse_cert(cert) if cert else None
    return key_info_template % {
        'modulus': modulus,
        'exponent': exponent,
        'issuer_name': x509.get_issuer().as_text() if x509 else "",
        'serial_number': x509.get_serial_number() if x509 else "",
    }
|
||||
|
||||
|
||||
# Miscellaneous certificate utility functions:
|
||||
|
||||
|
||||
def x509_parse_cert(cert, binary=False):
    """Create a X509 certificate from binary DER, plain text PEM or filename."""
    if binary:
        # raw DER bytes supplied directly
        return X509.load_cert_bio(BIO.MemoryBuffer(cert), X509.FORMAT_DER)
    if cert.startswith("-----BEGIN CERTIFICATE-----"):
        # in-memory PEM text
        return X509.load_cert_bio(BIO.MemoryBuffer(cert), X509.FORMAT_PEM)
    # otherwise treat the argument as a filename (format 1 == PEM)
    return X509.load_cert(cert, 1)
|
||||
|
||||
|
||||
def x509_extract_rsa_public_key(cert, binary=False):
    """Return the public key (PEM format) from a X509 certificate."""
    parsed = x509_parse_cert(cert, binary)
    pubkey = parsed.get_pubkey()
    return pubkey.get_rsa().as_pem()
|
||||
|
||||
|
||||
def x509_verify(cacert, cert, binary=False):
    """Validate the certificate's authenticity using a certification authority."""
    authority = x509_parse_cert(cacert)
    candidate = x509_parse_cert(cert, binary)
    # verify the candidate's signature against the CA public key
    return candidate.verify(authority.get_pubkey())
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test — requires local fixture files "no_encriptada.key"
    # (unencrypted RSA private key) and "zunimercado.crt" (certificate).

    # basic test of enveloping signature (the reference is a part of the xml)
    sample_xml = """<Object xmlns="http://www.w3.org/2000/09/xmldsig#" Id="object">data</Object>"""
    output = canonicalize(sample_xml)
    print (output)
    vars = rsa_sign(sample_xml, '#object', "no_encriptada.key", "password")
    print (SIGNED_TMPL % vars)

    # basic test of enveloped signature (the reference is the document itself)
    sample_xml = """<?xml version="1.0" encoding="UTF-8"?><Object>data%s</Object>"""
    vars = rsa_sign(sample_xml % "", '', "no_encriptada.key", "password",
                    sign_template=SIGN_ENV_TMPL, c14n_exc=False)
    print (sample_xml % (SIGNATURE_TMPL % vars))

    # basic signature verification:
    # NOTE(review): file handle from open() is not closed here — acceptable
    # for a demo script, but use a context manager in production code
    public_key = x509_extract_rsa_public_key(open("zunimercado.crt").read())
    assert rsa_verify(vars['signed_info'], vars['signature_value'], public_key,
                      c14n_exc=False)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -16,11 +16,9 @@ mostly for testing purposes
|
||||
Some examples at the bottom.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
from gluon._compat import urllib2, cookielib, iteritems, to_native, urlencode, to_bytes
|
||||
import re
|
||||
import time
|
||||
import urllib
|
||||
import urllib2
|
||||
import cookielib
|
||||
|
||||
|
||||
DEFAULT_HEADERS = {
|
||||
@@ -85,10 +83,10 @@ class WebClient(object):
|
||||
|
||||
# copy headers from dict to list of key,value
|
||||
headers_list = []
|
||||
for key, value in self.default_headers.iteritems():
|
||||
for key, value in iteritems(self.default_headers):
|
||||
if not key in headers:
|
||||
headers[key] = value
|
||||
for key, value in headers.iteritems():
|
||||
for key, value in iteritems(headers):
|
||||
if isinstance(value, (list, tuple)):
|
||||
for v in value:
|
||||
headers_list.append((key, v))
|
||||
@@ -96,7 +94,7 @@ class WebClient(object):
|
||||
headers_list.append((key, value))
|
||||
|
||||
# move cookies to headers
|
||||
for key, value in cookies.iteritems():
|
||||
for key, value in iteritems(cookies):
|
||||
headers_list.append(('Cookie', '%s=%s' % (key, value)))
|
||||
|
||||
# add headers to request
|
||||
@@ -120,25 +118,29 @@ class WebClient(object):
|
||||
data['_formkey'] = self.forms[data['_formname']]
|
||||
|
||||
# time the POST request
|
||||
data = urllib.urlencode(data, doseq=True)
|
||||
data = urlencode(data, doseq=True)
|
||||
else:
|
||||
self.method = 'GET' if method=='auto' else method
|
||||
data = None
|
||||
t0 = time.time()
|
||||
self.response = opener.open(self.url, data)
|
||||
self.response = opener.open(self.url, to_bytes(data))
|
||||
self.time = time.time() - t0
|
||||
except urllib2.HTTPError as error:
|
||||
except urllib2.HTTPError as er:
|
||||
error = er
|
||||
# catch HTTP errors
|
||||
self.time = time.time() - t0
|
||||
self.response = error
|
||||
self.response = er
|
||||
|
||||
if hasattr(self.response, 'getcode'):
|
||||
self.status = self.response.getcode()
|
||||
else:#python2.5
|
||||
self.status = None
|
||||
|
||||
self.text = self.response.read()
|
||||
self.headers = dict(self.response.headers)
|
||||
self.text = to_native(self.response.read())
|
||||
# In PY3 self.response.headers are case sensitive
|
||||
self.headers = dict()
|
||||
for h in self.response.headers:
|
||||
self.headers[h.lower()] = self.response.headers[h]
|
||||
|
||||
# treat web2py tickets as special types of errors
|
||||
if error is not None:
|
||||
@@ -156,7 +158,7 @@ class WebClient(object):
|
||||
|
||||
# check is a new session id has been issued, symptom of broken session
|
||||
if self.session_regex is not None:
|
||||
for cookie, value in self.cookies.iteritems():
|
||||
for cookie, value in iteritems(self.cookies):
|
||||
match = self.session_regex.match(cookie)
|
||||
if match:
|
||||
name = match.group('name')
|
||||
@@ -166,7 +168,7 @@ class WebClient(object):
|
||||
|
||||
# find all forms and formkeys in page
|
||||
self.forms = {}
|
||||
for match in FORM_REGEX.finditer(self.text):
|
||||
for match in FORM_REGEX.finditer(to_native(self.text)):
|
||||
self.forms[match.group('formname')] = match.group('formkey')
|
||||
|
||||
# log this request
|
||||
|
||||
@@ -110,7 +110,7 @@ class HTTP(Exception):
|
||||
if status[:1] == '4':
|
||||
if not body:
|
||||
body = status
|
||||
if isinstance(body, str):
|
||||
if isinstance(body, (str, bytes, bytearray)):
|
||||
headers['Content-Length'] = len(body)
|
||||
rheaders = []
|
||||
for k, v in iteritems(headers):
|
||||
@@ -121,7 +121,7 @@ class HTTP(Exception):
|
||||
responder(status, rheaders)
|
||||
if env.get('request_method', '') == 'HEAD':
|
||||
return ['']
|
||||
elif isinstance(body, str):
|
||||
elif isinstance(body, (str, bytes, bytearray)):
|
||||
return [body]
|
||||
elif hasattr(body, '__iter__'):
|
||||
return body
|
||||
|
||||
@@ -20,9 +20,9 @@ from .test_serializers import *
|
||||
from .test_languages import *
|
||||
from .test_compileapp import *
|
||||
from .test_appadmin import *
|
||||
from .test_web import *
|
||||
|
||||
if sys.version[:3] == '2.7':
|
||||
from .test_is_url import *
|
||||
from .test_scheduler import *
|
||||
from .test_web import *
|
||||
from .test_old_doctests import *
|
||||
|
||||
@@ -16,7 +16,7 @@ from .fix_path import fix_sys_path
|
||||
fix_sys_path(__file__)
|
||||
|
||||
from gluon.contrib.webclient import WebClient
|
||||
from gluon._compat import urllib2
|
||||
from gluon._compat import urllib2, PY2
|
||||
|
||||
webserverprocess = None
|
||||
|
||||
@@ -110,6 +110,7 @@ class TestWeb(LiveTest):
|
||||
assert('expires' in s.headers)
|
||||
assert(s.headers['cache-control'].startswith('max-age'))
|
||||
|
||||
@unittest.skipIf(not(PY2), 'skip PY3 testSoap')
|
||||
def testSoap(self):
|
||||
# test soap server implementation
|
||||
from gluon.contrib.pysimplesoap.client import SoapClient, SoapFault
|
||||
|
||||
Reference in New Issue
Block a user