mirror of https://github.com/ansible/awx.git synced 2024-10-27 00:55:06 +03:00

all vendor dependencies expressed in requirements.txt

Chris Meyers 2015-08-05 08:10:16 -04:00
parent 4a1b5070b5
commit fa1643e330
5833 changed files with 201 additions and 1076113 deletions

.gitignore vendored

@@ -83,3 +83,7 @@ tower-backup-*
env/*
nohup.out
reports
# AWX python libs populated by requirements.txt
awx/lib/site-packages


@@ -82,7 +82,7 @@ RPM_NVR = $(NAME)-$(VERSION)-$(RELEASE)$(RPM_DIST)
MOCK_BIN ?= mock
MOCK_CFG ?=
.PHONY: clean rebase push requirements requirements_pypi requirements_jenkins \
.PHONY: clean rebase push requirements requirements_dev requirements_jenkins requirements_post \
develop refresh adduser syncdb migrate dbchange dbshell runserver celeryd \
receiver test test_coverage coverage_html ui_analysis_report test_jenkins dev_build \
release_build release_clean sdist rpmtar mock-rpm mock-srpm rpm-sign \
@@ -137,51 +137,35 @@ rebase:
push:
git push origin master
# Install third-party requirements needed for development environment (using
# locally downloaded packages).
requirements:
@if [ "$(VIRTUAL_ENV)" ]; then \
(cd requirements && pip install --no-index setuptools-12.0.5.tar.gz); \
(cd requirements && pip install --no-index Django-1.6.7.tar.gz); \
(cd requirements && pip install --no-index -r dev_local.txt); \
$(PYTHON) fix_virtualenv_setuptools.py; \
else \
(cd requirements && sudo pip install --no-index -r dev_local.txt); \
fi
requirements_post:
touch awx/lib/site-packages/oslo/__init__.py
touch awx/lib/site-packages/dogpile/__init__.py
# Install third-party requirements needed for development environment
# (downloading from PyPI if necessary).
requirements_pypi:
# Install third-party requirements needed for development environment.
requirements:
(pip install -r requirements/requirements.txt -t awx/lib/site-packages/);
@if [ "$(VIRTUAL_ENV)" ]; then \
pip install setuptools==12.0.5; \
pip install Django\>=1.6.7,\<1.7; \
pip install -r requirements/dev.txt; \
$(PYTHON) fix_virtualenv_setuptools.py; \
else \
sudo pip install -r requirements/dev.txt; \
$(PYTHON) fix_virtualenv_setuptools.py; \
fi
$(MAKE) requirements_post
requirements_dev:
(cat requirements/requirements.txt requirements/requirements_dev.txt > /tmp/req_dev.txt);
(pip install -r /tmp/req_dev.txt -t awx/lib/site-packages/);
@if [ "$(VIRTUAL_ENV)" ]; then \
$(PYTHON) fix_virtualenv_setuptools.py; \
fi
$(MAKE) requirements_post
# Install third-party requirements needed for running unittests in jenkins
# (using locally downloaded packages).
requirements_jenkins:
requirements_jenkins: requirements
(pip install -r requirements/requirements_jenkins.txt);
@if [ "$(VIRTUAL_ENV)" ]; then \
(cd requirements && pip install --no-index distribute-0.7.3.zip || true); \
(cd requirements && pip install --no-index pip-1.5.4.tar.gz || true); \
else \
(cd requirements && sudo pip install --no-index distribute-0.7.3.zip || true); \
(cd requirements && sudo pip install --no-index pip-1.5.4.tar.gz || true); \
fi
$(MAKE) requirements
@if [ "$(VIRTUAL_ENV)" ]; then \
(cd requirements && pip install -r jenkins.txt); \
$(PYTHON) fix_virtualenv_setuptools.py; \
else \
(cd requirements && sudo pip install -r jenkins.txt); \
$(PYTHON) fix_virtualenv_setuptools.py; \
fi
npm install csslint jshint
# "Install" ansible-tower package in development mode. Creates link to working
# copy in site-packages and installs awx-manage command.
# "Install" ansible-tower package in development mode.
develop:
@if [ "$(VIRTUAL_ENV)" ]; then \
pip uninstall -y awx; \
@@ -191,6 +175,10 @@ develop:
sudo $(PYTHON) setup.py develop; \
fi
version_file:
mkdir -p /var/lib/awx/
python -c "import awx as awx; print awx.__version__" > /var/lib/awx/.tower_version
# Do any one-time init tasks.
init:
@if [ "$(VIRTUAL_ENV)" ]; then \
@@ -200,7 +188,7 @@ init:
fi
# Refresh development environment after pulling new code.
refresh: clean requirements develop migrate
refresh: clean requirements_dev version_file develop migrate
# Create Django superuser.
adduser:
@@ -407,7 +395,7 @@ release_clean:
dist/$(SDIST_TAR_FILE):
BUILD="$(BUILD)" $(PYTHON) setup.py sdist
sdist: minjs dist/$(SDIST_TAR_FILE)
sdist: minjs requirements dist/$(SDIST_TAR_FILE)
rpm-build:
mkdir -p rpm-build

File diff suppressed because it is too large.


@@ -1,86 +0,0 @@
Local versions of third-party packages required by Tower. Package names and
versions are listed below, along with notes on which files are included.
amqp==1.4.5 (amqp/*)
ansiconv==1.0.0 (ansiconv.py, small fix, generate unicode html)
anyjson==0.3.3 (anyjson/*)
apache-libcloud==0.15.1 (libcloud/*)
argparse==1.2.1 (argparse.py, needed for Python 2.6 support)
azure==0.9.0 (azure/*)
Babel==1.3 (babel/*, excluded bin/pybabel)
billiard==3.3.0.16 (billiard/*, funtests/*, excluded _billiard.so)
boto==2.34.0 (boto/*, excluded bin/asadmin, bin/bundle_image, bin/cfadmin,
bin/cq, bin/cwutil, bin/dynamodb_dump, bin/dynamodb_load, bin/elbadmin,
bin/fetch_file, bin/glacier, bin/instance_events, bin/kill_instance,
bin/launch_instance, bin/list_instances, bin/lss3, bin/mturk,
bin/pyami_sendmail, bin/route53, bin/s3put, bin/sdbadmin, bin/taskadmin)
celery==3.1.10 (celery/*, excluded bin/celery*)
d2to1==0.2.11 (d2to1/*)
distribute==0.7.3 (no files)
django-auth-ldap==1.1.8 (django_auth_ldap/*)
django-celery==3.1.10 (djcelery/*)
django-crum==0.6.1 (crum/*)
django-extensions==1.3.3 (django_extensions/*)
django-jsonfield==0.9.12 (jsonfield/*, minor fix in jsonfield/fields.py)
django_polymorphic==0.5.3 (polymorphic/*)
django-split-settings==0.1.1 (split_settings/*)
django-taggit==0.11.2 (taggit/*)
djangorestframework==2.3.13 (rest_framework/*)
django-rest-framework-mongoengine==1.5.4 (rest_framework_mongoengine/*)
django-qsstats-magic==0.7.2 (qsstats/*, minor fix in qsstats/__init__.py)
dogpile.cache==0.5.6 (dogpile/* into our dogpile/ along with dogpile.core)
dogpile.core==0.4.1 (dogpile/* into our dogpile/ along with dogpile.cache)
gevent-socketio==0.3.6 (socketio/*)
gevent-websocket==0.9.3 (geventwebsocket/*)
httplib2==0.9 (httplib2/*)
importlib==1.0.3 (importlib/*, needed for Python 2.6 support)
isodate==0.5.0 (isodate/*)
iso8601==0.1.10 (iso8601/*)
keyring==4.1 (keyring/*, excluded bin/keyring)
kombu==3.0.21 (kombu/*)
Markdown==2.4.1 (markdown/*, excluded bin/markdown_py)
mock==1.0.1 (mock.py)
mongoengine==0.9.0 (mongoengine/*)
netaddr==0.7.14 (netaddr/*)
os-client-config==0.6.0 (os_client_config/*)
ordereddict==1.1 (ordereddict.py, needed for Python 2.6 support)
os_diskconfig_python_novaclient_ext==0.1.2 (os_diskconfig_python_novaclient_ext/*)
os_networksv2_python_novaclient_ext==0.25 (os_networksv2_python_novaclient_ext.py)
os_virtual_interfacesv2_python_novaclient_ext==0.19 (os_virtual_interfacesv2_python_novaclient_ext.py)
oslo.i18n==1.5.0 (oslo_i18n/* oslo.i18n/* which goes in oslo/i18n... created empty __init__.py at oslo/)
oslo.config==1.9.3 (oslo_config/* oslo/config/* but not __init__.py from oslo/ used empty one instead)
oslo.serialization==1.4.0 (oslo_serialization/* oslo/serialization/* but not __init__.py from oslo/ used empty one instead)
oslo.utils==1.4.0 (oslo_utils/* oslo/utils/* but not __init__.py from oslo/ used empty one instead)
pbr==0.10.0 (pbr/*)
pexpect==3.1 (pexpect/*, excluded pxssh.py, fdpexpect.py, FSM.py, screen.py,
ANSI.py)
pip==1.5.4 (pip/*, excluded bin/pip*)
PrettyTable==0.7.2 (prettytable.py)
psphere==0.5.2 (psphere/*)
pyrax==1.9.3 (pyrax/*)
python-cinderclient==1.1.1 (cinderclient/*)
python-dateutil==2.4.0 (dateutil/*)
python-glanceclient==0.17.0 (glanceclient/*)
python-ironicclient==0.5.0 (ironicclient/*)
python-keystoneclient==1.3.0 (keystoneclient/*)
python-neutronclient==2.3.11 (neutronclient/*)
python-novaclient==2.20.0 (novaclient/*, excluded bin/nova)
python-swiftclient==2.2.0 (swiftclient/*, excluded bin/swift)
python-troveclient==1.0.9 (troveclient/*)
pytz==2014.10 (pytz/*)
pywinrm==0.0.3 (winrm/*)
rackspace-auth-openstack==1.3 (rackspace_auth_openstack/*)
rackspace-novaclient==1.4 (no files)
rax_default_network_flags_python_novaclient_ext==0.2.3 (rax_default_network_flags_python_novaclient_ext/*)
rax_scheduled_images_python_novaclient_ext==0.2.1 (rax_scheduled_images_python_novaclient_ext/*)
redis==2.10.3 (redis/*)
requests==2.5.1 (requests/*)
shade==0.5.0 (shade/*, made python2.6 safe by removing a dictionary comprehension and adding a NullHandler to shade/__init__.py)
setuptools==12.0.5 (setuptools/*, _markerlib/*, pkg_resources/*, easy_install.py)
simplejson==3.6.0 (simplejson/*, excluded simplejson/_speedups.so)
six==1.9.0 (six.py)
South==0.8.4 (south/*)
suds==0.4 (suds/*)
stevedore==1.3.0 (stevedore/*)
xmltodict==0.9.0 (xmltodict.py)
IPy==0.83 (IPy.py)
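A note on the oslo.* entries above: each oslo library ships a slice of a shared top-level oslo package, so the bundle keeps a single empty oslo/__init__.py (the Makefile's requirements_post target touches it into place) to let the flat copies resolve as one package. A rough layout sketch (paths illustrative):

from oslo import config, i18n  # both resolve through the shared oslo/ package:
#   awx/lib/site-packages/oslo/__init__.py  (empty, touched by requirements_post)
#   awx/lib/site-packages/oslo/config/...   (from oslo.config)
#   awx/lib/site-packages/oslo/i18n/...     (from oslo.i18n)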


@@ -1,16 +0,0 @@
try:
import ast
from _markerlib.markers import default_environment, compile, interpret
except ImportError:
if 'ast' in globals():
raise
def default_environment():
return {}
def compile(marker):
def marker_fn(environment=None, override=None):
# 'empty markers are True' heuristic won't install extra deps.
return not marker.strip()
marker_fn.__doc__ = marker
return marker_fn
def interpret(marker, environment=None, override=None):
return compile(marker)()
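An illustrative sketch of the fallback behavior when ast is unavailable (assuming this module is importable as _markerlib): only the empty marker evaluates true, so marker-guarded extras are simply skipped.

from _markerlib import compile, interpret  # vendored copy removed by this commit

assert interpret('')                     # empty marker: always True
fn = compile("python_version >= '2.6'")
fn()  # True via the ast-backed markers module, False in the fallback above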


@@ -1,119 +0,0 @@
# -*- coding: utf-8 -*-
"""Interpret PEP 345 environment markers.
EXPR [in|==|!=|not in] EXPR [or|and] ...
where EXPR belongs to any of those:
python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
python_full_version = sys.version.split()[0]
os.name = os.name
sys.platform = sys.platform
platform.version = platform.version()
platform.machine = platform.machine()
platform.python_implementation = platform.python_implementation()
a free string, like '2.6', or 'win32'
"""
__all__ = ['default_environment', 'compile', 'interpret']
import ast
import os
import platform
import sys
import weakref
_builtin_compile = compile
try:
from platform import python_implementation
except ImportError:
if os.name == "java":
# Jython 2.5 has ast module, but not platform.python_implementation() function.
def python_implementation():
return "Jython"
else:
raise
# restricted set of variables
_VARS = {'sys.platform': sys.platform,
'python_version': '%s.%s' % sys.version_info[:2],
# FIXME parsing sys.platform is not reliable, but there is no other
# way to get e.g. 2.7.2+, and the PEP is defined with sys.version
'python_full_version': sys.version.split(' ', 1)[0],
'os.name': os.name,
'platform.version': platform.version(),
'platform.machine': platform.machine(),
'platform.python_implementation': python_implementation(),
'extra': None # wheel extension
}
for var in list(_VARS.keys()):
if '.' in var:
_VARS[var.replace('.', '_')] = _VARS[var]
def default_environment():
"""Return copy of default PEP 385 globals dictionary."""
return dict(_VARS)
class ASTWhitelist(ast.NodeTransformer):
def __init__(self, statement):
self.statement = statement # for error messages
ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
# Bool operations
ALLOWED += (ast.And, ast.Or)
# Comparison operations
ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
def visit(self, node):
"""Ensure statement only contains allowed nodes."""
if not isinstance(node, self.ALLOWED):
raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
(self.statement,
(' ' * node.col_offset) + '^'))
return ast.NodeTransformer.visit(self, node)
def visit_Attribute(self, node):
"""Flatten one level of attribute access."""
new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
return ast.copy_location(new_node, node)
def parse_marker(marker):
tree = ast.parse(marker, mode='eval')
new_tree = ASTWhitelist(marker).generic_visit(tree)
return new_tree
def compile_marker(parsed_marker):
return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
dont_inherit=True)
_cache = weakref.WeakValueDictionary()
def compile(marker):
"""Return compiled marker as a function accepting an environment dict."""
try:
return _cache[marker]
except KeyError:
pass
if not marker.strip():
def marker_fn(environment=None, override=None):
""""""
return True
else:
compiled_marker = compile_marker(parse_marker(marker))
def marker_fn(environment=None, override=None):
"""override updates environment"""
if override is None:
override = {}
if environment is None:
environment = default_environment()
environment.update(override)
return eval(compiled_marker, environment)
marker_fn.__doc__ = marker
_cache[marker] = marker_fn
return _cache[marker]
def interpret(marker, environment=None):
return compile(marker)(environment)
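A short usage sketch (assuming the module is importable): markers evaluate against the restricted variable set in _VARS, including the underscore aliases of the dotted names.

from _markerlib.markers import default_environment, interpret

env = default_environment()
assert interpret("python_version >= '2.6'", env)              # string compare on _VARS
assert interpret("os_name == 'posix'", env) in (True, False)  # underscore alias of os.name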


@@ -1,70 +0,0 @@
"""Low-level AMQP client for Python (fork of amqplib)"""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
VERSION = (1, 4, 5)
__version__ = '.'.join(map(str, VERSION[0:3])) + ''.join(VERSION[3:])
__author__ = 'Barry Pederson'
__maintainer__ = 'Ask Solem'
__contact__ = 'pyamqp@celeryproject.org'
__homepage__ = 'http://github.com/celery/py-amqp'
__docformat__ = 'restructuredtext'
# -eof meta-
#
# Pull in the public items from the various sub-modules
#
from .basic_message import Message # noqa
from .channel import Channel # noqa
from .connection import Connection # noqa
from .exceptions import ( # noqa
AMQPError,
ConnectionError,
RecoverableConnectionError,
IrrecoverableConnectionError,
ChannelError,
RecoverableChannelError,
IrrecoverableChannelError,
ConsumerCancelled,
ContentTooLarge,
NoConsumers,
ConnectionForced,
InvalidPath,
AccessRefused,
NotFound,
ResourceLocked,
PreconditionFailed,
FrameError,
FrameSyntaxError,
InvalidCommand,
ChannelNotOpen,
UnexpectedFrame,
ResourceError,
NotAllowed,
AMQPNotImplementedError,
InternalError,
error_for_code,
__all__ as _all_exceptions,
)
from .utils import promise # noqa
__all__ = [
'Connection',
'Channel',
'Message',
] + _all_exceptions


@@ -1,93 +0,0 @@
"""Code common to Connection and Channel objects."""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
from .exceptions import AMQPNotImplementedError, RecoverableConnectionError
from .serialization import AMQPWriter
__all__ = ['AbstractChannel']
class AbstractChannel(object):
"""Superclass for both the Connection, which is treated
as channel 0, and other user-created Channel objects.
The subclasses must have a _METHOD_MAP class property, mapping
between AMQP method signatures and Python methods.
"""
def __init__(self, connection, channel_id):
self.connection = connection
self.channel_id = channel_id
connection.channels[channel_id] = self
self.method_queue = [] # Higher level queue for methods
self.auto_decode = False
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.close()
def _send_method(self, method_sig, args=bytes(), content=None):
"""Send a method for our channel."""
conn = self.connection
if conn is None:
raise RecoverableConnectionError('connection already closed')
if isinstance(args, AMQPWriter):
args = args.getvalue()
conn.method_writer.write_method(
self.channel_id, method_sig, args, content,
)
def close(self):
"""Close this Channel or Connection"""
raise NotImplementedError('Must be overridden in subclass')
def wait(self, allowed_methods=None):
"""Wait for a method that matches our allowed_methods parameter (the
default value of None means match any method), and dispatch to it."""
method_sig, args, content = self.connection._wait_method(
self.channel_id, allowed_methods)
return self.dispatch_method(method_sig, args, content)
def dispatch_method(self, method_sig, args, content):
if content and \
self.auto_decode and \
hasattr(content, 'content_encoding'):
try:
content.body = content.body.decode(content.content_encoding)
except Exception:
pass
try:
amqp_method = self._METHOD_MAP[method_sig]
except KeyError:
raise AMQPNotImplementedError(
'Unknown AMQP method {0!r}'.format(method_sig))
if content is None:
return amqp_method(self, args)
else:
return amqp_method(self, args, content)
#: Placeholder, the concrete implementations will have to
#: supply their own versions of _METHOD_MAP
_METHOD_MAP = {}
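A dispatch sketch (DemoChannel and _StubConnection are invented for illustration): a subclass supplies _METHOD_MAP keyed by AMQP (class_id, method_id) pairs, and dispatch_method routes incoming frames to the mapped handler.

from amqp.abstract_channel import AbstractChannel  # vendored copy removed here

class _StubConnection(object):
    def __init__(self):
        self.channels = {}  # stand-in for a real Connection

class DemoChannel(AbstractChannel):
    def close(self):
        self.connection = None
    def _flow_ok(self, args):
        return 'flow_ok'
    _METHOD_MAP = {(20, 21): _flow_ok}  # Channel.flow_ok

ch = DemoChannel(_StubConnection(), channel_id=1)
assert ch.dispatch_method((20, 21), args=None, content=None) == 'flow_ok'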


@@ -1,124 +0,0 @@
"""Messages for AMQP"""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
from .serialization import GenericContent
__all__ = ['Message']
class Message(GenericContent):
"""A Message for use with the Channnel.basic_* methods."""
#: Instances of this class have these attributes, which
#: are passed back and forth as message properties between
#: client and server
PROPERTIES = [
('content_type', 'shortstr'),
('content_encoding', 'shortstr'),
('application_headers', 'table'),
('delivery_mode', 'octet'),
('priority', 'octet'),
('correlation_id', 'shortstr'),
('reply_to', 'shortstr'),
('expiration', 'shortstr'),
('message_id', 'shortstr'),
('timestamp', 'timestamp'),
('type', 'shortstr'),
('user_id', 'shortstr'),
('app_id', 'shortstr'),
('cluster_id', 'shortstr')
]
def __init__(self, body='', children=None, channel=None, **properties):
"""Expected arg types
body: string
children: (not supported)
Keyword properties may include:
content_type: shortstr
MIME content type
content_encoding: shortstr
MIME content encoding
application_headers: table
Message header field table, a dict with string keys,
and string | int | Decimal | datetime | dict values.
delivery_mode: octet
Non-persistent (1) or persistent (2)
priority: octet
The message priority, 0 to 9
correlation_id: shortstr
The application correlation identifier
reply_to: shortstr
The destination to reply to
expiration: shortstr
Message expiration specification
message_id: shortstr
The application message identifier
timestamp: datetime.datetime
The message timestamp
type: shortstr
The message type name
user_id: shortstr
The creating user id
app_id: shortstr
The creating application id
cluster_id: shortstr
Intra-cluster routing identifier
Unicode bodies are encoded according to the 'content_encoding'
argument. If that's None, it's set to 'UTF-8' automatically.
example::
msg = Message('hello world',
content_type='text/plain',
application_headers={'foo': 7})
"""
super(Message, self).__init__(**properties)
self.body = body
self.channel = channel
def __eq__(self, other):
"""Check if the properties and bodies of this Message and another
Message are the same.
Received messages may contain a 'delivery_info' attribute,
which isn't compared.
"""
try:
return (super(Message, self).__eq__(other) and
self.body == other.body)
except AttributeError:
return NotImplemented
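Usage sketch (assuming the vendored package is importable): equality compares the declared properties plus the body, and properties read back as attributes.

from amqp.basic_message import Message

a = Message('hi', content_type='text/plain')
b = Message('hi', content_type='text/plain')
assert a == b                           # same properties, same body
assert a.content_type == 'text/plain'   # via GenericContent.__getattr__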

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -1,258 +0,0 @@
"""Exceptions used by amqp"""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
from struct import pack, unpack
__all__ = [
'AMQPError',
'ConnectionError', 'ChannelError',
'RecoverableConnectionError', 'IrrecoverableConnectionError',
'RecoverableChannelError', 'IrrecoverableChannelError',
'ConsumerCancelled', 'ContentTooLarge', 'NoConsumers',
'ConnectionForced', 'InvalidPath', 'AccessRefused', 'NotFound',
'ResourceLocked', 'PreconditionFailed', 'FrameError', 'FrameSyntaxError',
'InvalidCommand', 'ChannelNotOpen', 'UnexpectedFrame', 'ResourceError',
'NotAllowed', 'AMQPNotImplementedError', 'InternalError',
]
class AMQPError(Exception):
code = 0
def __init__(self, reply_text=None, method_sig=None,
method_name=None, reply_code=None):
self.message = reply_text
self.reply_code = reply_code or self.code
self.reply_text = reply_text
self.method_sig = method_sig
self.method_name = method_name or ''
if method_sig and not self.method_name:
self.method_name = METHOD_NAME_MAP.get(method_sig, '')
Exception.__init__(self, reply_code,
reply_text, method_sig, self.method_name)
def __str__(self):
if self.method:
return '{0.method}: ({0.reply_code}) {0.reply_text}'.format(self)
return self.reply_text or '<AMQPError: unknown error>'
@property
def method(self):
return self.method_name or self.method_sig
class ConnectionError(AMQPError):
pass
class ChannelError(AMQPError):
pass
class RecoverableChannelError(ChannelError):
pass
class IrrecoverableChannelError(ChannelError):
pass
class RecoverableConnectionError(ConnectionError):
pass
class IrrecoverableConnectionError(ConnectionError):
pass
class Blocked(RecoverableConnectionError):
pass
class ConsumerCancelled(RecoverableConnectionError):
pass
class ContentTooLarge(RecoverableChannelError):
code = 311
class NoConsumers(RecoverableChannelError):
code = 313
class ConnectionForced(RecoverableConnectionError):
code = 320
class InvalidPath(IrrecoverableConnectionError):
code = 402
class AccessRefused(IrrecoverableChannelError):
code = 403
class NotFound(IrrecoverableChannelError):
code = 404
class ResourceLocked(RecoverableChannelError):
code = 405
class PreconditionFailed(IrrecoverableChannelError):
code = 406
class FrameError(IrrecoverableConnectionError):
code = 501
class FrameSyntaxError(IrrecoverableConnectionError):
code = 502
class InvalidCommand(IrrecoverableConnectionError):
code = 503
class ChannelNotOpen(IrrecoverableConnectionError):
code = 504
class UnexpectedFrame(IrrecoverableConnectionError):
code = 505
class ResourceError(RecoverableConnectionError):
code = 506
class NotAllowed(IrrecoverableConnectionError):
code = 530
class AMQPNotImplementedError(IrrecoverableConnectionError):
code = 540
class InternalError(IrrecoverableConnectionError):
code = 541
ERROR_MAP = {
311: ContentTooLarge,
313: NoConsumers,
320: ConnectionForced,
402: InvalidPath,
403: AccessRefused,
404: NotFound,
405: ResourceLocked,
406: PreconditionFailed,
501: FrameError,
502: FrameSyntaxError,
503: InvalidCommand,
504: ChannelNotOpen,
505: UnexpectedFrame,
506: ResourceError,
530: NotAllowed,
540: AMQPNotImplementedError,
541: InternalError,
}
def error_for_code(code, text, method, default):
try:
return ERROR_MAP[code](text, method, reply_code=code)
except KeyError:
return default(text, method, reply_code=code)
def raise_for_code(code, text, method, default):
raise error_for_code(code, text, method, default)
METHOD_NAME_MAP = {
(10, 10): 'Connection.start',
(10, 11): 'Connection.start_ok',
(10, 20): 'Connection.secure',
(10, 21): 'Connection.secure_ok',
(10, 30): 'Connection.tune',
(10, 31): 'Connection.tune_ok',
(10, 40): 'Connection.open',
(10, 41): 'Connection.open_ok',
(10, 50): 'Connection.close',
(10, 51): 'Connection.close_ok',
(20, 10): 'Channel.open',
(20, 11): 'Channel.open_ok',
(20, 20): 'Channel.flow',
(20, 21): 'Channel.flow_ok',
(20, 40): 'Channel.close',
(20, 41): 'Channel.close_ok',
(30, 10): 'Access.request',
(30, 11): 'Access.request_ok',
(40, 10): 'Exchange.declare',
(40, 11): 'Exchange.declare_ok',
(40, 20): 'Exchange.delete',
(40, 21): 'Exchange.delete_ok',
(40, 30): 'Exchange.bind',
(40, 31): 'Exchange.bind_ok',
(40, 40): 'Exchange.unbind',
(40, 41): 'Exchange.unbind_ok',
(50, 10): 'Queue.declare',
(50, 11): 'Queue.declare_ok',
(50, 20): 'Queue.bind',
(50, 21): 'Queue.bind_ok',
(50, 30): 'Queue.purge',
(50, 31): 'Queue.purge_ok',
(50, 40): 'Queue.delete',
(50, 41): 'Queue.delete_ok',
(50, 50): 'Queue.unbind',
(50, 51): 'Queue.unbind_ok',
(60, 10): 'Basic.qos',
(60, 11): 'Basic.qos_ok',
(60, 20): 'Basic.consume',
(60, 21): 'Basic.consume_ok',
(60, 30): 'Basic.cancel',
(60, 31): 'Basic.cancel_ok',
(60, 40): 'Basic.publish',
(60, 50): 'Basic.return',
(60, 60): 'Basic.deliver',
(60, 70): 'Basic.get',
(60, 71): 'Basic.get_ok',
(60, 72): 'Basic.get_empty',
(60, 80): 'Basic.ack',
(60, 90): 'Basic.reject',
(60, 100): 'Basic.recover_async',
(60, 110): 'Basic.recover',
(60, 111): 'Basic.recover_ok',
(60, 120): 'Basic.nack',
(90, 10): 'Tx.select',
(90, 11): 'Tx.select_ok',
(90, 20): 'Tx.commit',
(90, 21): 'Tx.commit_ok',
(90, 30): 'Tx.rollback',
(90, 31): 'Tx.rollback_ok',
(85, 10): 'Confirm.select',
(85, 11): 'Confirm.select_ok',
}
for _method_id, _method_name in list(METHOD_NAME_MAP.items()):
METHOD_NAME_MAP[unpack('>I', pack('>HH', *_method_id))[0]] = _method_name
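Usage sketch (assuming the vendored package is importable): error_for_code maps a broker reply code onto the matching exception class, falling back to the supplied default for unknown codes.

from amqp.exceptions import ChannelError, NotFound, error_for_code

exc = error_for_code(404, 'no queue foo', (50, 10), ChannelError)
assert isinstance(exc, NotFound)
assert exc.reply_code == 404 and exc.method == 'Queue.declare'
assert type(error_for_code(999, 'odd', None, ChannelError)) is ChannelError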


@@ -1,188 +0,0 @@
# -*- coding: utf-8 -*-
"""
celery.five
~~~~~~~~~~~
Compatibility implementations of features
only available in newer Python versions.
"""
from __future__ import absolute_import
############## py3k #########################################################
import sys
PY3 = sys.version_info[0] == 3
try:
reload = reload # noqa
except NameError: # pragma: no cover
from imp import reload # noqa
try:
from UserList import UserList # noqa
except ImportError: # pragma: no cover
from collections import UserList # noqa
try:
from UserDict import UserDict # noqa
except ImportError: # pragma: no cover
from collections import UserDict # noqa
if PY3:
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
map = map
string = str
string_t = str
long_t = int
text_t = str
range = range
int_types = (int, )
open_fqdn = 'builtins.open'
def items(d):
return d.items()
def keys(d):
return d.keys()
def values(d):
return d.values()
def nextfun(it):
return it.__next__
exec_ = getattr(builtins, 'exec')
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
class WhateverIO(StringIO):
def write(self, data):
if isinstance(data, bytes):
data = data.encode()
StringIO.write(self, data)
else:
import __builtin__ as builtins # noqa
from Queue import Queue, Empty # noqa
from itertools import imap as map, izip_longest as zip_longest # noqa
from StringIO import StringIO # noqa
string = unicode # noqa
string_t = basestring # noqa
text_t = unicode
long_t = long # noqa
range = xrange
int_types = (int, long)
open_fqdn = '__builtin__.open'
def items(d): # noqa
return d.iteritems()
def keys(d): # noqa
return d.iterkeys()
def values(d): # noqa
return d.itervalues()
def nextfun(it): # noqa
return it.next
def exec_(code, globs=None, locs=None):
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
exec_("""def reraise(tp, value, tb=None): raise tp, value, tb""")
BytesIO = WhateverIO = StringIO # noqa
def with_metaclass(Type, skip_attrs=set(['__dict__', '__weakref__'])):
"""Class decorator to set metaclass.
Works with both Python 2 and Python 3 and it does not add
an extra class in the lookup order like ``six.with_metaclass`` does
(that is -- it copies the original class instead of using inheritance).
"""
def _clone_with_metaclass(Class):
attrs = dict((key, value) for key, value in items(vars(Class))
if key not in skip_attrs)
return Type(Class.__name__, Class.__bases__, attrs)
return _clone_with_metaclass
############## time.monotonic ################################################
if sys.version_info < (3, 3):
import platform
SYSTEM = platform.system()
if SYSTEM == 'Darwin':
import ctypes
from ctypes.util import find_library
libSystem = ctypes.CDLL('libSystem.dylib')
CoreServices = ctypes.CDLL(find_library('CoreServices'),
use_errno=True)
mach_absolute_time = libSystem.mach_absolute_time
mach_absolute_time.restype = ctypes.c_uint64
absolute_to_nanoseconds = CoreServices.AbsoluteToNanoseconds
absolute_to_nanoseconds.restype = ctypes.c_uint64
absolute_to_nanoseconds.argtypes = [ctypes.c_uint64]
def _monotonic():
return absolute_to_nanoseconds(mach_absolute_time()) * 1e-9
elif SYSTEM == 'Linux':
# from stackoverflow:
# questions/1205722/how-do-i-get-monotonic-time-durations-in-python
import ctypes
import os
CLOCK_MONOTONIC = 1 # see <linux/time.h>
class timespec(ctypes.Structure):
_fields_ = [
('tv_sec', ctypes.c_long),
('tv_nsec', ctypes.c_long),
]
librt = ctypes.CDLL('librt.so.1', use_errno=True)
clock_gettime = librt.clock_gettime
clock_gettime.argtypes = [
ctypes.c_int, ctypes.POINTER(timespec),
]
def _monotonic(): # noqa
t = timespec()
if clock_gettime(CLOCK_MONOTONIC, ctypes.pointer(t)) != 0:
errno_ = ctypes.get_errno()
raise OSError(errno_, os.strerror(errno_))
return t.tv_sec + t.tv_nsec * 1e-9
else:
from time import time as _monotonic
try:
from time import monotonic
except ImportError:
monotonic = _monotonic # noqa
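Usage sketch for with_metaclass (assuming celery.five is importable): the decorator rebuilds the class under the metaclass instead of inserting a helper base class.

from celery.five import with_metaclass

class Meta(type):
    pass

@with_metaclass(Meta)
class Example(object):
    pass

assert type(Example) is Meta            # metaclass applied
assert Example.__bases__ == (object,)   # no synthetic base in the MRO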


@@ -1,231 +0,0 @@
"""Convert between frames and higher-level AMQP methods"""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
from collections import defaultdict, deque
from struct import pack, unpack
from .basic_message import Message
from .exceptions import AMQPError, UnexpectedFrame
from .five import range, string
from .serialization import AMQPReader
__all__ = ['MethodReader']
#
# MethodReader needs to know which methods are supposed
# to be followed by content headers and bodies.
#
_CONTENT_METHODS = [
(60, 50), # Basic.return
(60, 60), # Basic.deliver
(60, 71), # Basic.get_ok
]
class _PartialMessage(object):
"""Helper class to build up a multi-frame method."""
def __init__(self, method_sig, args, channel):
self.method_sig = method_sig
self.args = args
self.msg = Message()
self.body_parts = []
self.body_received = 0
self.body_size = None
self.complete = False
def add_header(self, payload):
class_id, weight, self.body_size = unpack('>HHQ', payload[:12])
self.msg._load_properties(payload[12:])
self.complete = (self.body_size == 0)
def add_payload(self, payload):
parts = self.body_parts
self.body_received += len(payload)
if self.body_received == self.body_size:
if parts:
parts.append(payload)
self.msg.body = bytes().join(parts)
else:
self.msg.body = payload
self.complete = True
else:
parts.append(payload)
class MethodReader(object):
"""Helper class to receive frames from the broker, combine them if
necessary with content-headers and content-bodies into complete methods.
Normally a method is represented as a tuple containing
(channel, method_sig, args, content).
In the case of a framing error, a :exc:`ConnectionError` is placed
in the queue.
In the case of unexpected frames, a tuple made up of
``(channel, ChannelError)`` is placed in the queue.
"""
def __init__(self, source):
self.source = source
self.queue = deque()
self.running = False
self.partial_messages = {}
self.heartbeats = 0
# For each channel, which type is expected next
self.expected_types = defaultdict(lambda: 1)
# not an actual byte count, just incremented whenever we receive
self.bytes_recv = 0
self._quick_put = self.queue.append
self._quick_get = self.queue.popleft
def _next_method(self):
"""Read the next method from the source, once one complete method has
been assembled it is placed in the internal queue."""
queue = self.queue
put = self._quick_put
read_frame = self.source.read_frame
while not queue:
try:
frame_type, channel, payload = read_frame()
except Exception as exc:
#
# Connection was closed? Framing Error?
#
put(exc)
break
self.bytes_recv += 1
if frame_type not in (self.expected_types[channel], 8):
put((
channel,
UnexpectedFrame(
'Received frame {0} while expecting type: {1}'.format(
frame_type, self.expected_types[channel]))))
elif frame_type == 1:
self._process_method_frame(channel, payload)
elif frame_type == 2:
self._process_content_header(channel, payload)
elif frame_type == 3:
self._process_content_body(channel, payload)
elif frame_type == 8:
self._process_heartbeat(channel, payload)
def _process_heartbeat(self, channel, payload):
self.heartbeats += 1
def _process_method_frame(self, channel, payload):
"""Process Method frames"""
method_sig = unpack('>HH', payload[:4])
args = AMQPReader(payload[4:])
if method_sig in _CONTENT_METHODS:
#
# Save what we've got so far and wait for the content-header
#
self.partial_messages[channel] = _PartialMessage(
method_sig, args, channel,
)
self.expected_types[channel] = 2
else:
self._quick_put((channel, method_sig, args, None))
def _process_content_header(self, channel, payload):
"""Process Content Header frames"""
partial = self.partial_messages[channel]
partial.add_header(payload)
if partial.complete:
#
# a bodyless message, we're done
#
self._quick_put((channel, partial.method_sig,
partial.args, partial.msg))
self.partial_messages.pop(channel, None)
self.expected_types[channel] = 1
else:
#
# wait for the content-body
#
self.expected_types[channel] = 3
def _process_content_body(self, channel, payload):
"""Process Content Body frames"""
partial = self.partial_messages[channel]
partial.add_payload(payload)
if partial.complete:
#
# Stick the message in the queue and go back to
# waiting for method frames
#
self._quick_put((channel, partial.method_sig,
partial.args, partial.msg))
self.partial_messages.pop(channel, None)
self.expected_types[channel] = 1
def read_method(self):
"""Read a method from the peer."""
self._next_method()
m = self._quick_get()
if isinstance(m, Exception):
raise m
if isinstance(m, tuple) and isinstance(m[1], AMQPError):
raise m[1]
return m
class MethodWriter(object):
"""Convert AMQP methods into AMQP frames and send them out
to the peer."""
def __init__(self, dest, frame_max):
self.dest = dest
self.frame_max = frame_max
self.bytes_sent = 0
def write_method(self, channel, method_sig, args, content=None):
write_frame = self.dest.write_frame
payload = pack('>HH', method_sig[0], method_sig[1]) + args
if content:
# do this early, so we can raise an exception if there's a
# problem with the content properties, before sending the
# first frame
body = content.body
if isinstance(body, string):
coding = content.properties.get('content_encoding', None)
if coding is None:
coding = content.properties['content_encoding'] = 'UTF-8'
body = body.encode(coding)
properties = content._serialize_properties()
write_frame(1, channel, payload)
if content:
payload = pack('>HHQ', method_sig[0], 0, len(body)) + properties
write_frame(2, channel, payload)
chunk_size = self.frame_max - 8
for i in range(0, len(body), chunk_size):
write_frame(3, channel, body[i:i + chunk_size])
self.bytes_sent += 1
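A framing sketch (_CollectFrames is invented for illustration): MethodWriter emits one method frame, then, for content, a header frame plus body frames of at most frame_max - 8 bytes.

from amqp.basic_message import Message
from amqp.method_framing import MethodWriter

class _CollectFrames(object):
    def __init__(self):
        self.frames = []  # stand-in destination capturing (type, channel, payload)
    def write_frame(self, frame_type, channel, payload):
        self.frames.append((frame_type, channel, payload))

dest = _CollectFrames()
MethodWriter(dest, frame_max=16).write_method(
    1, (60, 40), b'', content=Message('hello world'))  # Basic.publish
# 11-byte body split into 8-byte chunks -> method, header, two body frames
assert [f[0] for f in dest.frames] == [1, 2, 3, 3]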


@@ -1,13 +0,0 @@
from __future__ import absolute_import
from collections import namedtuple
queue_declare_ok_t = namedtuple(
'queue_declare_ok_t', ('queue', 'message_count', 'consumer_count'),
)
basic_return_t = namedtuple(
'basic_return_t',
('reply_code', 'reply_text', 'exchange', 'routing_key', 'message'),
)
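Usage sketch: the namedtuples give field access by name to method replies.

from amqp.protocol import queue_declare_ok_t

ok = queue_declare_ok_t(queue='celery', message_count=7, consumer_count=1)
assert ok.queue == 'celery' and ok.message_count == 7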


@@ -1,510 +0,0 @@
"""
Convert between bytestreams and higher-level AMQP types.
2007-11-05 Barry Pederson <bp@barryp.org>
"""
# Copyright (C) 2007 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
import sys
from datetime import datetime
from decimal import Decimal
from io import BytesIO
from struct import pack, unpack
from time import mktime
from .exceptions import FrameSyntaxError
from .five import int_types, long_t, string, string_t, items
IS_PY3K = sys.version_info[0] >= 3
if IS_PY3K:
def byte(n):
return bytes([n])
else:
byte = chr
ILLEGAL_TABLE_TYPE_WITH_KEY = """\
Table type {0!r} for key {1!r} not handled by amqp. [value: {2!r}]
"""
ILLEGAL_TABLE_TYPE = """\
Table type {0!r} not handled by amqp. [value: {1!r}]
"""
class AMQPReader(object):
"""Read higher-level AMQP types from a bytestream."""
def __init__(self, source):
"""Source should be either a file-like object with a read() method, or
a plain (non-unicode) string."""
if isinstance(source, bytes):
self.input = BytesIO(source)
elif hasattr(source, 'read'):
self.input = source
else:
raise ValueError(
'AMQPReader needs a file-like object or plain string')
self.bitcount = self.bits = 0
def close(self):
self.input.close()
def read(self, n):
"""Read n bytes."""
self.bitcount = self.bits = 0
return self.input.read(n)
def read_bit(self):
"""Read a single boolean value."""
if not self.bitcount:
self.bits = ord(self.input.read(1))
self.bitcount = 8
result = (self.bits & 1) == 1
self.bits >>= 1
self.bitcount -= 1
return result
def read_octet(self):
"""Read one byte, return as an integer"""
self.bitcount = self.bits = 0
return unpack('B', self.input.read(1))[0]
def read_short(self):
"""Read an unsigned 16-bit integer"""
self.bitcount = self.bits = 0
return unpack('>H', self.input.read(2))[0]
def read_long(self):
"""Read an unsigned 32-bit integer"""
self.bitcount = self.bits = 0
return unpack('>I', self.input.read(4))[0]
def read_longlong(self):
"""Read an unsigned 64-bit integer"""
self.bitcount = self.bits = 0
return unpack('>Q', self.input.read(8))[0]
def read_float(self):
"""Read float value."""
self.bitcount = self.bits = 0
return unpack('>d', self.input.read(8))[0]
def read_shortstr(self):
"""Read a short string that's stored in up to 255 bytes.
The encoding isn't specified in the AMQP spec, so
assume it's utf-8
"""
self.bitcount = self.bits = 0
slen = unpack('B', self.input.read(1))[0]
return self.input.read(slen).decode('utf-8')
def read_longstr(self):
"""Read a string that's up to 2**32 bytes.
The encoding isn't specified in the AMQP spec, so
assume it's utf-8
"""
self.bitcount = self.bits = 0
slen = unpack('>I', self.input.read(4))[0]
return self.input.read(slen).decode('utf-8')
def read_table(self):
"""Read an AMQP table, and return as a Python dictionary."""
self.bitcount = self.bits = 0
tlen = unpack('>I', self.input.read(4))[0]
table_data = AMQPReader(self.input.read(tlen))
result = {}
while table_data.input.tell() < tlen:
name = table_data.read_shortstr()
val = table_data.read_item()
result[name] = val
return result
def read_item(self):
ftype = ord(self.input.read(1))
# 'S': long string
if ftype == 83:
val = self.read_longstr()
# 's': short string
elif ftype == 115:
val = self.read_shortstr()
# 'b': short-short int
elif ftype == 98:
val, = unpack('>b', self.input.read(1))
# 'B': short-short unsigned int
elif ftype == 66:
val, = unpack('>B', self.input.read(1))
# 'U': short int
elif ftype == 85:
val, = unpack('>h', self.input.read(2))
# 'u': short unsigned int
elif ftype == 117:
val, = unpack('>H', self.input.read(2))
# 'I': long int
elif ftype == 73:
val, = unpack('>i', self.input.read(4))
# 'i': long unsigned int
elif ftype == 105:
val, = unpack('>I', self.input.read(4))
# 'L': long long int
elif ftype == 76:
val, = unpack('>q', self.input.read(8))
# 'l': long long unsigned int
elif ftype == 108:
val, = unpack('>Q', self.input.read(8))
# 'f': float
elif ftype == 102:
val, = unpack('>f', self.input.read(4))
# 'd': double
elif ftype == 100:
val = self.read_float()
# 'D': decimal
elif ftype == 68:
d = self.read_octet()
n, = unpack('>i', self.input.read(4))
val = Decimal(n) / Decimal(10 ** d)
# 'F': table
elif ftype == 70:
val = self.read_table() # recurse
# 'A': array
elif ftype == 65:
val = self.read_array()
# 't' (bool)
elif ftype == 116:
val = self.read_bit()
# 'T': timestamp
elif ftype == 84:
val = self.read_timestamp()
# 'V': void
elif ftype == 86:
val = None
else:
raise FrameSyntaxError(
'Unknown value in table: {0!r} ({1!r})'.format(
ftype, type(ftype)))
return val
def read_array(self):
array_length = unpack('>I', self.input.read(4))[0]
array_data = AMQPReader(self.input.read(array_length))
result = []
while array_data.input.tell() < array_length:
val = array_data.read_item()
result.append(val)
return result
def read_timestamp(self):
"""Read and AMQP timestamp, which is a 64-bit integer representing
seconds since the Unix epoch in 1-second resolution.
Return as a Python datetime.datetime object,
expressed as localtime.
"""
return datetime.fromtimestamp(self.read_longlong())
class AMQPWriter(object):
"""Convert higher-level AMQP types to bytestreams."""
def __init__(self, dest=None):
"""dest may be a file-type object (with a write() method). If None
then a BytesIO is created, and the contents can be accessed with
this class's getvalue() method."""
self.out = BytesIO() if dest is None else dest
self.bits = []
self.bitcount = 0
def _flushbits(self):
if self.bits:
out = self.out
for b in self.bits:
out.write(pack('B', b))
self.bits = []
self.bitcount = 0
def close(self):
"""Pass through if possible to any file-like destinations."""
try:
self.out.close()
except AttributeError:
pass
def flush(self):
"""Pass through if possible to any file-like destinations."""
try:
self.out.flush()
except AttributeError:
pass
def getvalue(self):
"""Get what's been encoded so far if we're working with a BytesIO."""
self._flushbits()
return self.out.getvalue()
def write(self, s):
"""Write a plain Python string with no special encoding in Python 2.x,
or bytes in Python 3.x"""
self._flushbits()
self.out.write(s)
def write_bit(self, b):
"""Write a boolean value."""
b = 1 if b else 0
shift = self.bitcount % 8
if shift == 0:
self.bits.append(0)
self.bits[-1] |= (b << shift)
self.bitcount += 1
def write_octet(self, n):
"""Write an integer as an unsigned 8-bit value."""
if n < 0 or n > 255:
raise FrameSyntaxError(
'Octet {0!r} out of range 0..255'.format(n))
self._flushbits()
self.out.write(pack('B', n))
def write_short(self, n):
"""Write an integer as an unsigned 16-bit value."""
if n < 0 or n > 65535:
raise FrameSyntaxError(
'Octet {0!r} out of range 0..65535'.format(n))
self._flushbits()
self.out.write(pack('>H', int(n)))
def write_long(self, n):
"""Write an integer as an unsigned2 32-bit value."""
if n < 0 or n >= 4294967296:
raise FrameSyntaxError(
'Octet {0!r} out of range 0..2**32-1'.format(n))
self._flushbits()
self.out.write(pack('>I', n))
def write_longlong(self, n):
"""Write an integer as an unsigned 64-bit value."""
if n < 0 or n >= 18446744073709551616:
raise FrameSyntaxError(
'Octet {0!r} out of range 0..2**64-1'.format(n))
self._flushbits()
self.out.write(pack('>Q', n))
def write_shortstr(self, s):
"""Write a string up to 255 bytes long (after any encoding).
If passed a unicode string, encode with UTF-8.
"""
self._flushbits()
if isinstance(s, string):
s = s.encode('utf-8')
if len(s) > 255:
raise FrameSyntaxError(
'Shortstring overflow ({0} > 255)'.format(len(s)))
self.write_octet(len(s))
self.out.write(s)
def write_longstr(self, s):
"""Write a string up to 2**32 bytes long after encoding.
If passed a unicode string, encode as UTF-8.
"""
self._flushbits()
if isinstance(s, string):
s = s.encode('utf-8')
self.write_long(len(s))
self.out.write(s)
def write_table(self, d):
"""Write out a Python dictionary made of up string keys, and values
that are strings, signed integers, Decimal, datetime.datetime, or
sub-dictionaries following the same constraints."""
self._flushbits()
table_data = AMQPWriter()
for k, v in items(d):
table_data.write_shortstr(k)
table_data.write_item(v, k)
table_data = table_data.getvalue()
self.write_long(len(table_data))
self.out.write(table_data)
def write_item(self, v, k=None):
if isinstance(v, (string_t, bytes)):
if isinstance(v, string):
v = v.encode('utf-8')
self.write(b'S')
self.write_longstr(v)
elif isinstance(v, bool):
self.write(pack('>cB', b't', int(v)))
elif isinstance(v, float):
self.write(pack('>cd', b'd', v))
elif isinstance(v, int_types):
self.write(pack('>ci', b'I', v))
elif isinstance(v, Decimal):
self.write(b'D')
sign, digits, exponent = v.as_tuple()
v = 0
for d in digits:
v = (v * 10) + d
if sign:
v = -v
self.write_octet(-exponent)
self.write(pack('>i', v))
elif isinstance(v, datetime):
self.write(b'T')
self.write_timestamp(v)
## FIXME: timezone ?
elif isinstance(v, dict):
self.write(b'F')
self.write_table(v)
elif isinstance(v, (list, tuple)):
self.write(b'A')
self.write_array(v)
elif v is None:
self.write(b'V')
else:
err = (ILLEGAL_TABLE_TYPE_WITH_KEY.format(type(v), k, v) if k
else ILLEGAL_TABLE_TYPE.format(type(v), v))
raise FrameSyntaxError(err)
def write_array(self, a):
array_data = AMQPWriter()
for v in a:
array_data.write_item(v)
array_data = array_data.getvalue()
self.write_long(len(array_data))
self.out.write(array_data)
def write_timestamp(self, v):
"""Write out a Python datetime.datetime object as a 64-bit integer
representing seconds since the Unix epoch."""
self.out.write(pack('>q', long_t(mktime(v.timetuple()))))
class GenericContent(object):
"""Abstract base class for AMQP content.
Subclasses should override the PROPERTIES attribute.
"""
PROPERTIES = [('dummy', 'shortstr')]
def __init__(self, **props):
"""Save the properties appropriate to this AMQP content type
in a 'properties' dictionary."""
d = {}
for propname, _ in self.PROPERTIES:
if propname in props:
d[propname] = props[propname]
# FIXME: should we ignore unknown properties?
self.properties = d
def __eq__(self, other):
"""Check if this object has the same properties as another
content object."""
try:
return self.properties == other.properties
except AttributeError:
return NotImplemented
def __getattr__(self, name):
"""Look for additional properties in the 'properties'
dictionary, and if present - the 'delivery_info'
dictionary."""
if name == '__setstate__':
# Allows pickling/unpickling to work
raise AttributeError('__setstate__')
if name in self.properties:
return self.properties[name]
if 'delivery_info' in self.__dict__ \
and name in self.delivery_info:
return self.delivery_info[name]
raise AttributeError(name)
def _load_properties(self, raw_bytes):
"""Given the raw bytes containing the property-flags and property-list
from a content-frame-header, parse and insert into a dictionary
stored in this object as an attribute named 'properties'."""
r = AMQPReader(raw_bytes)
#
# Read 16-bit shorts until we get one with a low bit set to zero
#
flags = []
while 1:
flag_bits = r.read_short()
flags.append(flag_bits)
if flag_bits & 1 == 0:
break
shift = 0
d = {}
for key, proptype in self.PROPERTIES:
if shift == 0:
if not flags:
break
flag_bits, flags = flags[0], flags[1:]
shift = 15
if flag_bits & (1 << shift):
d[key] = getattr(r, 'read_' + proptype)()
shift -= 1
self.properties = d
def _serialize_properties(self):
"""serialize the 'properties' attribute (a dictionary) into
the raw bytes making up a set of property flags and a
property list, suitable for putting into a content frame header."""
shift = 15
flag_bits = 0
flags = []
raw_bytes = AMQPWriter()
for key, proptype in self.PROPERTIES:
val = self.properties.get(key, None)
if val is not None:
if shift == 0:
flags.append(flag_bits)
flag_bits = 0
shift = 15
flag_bits |= (1 << shift)
if proptype != 'bit':
getattr(raw_bytes, 'write_' + proptype)(val)
shift -= 1
flags.append(flag_bits)
result = AMQPWriter()
for flag_bits in flags:
result.write_short(flag_bits)
result.write(raw_bytes.getvalue())
return result.getvalue()
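Round-trip sketch (assuming the vendored package is importable): AMQPWriter encodes an AMQP table and AMQPReader decodes it back.

from amqp.serialization import AMQPReader, AMQPWriter

w = AMQPWriter()
w.write_table({'retries': 3, 'note': 'hello'})
assert AMQPReader(w.getvalue()).read_table() == {'retries': 3, 'note': 'hello'}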


@@ -1,294 +0,0 @@
# Copyright (C) 2009 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
import errno
import re
import socket
import ssl
# Jython does not have this attribute
try:
from socket import SOL_TCP
except ImportError: # pragma: no cover
from socket import IPPROTO_TCP as SOL_TCP # noqa
try:
from ssl import SSLError
except ImportError:
class SSLError(Exception): # noqa
pass
from struct import pack, unpack
from .exceptions import UnexpectedFrame
from .utils import get_errno, set_cloexec
_UNAVAIL = errno.EAGAIN, errno.EINTR, errno.ENOENT
AMQP_PORT = 5672
EMPTY_BUFFER = bytes()
# Yes, Advanced Message Queuing Protocol Protocol is redundant
AMQP_PROTOCOL_HEADER = 'AMQP\x01\x01\x00\x09'.encode('latin_1')
# Match things like: [fe80::1]:5432, from RFC 2732
IPV6_LITERAL = re.compile(r'\[([\.0-9a-f:]+)\](?::(\d+))?')
class _AbstractTransport(object):
"""Common superclass for TCP and SSL transports"""
connected = False
def __init__(self, host, connect_timeout):
self.connected = True
msg = None
port = AMQP_PORT
m = IPV6_LITERAL.match(host)
if m:
host = m.group(1)
if m.group(2):
port = int(m.group(2))
else:
if ':' in host:
host, port = host.rsplit(':', 1)
port = int(port)
self.sock = None
last_err = None
for res in socket.getaddrinfo(host, port, 0,
socket.SOCK_STREAM, SOL_TCP):
af, socktype, proto, canonname, sa = res
try:
self.sock = socket.socket(af, socktype, proto)
try:
set_cloexec(self.sock, True)
except NotImplementedError:
pass
self.sock.settimeout(connect_timeout)
self.sock.connect(sa)
except socket.error as exc:
msg = exc
self.sock.close()
self.sock = None
last_err = msg
continue
break
if not self.sock:
# Didn't connect, return the most recent error message
raise socket.error(last_err)
try:
self.sock.settimeout(None)
self.sock.setsockopt(SOL_TCP, socket.TCP_NODELAY, 1)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self._setup_transport()
self._write(AMQP_PROTOCOL_HEADER)
except (OSError, IOError, socket.error) as exc:
if get_errno(exc) not in _UNAVAIL:
self.connected = False
raise
def __del__(self):
try:
# socket module may have been collected by gc
# if this is called by a thread at shutdown.
if socket is not None:
try:
self.close()
except socket.error:
pass
finally:
self.sock = None
def _read(self, n, initial=False):
"""Read exactly n bytes from the peer"""
raise NotImplementedError('Must be overridden in subclass')
def _setup_transport(self):
"""Do any additional initialization of the class (used
by the subclasses)."""
pass
def _shutdown_transport(self):
"""Do any preliminary work in shutting down the connection."""
pass
def _write(self, s):
"""Completely write a string to the peer."""
raise NotImplementedError('Must be overridden in subclass')
def close(self):
if self.sock is not None:
self._shutdown_transport()
# Call shutdown first to make sure that pending messages
# reach the AMQP broker if the program exits after
# calling this method.
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
self.sock = None
self.connected = False
def read_frame(self, unpack=unpack):
read = self._read
try:
frame_type, channel, size = unpack('>BHI', read(7, True))
payload = read(size)
ch = ord(read(1))
except socket.timeout:
raise
except (OSError, IOError, socket.error) as exc:
# Don't disconnect for ssl read time outs
# http://bugs.python.org/issue10272
if isinstance(exc, SSLError) and 'timed out' in str(exc):
raise socket.timeout()
if get_errno(exc) not in _UNAVAIL:
self.connected = False
raise
if ch == 206: # '\xce'
return frame_type, channel, payload
else:
raise UnexpectedFrame(
'Received 0x{0:02x} while expecting 0xce'.format(ch))
def write_frame(self, frame_type, channel, payload):
size = len(payload)
try:
self._write(pack(
'>BHI%dsB' % size,
frame_type, channel, size, payload, 0xce,
))
except socket.timeout:
raise
except (OSError, IOError, socket.error) as exc:
if get_errno(exc) not in _UNAVAIL:
self.connected = False
raise
class SSLTransport(_AbstractTransport):
"""Transport that works over SSL"""
def __init__(self, host, connect_timeout, ssl):
if isinstance(ssl, dict):
self.sslopts = ssl
self._read_buffer = EMPTY_BUFFER
super(SSLTransport, self).__init__(host, connect_timeout)
def _setup_transport(self):
"""Wrap the socket in an SSL object."""
if hasattr(self, 'sslopts'):
self.sock = ssl.wrap_socket(self.sock, **self.sslopts)
else:
self.sock = ssl.wrap_socket(self.sock)
self.sock.do_handshake()
self._quick_recv = self.sock.read
def _shutdown_transport(self):
"""Unwrap a Python 2.6 SSL socket, so we can call shutdown()"""
if self.sock is not None:
try:
unwrap = self.sock.unwrap
except AttributeError:
return
self.sock = unwrap()
def _read(self, n, initial=False,
_errnos=(errno.ENOENT, errno.EAGAIN, errno.EINTR)):
# According to SSL_read(3), it can at most return 16kb of data.
# Thus, we use an internal read buffer like TCPTransport._read
# to get the exact number of bytes wanted.
recv = self._quick_recv
rbuf = self._read_buffer
try:
while len(rbuf) < n:
try:
s = recv(n - len(rbuf)) # see note above
except socket.error as exc:
# ssl.sock.read may cause ENOENT if the
# operation couldn't be performed (Issue celery#1414).
if not initial and exc.errno in _errnos:
continue
raise
if not s:
raise IOError('Socket closed')
rbuf += s
except:
self._read_buffer = rbuf
raise
result, self._read_buffer = rbuf[:n], rbuf[n:]
return result
def _write(self, s):
"""Write a string out to the SSL socket fully."""
try:
write = self.sock.write
except AttributeError:
# Works around a bug in python socket library
raise IOError('Socket closed')
else:
while s:
n = write(s)
if not n:
raise IOError('Socket closed')
s = s[n:]
class TCPTransport(_AbstractTransport):
"""Transport that deals directly with TCP socket."""
def _setup_transport(self):
"""Setup to _write() directly to the socket, and
do our own buffered reads."""
self._write = self.sock.sendall
self._read_buffer = EMPTY_BUFFER
self._quick_recv = self.sock.recv
def _read(self, n, initial=False, _errnos=(errno.EAGAIN, errno.EINTR)):
"""Read exactly n bytes from the socket"""
recv = self._quick_recv
rbuf = self._read_buffer
try:
while len(rbuf) < n:
try:
s = recv(n - len(rbuf))
except socket.error as exc:
if not initial and exc.errno in _errnos:
continue
raise
if not s:
raise IOError('Socket closed')
rbuf += s
except:
self._read_buffer = rbuf
raise
result, self._read_buffer = rbuf[:n], rbuf[n:]
return result
def create_transport(host, connect_timeout, ssl=False):
"""Given a few parameters from the Connection constructor,
select and create a subclass of _AbstractTransport."""
if ssl:
return SSLTransport(host, connect_timeout, ssl)
else:
return TCPTransport(host, connect_timeout)
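The two frame methods above fix the wire layout: a 7-byte header packed as '>BHI' (frame type, channel, size), the payload, then a 0xCE frame-end octet. A quick standalone sketch of that layout (illustrative values only, not AWX or amqp code):

from struct import pack, unpack

# Build a frame the way write_frame does.
frame_type, channel, payload = 1, 0, b'hello'
frame = pack('>BHI%dsB' % len(payload),
             frame_type, channel, len(payload), payload, 0xce)

# Parse it back the way read_frame does.
ftype, chan, size = unpack('>BHI', frame[:7])
assert (ftype, chan, frame[7:7 + size]) == (1, 0, b'hello')
assert frame[7 + size:] == b'\xce'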

View File

@ -1,102 +0,0 @@
from __future__ import absolute_import
import sys
try:
import fcntl
except ImportError:
fcntl = None # noqa
class promise(object):
if not hasattr(sys, 'pypy_version_info'):
__slots__ = tuple(
'fun args kwargs value ready failed '
' on_success on_error calls'.split()
)
def __init__(self, fun, args=(), kwargs=(),
on_success=None, on_error=None):
self.fun = fun
self.args = args
self.kwargs = kwargs
self.ready = False
self.failed = False
self.on_success = on_success
self.on_error = on_error
self.value = None
self.calls = 0
def __repr__(self):
return '<$: {0.fun.__name__}(*{0.args!r}, **{0.kwargs!r})>'.format(
self,
)
def __call__(self, *args, **kwargs):
try:
self.value = self.fun(
*self.args + args if self.args else args,
**dict(self.kwargs, **kwargs) if self.kwargs else kwargs
)
except Exception as exc:
self.set_error_state(exc)
else:
if self.on_success:
self.on_success(self.value)
finally:
self.ready = True
self.calls += 1
def then(self, callback=None, on_error=None):
self.on_success = callback
self.on_error = on_error
return callback
def set_error_state(self, exc):
self.failed = True
if self.on_error is None:
raise
self.on_error(exc)
def throw(self, exc):
try:
raise exc
except exc.__class__ as with_cause:
self.set_error_state(with_cause)
def noop():
return promise(lambda *a, **k: None)
try:
from os import set_cloexec # Python 3.4?
except ImportError:
def set_cloexec(fd, cloexec): # noqa
try:
FD_CLOEXEC = fcntl.FD_CLOEXEC
except AttributeError:
raise NotImplementedError(
'close-on-exec flag not supported on this platform',
)
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
if cloexec:
flags |= FD_CLOEXEC
else:
flags &= ~FD_CLOEXEC
return fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def get_errno(exc):
""":exc:`socket.error` and :exc:`IOError` first got
the ``.errno`` attribute in Py2.7"""
try:
return exc.errno
except AttributeError:
try:
# e.args = (errno, reason)
if isinstance(exc.args, tuple) and len(exc.args) == 2:
return exc.args[0]
except AttributeError:
pass
return 0
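A minimal usage sketch for the promise class above (the callback name is illustrative, not part of the vendored module):

def on_done(value):
    print('got %r' % (value,))

p = promise(lambda x, y: x + y, args=(1,), on_success=on_done)
p(2)                     # runs fun(1, 2), stores 3, fires on_done
assert p.ready and p.value == 3 and p.calls == 1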

View File

@ -1,128 +0,0 @@
"""
Converts ANSI coded text to either plain text or HTML.
"""
import re
supported_sgr_codes = [1, 3, 4, 9, 30, 31, 32, 33, 34, 35, 36, 37, 40, 41, 42,
43, 44, 45, 46, 47]
def to_plain(ansi):
"""Takes the given string and strips all ANSI codes out.
:param ansi: The string to strip
:return: The stripped string
"""
return re.sub(r'\x1B\[[0-9;]*[ABCDEFGHJKSTfmnsulh]', '', ansi)
def to_html(ansi, replace_newline=False):
"""Converts the given ANSI string to HTML
If `replace_newline` is set to True, then all newlines will be
replaced with <br />.
:param ansi: The ANSI text.
:param replace_newline: Whether to replace newlines with HTML.
:return: The resulting HTML string.
"""
blocks = ansi.split('\x1B')
parsed_blocks = []
for block in blocks:
command, text = _block_to_html(block)
# The command "A" means move the cursor up, so we emulate that here.
if command == 'A' and len(parsed_blocks) > 0:
parsed_blocks.pop()
while len(parsed_blocks) > 0 and '\n' not in parsed_blocks[-1]:
parsed_blocks.pop()
parsed_blocks.append(text)
text = ''.join(parsed_blocks)
if replace_newline:
text = text.replace('\n', '<br />\n')
return text
def base_css(dark=True):
"""Some base CSS with all of the default ANSI styles/colors.
:param dark: Whether background should be dark or light.
:return: A string of CSS
"""
return "\n".join([
css_rule('.ansi_fore', color=('#000000', '#FFFFFF')[dark]),
css_rule('.ansi_back', background_color=('#FFFFFF', '#000000')[dark]),
css_rule('.ansi1', font_weight='bold'),
css_rule('.ansi3', font_style='italic'),
css_rule('.ansi4', text_decoration='underline'),
css_rule('.ansi9', text_decoration='line-through'),
css_rule('.ansi30', color="#000000"),
css_rule('.ansi31', color="#FF0000"),
css_rule('.ansi32', color="#00FF00"),
css_rule('.ansi33', color="#FFFF00"),
css_rule('.ansi34', color="#0000FF"),
css_rule('.ansi35', color="#FF00FF"),
css_rule('.ansi36', color="#00FFFF"),
css_rule('.ansi37', color="#FFFFFF"),
css_rule('.ansi40', background_color="#000000"),
css_rule('.ansi41', background_color="#FF0000"),
css_rule('.ansi42', background_color="#00FF00"),
css_rule('.ansi43', background_color="#FFFF00"),
css_rule('.ansi44', background_color="#0000FF"),
css_rule('.ansi45', background_color="#FF00FF"),
css_rule('.ansi46', background_color="#00FFFF"),
css_rule('.ansi47', background_color="#FFFFFF")
])
def css_rule(class_name, **properties):
"""Creates a CSS rule string.
The named parameters are used as the css properties. Underscores
are converted to hyphens.
:param class_name: The CSS class name
:param properties: The properties sent as named params.
:return: The CSS string
"""
prop_str = lambda name, val: name.replace('_', '-') + ': ' + val
return '{0} {{ {1}; }}'.format(
class_name,
'; '.join([prop_str(prop, properties[prop]) for prop in properties])
)
def _block_to_html(text):
"""Converts the given block of ANSI coded text to HTML.
The text is only given back as HTML if the ANSI code is at the
beginning of the string (e.g. "[0;33mFoobar")
:param text: The text block to convert.
:return: The text as HTML
"""
match = re.match(r'^\[(?P<code>\d+(?:;\d+)*)?(?P<command>[Am])', text)
if match is None:
return None, text
command = match.group('command')
text = text[match.end():]
if match.group('code') is None:
return command, text
classes = []
for code in match.group('code').split(';'):
if int(code) in supported_sgr_codes:
classes.append('ansi{0}'.format(code))
if classes:
text = u'<span class="{0}">{1}</span>'.format(' '.join(classes), text)
return command, text
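A short usage sketch for the converter above (\x1b is the ANSI escape byte; the span classes correspond to the rules emitted by base_css()):

colored = '\x1b[1;31mFAILED\x1b[0m: 1 host unreachable\n'
to_plain(colored)
# -> 'FAILED: 1 host unreachable\n'
to_html(colored, replace_newline=True)
# -> '<span class="ansi1 ansi31">FAILED</span>: 1 host unreachable<br />\n'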

View File

@ -1,142 +0,0 @@
"""Wraps the best available JSON implementation available in a common
interface"""
import sys
VERSION = (0, 3, 3)
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Rune Halvorsen"
__contact__ = "runefh@gmail.com"
__homepage__ = "http://bitbucket.org/runeh/anyjson/"
__docformat__ = "restructuredtext"
# -eof meta-
#: The json implementation object. This is probably not useful to you,
#: except to get the name of the implementation in use. The name is
#: available through ``implementation.name``.
implementation = None
# json.loads does not support buffer() objects,
# so we use load() with a StringIO instead, which avoids a copy.
if sys.version_info[0] == 3:
from io import StringIO
else:
try:
from cStringIO import StringIO # noqa
except ImportError:
from StringIO import StringIO # noqa
#: List of known json modules, and the names of their loads/dumps
#: methods, as well as the exceptions they throw. Exception can be either
#: an exception class or a string.
_modules = [("yajl", "dumps", TypeError, "loads", ValueError, "load"),
("jsonlib2", "write", "WriteError", "read", "ReadError", None),
("jsonlib", "write", "WriteError", "read", "ReadError", None),
("simplejson", "dumps", TypeError, "loads", ValueError, "load"),
("json", "dumps", TypeError, "loads", ValueError, "load"),
("django.utils.simplejson", "dumps", TypeError, "loads", ValueError, "load"),
("cjson", "encode", "EncodeError", "decode", "DecodeError", None)
]
_fields = ("modname", "encoder", "encerror",
"decoder", "decerror", "filedecoder")
class _JsonImplementation(object):
"""Incapsulates a JSON implementation"""
def __init__(self, modspec):
modinfo = dict(zip(_fields, modspec))
if modinfo["modname"] == "cjson":
import warnings
warnings.warn("cjson is deprecated! See http://pypi.python.org/pypi/python-cjson/1.0.5", DeprecationWarning)
# No try block. We want ImportError to end up at the caller
module = self._attempt_load(modinfo["modname"])
self.implementation = modinfo["modname"]
self._encode = getattr(module, modinfo["encoder"])
self._decode = getattr(module, modinfo["decoder"])
fdec = modinfo["filedecoder"]
self._filedecode = fdec and getattr(module, fdec)
self._encode_error = modinfo["encerror"]
self._decode_error = modinfo["decerror"]
if isinstance(modinfo["encerror"], basestring):
self._encode_error = getattr(module, modinfo["encerror"])
if isinstance(modinfo["decerror"], basestring):
self._decode_error = getattr(module, modinfo["decerror"])
self.name = modinfo["modname"]
def __repr__(self):
return "<_JsonImplementation instance using %s>" % self.name
def _attempt_load(self, modname):
"""Attempt to load module name modname, returning it on success,
throwing ImportError if module couldn't be imported"""
__import__(modname)
return sys.modules[modname]
def dumps(self, data):
"""Serialize the datastructure to json. Returns a string. Raises
TypeError if the object could not be serialized."""
try:
return self._encode(data)
except self._encode_error, exc:
raise TypeError, TypeError(*exc.args), sys.exc_info()[2]
serialize = dumps
def loads(self, s):
"""deserialize the string to python data types. Raises
ValueError if the string could not be parsed."""
# uses StringIO to support buffer objects.
try:
if self._filedecode and not isinstance(s, basestring):
return self._filedecode(StringIO(s))
return self._decode(s)
except self._decode_error, exc:
raise ValueError, ValueError(*exc.args), sys.exc_info()[2]
deserialize = loads
def force_implementation(modname):
"""Forces anyjson to use a specific json module if it's available"""
global implementation
for name, spec in [(e[0], e) for e in _modules]:
if name == modname:
implementation = _JsonImplementation(spec)
return
raise ImportError("No module named: %s" % modname)
if __name__ == "__main__":
# If run as a script, we do nothing but print an error message.
# We do NOT try to load a compatible module because that may throw an
# exception, which renders the package uninstallable with easy_install
# (it tries to execfile the script when installing, to make sure it works)
print "Running anyjson as a stand alone script is not supported"
sys.exit(1)
else:
for modspec in _modules:
try:
implementation = _JsonImplementation(modspec)
break
except ImportError:
pass
else:
raise ImportError("No supported JSON module found")
def loads(value):
"""Serialize the object to JSON."""
return implementation.loads(value)
deserialize = loads # compat
def dumps(value):
"""Deserialize JSON-encoded object to a Python object."""
return implementation.dumps(value)
serialize = dumps
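Callers of the module above only see the two-function facade; the backend is whichever JSON module won at import time (the import name anyjson is assumed here):

import anyjson

s = anyjson.dumps({'a': [1, 2]})      # serialize via the selected backend
assert anyjson.loads(s) == {'a': [1, 2]}
print(anyjson.implementation.name)    # e.g. 'simplejson' or 'json'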

File diff suppressed because it is too large

View File

@ -1,999 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import ast
import base64
import hashlib
import hmac
import sys
import types
import warnings
if sys.version_info < (3,):
from urllib2 import quote as url_quote
from urllib2 import unquote as url_unquote
_strtype = basestring
else:
from urllib.parse import quote as url_quote
from urllib.parse import unquote as url_unquote
_strtype = str
from datetime import datetime
from xml.dom import minidom
from xml.sax.saxutils import escape as xml_escape
#--------------------------------------------------------------------------
# constants
__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'
__version__ = '0.9.0'
# Live ServiceClient URLs
BLOB_SERVICE_HOST_BASE = '.blob.core.windows.net'
QUEUE_SERVICE_HOST_BASE = '.queue.core.windows.net'
TABLE_SERVICE_HOST_BASE = '.table.core.windows.net'
SERVICE_BUS_HOST_BASE = '.servicebus.windows.net'
MANAGEMENT_HOST = 'management.core.windows.net'
# Development ServiceClient URLs
DEV_BLOB_HOST = '127.0.0.1:10000'
DEV_QUEUE_HOST = '127.0.0.1:10001'
DEV_TABLE_HOST = '127.0.0.1:10002'
# Default credentials for Development Storage Service
DEV_ACCOUNT_NAME = 'devstoreaccount1'
DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='
# All of our error messages
_ERROR_CANNOT_FIND_PARTITION_KEY = 'Cannot find partition key in request.'
_ERROR_CANNOT_FIND_ROW_KEY = 'Cannot find row key in request.'
_ERROR_INCORRECT_TABLE_IN_BATCH = \
'Table should be the same in a batch operation'
_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH = \
'Partition Key should be the same in a batch operation'
_ERROR_DUPLICATE_ROW_KEY_IN_BATCH = \
'Row Keys should not be the same in a batch operation'
_ERROR_BATCH_COMMIT_FAIL = 'Batch Commit Fail'
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE = \
'Message is not peek locked and cannot be deleted.'
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK = \
'Message is not peek locked and cannot be unlocked.'
_ERROR_QUEUE_NOT_FOUND = 'Queue was not found'
_ERROR_TOPIC_NOT_FOUND = 'Topic was not found'
_ERROR_CONFLICT = 'Conflict ({0})'
_ERROR_NOT_FOUND = 'Not found ({0})'
_ERROR_UNKNOWN = 'Unknown error ({0})'
_ERROR_SERVICEBUS_MISSING_INFO = \
'You need to provide servicebus namespace, access key and Issuer'
_ERROR_STORAGE_MISSING_INFO = \
'You need to provide both account name and access key'
_ERROR_ACCESS_POLICY = \
'share_access_policy must be either SignedIdentifier or AccessPolicy ' + \
'instance'
_WARNING_VALUE_SHOULD_BE_BYTES = \
'Warning: {0} must be bytes data type. It will be converted ' + \
'automatically, with utf-8 text encoding.'
_ERROR_VALUE_SHOULD_BE_BYTES = '{0} should be of type bytes.'
_ERROR_VALUE_NONE = '{0} should not be None.'
_ERROR_VALUE_NEGATIVE = '{0} should not be negative.'
_ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY = \
'Cannot serialize the specified value ({0}) to an entity. Please use ' + \
'an EntityProperty (which can specify custom types), int, str, bool, ' + \
'or datetime.'
_ERROR_PAGE_BLOB_SIZE_ALIGNMENT = \
'Invalid page blob size: {0}. ' + \
'The size must be aligned to a 512-byte boundary.'
_USER_AGENT_STRING = 'pyazure/' + __version__
METADATA_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata'
class WindowsAzureData(object):
''' This is the base of the data classes.
It is only used to check whether an object is an instance of one. '''
pass
class WindowsAzureError(Exception):
''' WindowsAzure Exception base class. '''
def __init__(self, message):
super(WindowsAzureError, self).__init__(message)
class WindowsAzureConflictError(WindowsAzureError):
'''Indicates that the resource could not be created because it already
exists'''
def __init__(self, message):
super(WindowsAzureConflictError, self).__init__(message)
class WindowsAzureMissingResourceError(WindowsAzureError):
'''Indicates that a request for a resource (queue, table,
container, etc...) failed because the specified resource does not exist'''
def __init__(self, message):
super(WindowsAzureMissingResourceError, self).__init__(message)
class WindowsAzureBatchOperationError(WindowsAzureError):
'''Indicates that a batch operation failed'''
def __init__(self, message, code):
super(WindowsAzureBatchOperationError, self).__init__(message)
self.code = code
class Feed(object):
pass
class _Base64String(str):
pass
class HeaderDict(dict):
def __getitem__(self, index):
return super(HeaderDict, self).__getitem__(index.lower())
def _encode_base64(data):
if isinstance(data, _unicode_type):
data = data.encode('utf-8')
encoded = base64.b64encode(data)
return encoded.decode('utf-8')
def _decode_base64_to_bytes(data):
if isinstance(data, _unicode_type):
data = data.encode('utf-8')
return base64.b64decode(data)
def _decode_base64_to_text(data):
decoded_bytes = _decode_base64_to_bytes(data)
return decoded_bytes.decode('utf-8')
def _get_readable_id(id_name, id_prefix_to_skip):
"""simplified an id to be more friendly for us people"""
# id_name is in the form 'https://namespace.host.suffix/name'
# where name may contain a forward slash!
pos = id_name.find('//')
if pos != -1:
pos += 2
if id_prefix_to_skip:
pos = id_name.find(id_prefix_to_skip, pos)
if pos != -1:
pos += len(id_prefix_to_skip)
pos = id_name.find('/', pos)
if pos != -1:
return id_name[pos + 1:]
return id_name
def _get_entry_properties_from_node(entry, include_id, id_prefix_to_skip=None, use_title_as_id=False):
''' get properties from entry xml '''
properties = {}
etag = entry.getAttributeNS(METADATA_NS, 'etag')
if etag:
properties['etag'] = etag
for updated in _get_child_nodes(entry, 'updated'):
properties['updated'] = updated.firstChild.nodeValue
for name in _get_children_from_path(entry, 'author', 'name'):
if name.firstChild is not None:
properties['author'] = name.firstChild.nodeValue
if include_id:
if use_title_as_id:
for title in _get_child_nodes(entry, 'title'):
properties['name'] = title.firstChild.nodeValue
else:
for id in _get_child_nodes(entry, 'id'):
properties['name'] = _get_readable_id(
id.firstChild.nodeValue, id_prefix_to_skip)
return properties
def _get_entry_properties(xmlstr, include_id, id_prefix_to_skip=None):
''' get properties from entry xml '''
xmldoc = minidom.parseString(xmlstr)
properties = {}
for entry in _get_child_nodes(xmldoc, 'entry'):
properties.update(_get_entry_properties_from_node(entry, include_id, id_prefix_to_skip))
return properties
def _get_first_child_node_value(parent_node, node_name):
xml_attrs = _get_child_nodes(parent_node, node_name)
if xml_attrs:
xml_attr = xml_attrs[0]
if xml_attr.firstChild:
value = xml_attr.firstChild.nodeValue
return value
def _get_child_nodes(node, tagName):
return [childNode for childNode in node.getElementsByTagName(tagName)
if childNode.parentNode == node]
def _get_children_from_path(node, *path):
'''descends through a hierarchy of nodes, returning the list of children
at the innermost level. Only returns children who share a common parent,
not cousins.'''
cur = node
for index, child in enumerate(path):
if isinstance(child, _strtype):
next = _get_child_nodes(cur, child)
else:
next = _get_child_nodesNS(cur, *child)
if index == len(path) - 1:
return next
elif not next:
break
cur = next[0]
return []
def _get_child_nodesNS(node, ns, tagName):
return [childNode for childNode in node.getElementsByTagNameNS(ns, tagName)
if childNode.parentNode == node]
def _create_entry(entry_body):
''' Adds the common part of an entry to the given entry body and returns
the whole xml. '''
updated_str = datetime.utcnow().isoformat()
if datetime.utcnow().utcoffset() is None:
updated_str += '+00:00'
entry_start = '''<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom" >
<title /><updated>{updated}</updated><author><name /></author><id />
<content type="application/xml">
{body}</content></entry>'''
return entry_start.format(updated=updated_str, body=entry_body)
def _to_datetime(strtime):
return datetime.strptime(strtime, "%Y-%m-%dT%H:%M:%S.%f")
_KNOWN_SERIALIZATION_XFORMS = {
'include_apis': 'IncludeAPIs',
'message_id': 'MessageId',
'content_md5': 'Content-MD5',
'last_modified': 'Last-Modified',
'cache_control': 'Cache-Control',
'account_admin_live_email_id': 'AccountAdminLiveEmailId',
'service_admin_live_email_id': 'ServiceAdminLiveEmailId',
'subscription_id': 'SubscriptionID',
'fqdn': 'FQDN',
'private_id': 'PrivateID',
'os_virtual_hard_disk': 'OSVirtualHardDisk',
'logical_disk_size_in_gb': 'LogicalDiskSizeInGB',
'logical_size_in_gb': 'LogicalSizeInGB',
'os': 'OS',
'persistent_vm_downtime_info': 'PersistentVMDowntimeInfo',
'copy_id': 'CopyId',
'os_state': 'OSState',
'vm_image': 'VMImage',
'vm_images': 'VMImages',
'os_disk_configuration': 'OSDiskConfiguration',
'public_ips': 'PublicIPs',
'public_ip': 'PublicIP',
'supported_os': 'SupportedOS',
'reserved_ip': 'ReservedIP',
'reserved_ips': 'ReservedIPs',
'aad_tenant_id': 'AADTenantID',
'start_ip_address': 'StartIPAddress',
'end_ip_address': 'EndIPAddress',
}
def _get_serialization_name(element_name):
"""converts a Python name into a serializable name"""
known = _KNOWN_SERIALIZATION_XFORMS.get(element_name)
if known is not None:
return known
if element_name.startswith('x_ms_'):
return element_name.replace('_', '-')
if element_name.endswith('_id'):
element_name = element_name.replace('_id', 'ID')
for name in ['content_', 'last_modified', 'if_', 'cache_control']:
if element_name.startswith(name):
element_name = element_name.replace('_', '-_')
return ''.join(name.capitalize() for name in element_name.split('_'))
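# Illustrative conversions for _get_serialization_name, derived from the
# rules above (hypothetical inputs, not vendored test data):
#   'message_id'      -> 'MessageId'       (known xform table)
#   'x_ms_version'    -> 'x-ms-version'    (x_ms_ prefix: underscores to hyphens)
#   'subscription_id' -> 'SubscriptionID'  (known xform table)
#   'label'           -> 'Label'           (default: capitalize each part)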
if sys.version_info < (3,):
_unicode_type = unicode
def _str(value):
if isinstance(value, unicode):
return value.encode('utf-8')
return str(value)
else:
_str = str
_unicode_type = str
def _str_or_none(value):
if value is None:
return None
return _str(value)
def _int_or_none(value):
if value is None:
return None
return str(int(value))
def _bool_or_none(value):
if value is None:
return None
if isinstance(value, bool):
if value:
return 'true'
else:
return 'false'
return str(value)
def _convert_class_to_xml(source, xml_prefix=True):
if source is None:
return ''
xmlstr = ''
if xml_prefix:
xmlstr = '<?xml version="1.0" encoding="utf-8"?>'
if isinstance(source, list):
for value in source:
xmlstr += _convert_class_to_xml(value, False)
elif isinstance(source, WindowsAzureData):
class_name = source.__class__.__name__
xmlstr += '<' + class_name + '>'
for name, value in vars(source).items():
if value is not None:
if isinstance(value, list) or \
isinstance(value, WindowsAzureData):
xmlstr += _convert_class_to_xml(value, False)
else:
xmlstr += ('<' + _get_serialization_name(name) + '>' +
xml_escape(str(value)) + '</' +
_get_serialization_name(name) + '>')
xmlstr += '</' + class_name + '>'
return xmlstr
def _find_namespaces_from_child(parent, child, namespaces):
"""Recursively searches from the parent to the child,
gathering all the applicable namespaces along the way"""
for cur_child in parent.childNodes:
if cur_child is child:
return True
if _find_namespaces_from_child(cur_child, child, namespaces):
# we are the parent node
for key in cur_child.attributes.keys():
if key.startswith('xmlns:') or key == 'xmlns':
namespaces[key] = cur_child.attributes[key]
break
return False
def _find_namespaces(parent, child):
res = {}
for key in parent.documentElement.attributes.keys():
if key.startswith('xmlns:') or key == 'xmlns':
res[key] = parent.documentElement.attributes[key]
_find_namespaces_from_child(parent, child, res)
return res
def _clone_node_with_namespaces(node_to_clone, original_doc):
clone = node_to_clone.cloneNode(True)
for key, value in _find_namespaces(original_doc, node_to_clone).items():
clone.attributes[key] = value
return clone
def _convert_response_to_feeds(response, convert_func):
if response is None:
return None
feeds = _list_of(Feed)
x_ms_continuation = HeaderDict()
for name, value in response.headers:
if 'x-ms-continuation' in name:
x_ms_continuation[name[len('x-ms-continuation') + 1:]] = value
if x_ms_continuation:
setattr(feeds, 'x_ms_continuation', x_ms_continuation)
xmldoc = minidom.parseString(response.body)
xml_entries = _get_children_from_path(xmldoc, 'feed', 'entry')
if not xml_entries:
# in some cases, response contains only entry but no feed
xml_entries = _get_children_from_path(xmldoc, 'entry')
for xml_entry in xml_entries:
new_node = _clone_node_with_namespaces(xml_entry, xmldoc)
feeds.append(convert_func(new_node.toxml('utf-8')))
return feeds
def _convert_xml_to_windows_azure_object(xmlstr, azure_type, include_id=True, use_title_as_id=True):
xmldoc = minidom.parseString(xmlstr)
return_obj = azure_type()
xml_name = azure_type._xml_name if hasattr(azure_type, '_xml_name') else azure_type.__name__
# Only one entry here
for xml_entry in _get_children_from_path(xmldoc,
'entry'):
for node in _get_children_from_path(xml_entry,
'content',
xml_name):
_fill_data_to_return_object(node, return_obj)
for name, value in _get_entry_properties_from_node(xml_entry,
include_id=include_id,
use_title_as_id=use_title_as_id).items():
setattr(return_obj, name, value)
return return_obj
def _validate_type_bytes(param_name, param):
if not isinstance(param, bytes):
raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name))
def _validate_not_none(param_name, param):
if param is None:
raise TypeError(_ERROR_VALUE_NONE.format(param_name))
def _fill_list_of(xmldoc, element_type, xml_element_name):
xmlelements = _get_child_nodes(xmldoc, xml_element_name)
return [_parse_response_body_from_xml_node(xmlelement, element_type) \
for xmlelement in xmlelements]
def _fill_scalar_list_of(xmldoc, element_type, parent_xml_element_name,
xml_element_name):
'''Converts an xml fragment into a list of scalar types. The parent xml
element contains a flat list of xml elements which are converted into the
specified scalar type and added to the list.
Example:
xmldoc=
<Endpoints>
<Endpoint>http://{storage-service-name}.blob.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.queue.core.windows.net/</Endpoint>
<Endpoint>http://{storage-service-name}.table.core.windows.net/</Endpoint>
</Endpoints>
element_type=str
parent_xml_element_name='Endpoints'
xml_element_name='Endpoint'
'''
xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name)
if xmlelements:
xmlelements = _get_child_nodes(xmlelements[0], xml_element_name)
return [_get_node_value(xmlelement, element_type) \
for xmlelement in xmlelements]
def _fill_dict(xmldoc, element_name):
xmlelements = _get_child_nodes(xmldoc, element_name)
if xmlelements:
return_obj = {}
for child in xmlelements[0].childNodes:
if child.firstChild:
return_obj[child.nodeName] = child.firstChild.nodeValue
return return_obj
def _fill_dict_of(xmldoc, parent_xml_element_name, pair_xml_element_name,
key_xml_element_name, value_xml_element_name):
'''Converts an xml fragment into a dictionary. The parent xml element
contains a list of xml elements where each element has a child element for
the key, and another for the value.
Example:
xmldoc=
<ExtendedProperties>
<ExtendedProperty>
<Name>Ext1</Name>
<Value>Val1</Value>
</ExtendedProperty>
<ExtendedProperty>
<Name>Ext2</Name>
<Value>Val2</Value>
</ExtendedProperty>
</ExtendedProperties>
element_type=str
parent_xml_element_name='ExtendedProperties'
pair_xml_element_name='ExtendedProperty'
key_xml_element_name='Name'
value_xml_element_name='Value'
'''
return_obj = {}
xmlelements = _get_child_nodes(xmldoc, parent_xml_element_name)
if xmlelements:
xmlelements = _get_child_nodes(xmlelements[0], pair_xml_element_name)
for pair in xmlelements:
keys = _get_child_nodes(pair, key_xml_element_name)
values = _get_child_nodes(pair, value_xml_element_name)
if keys and values:
key = keys[0].firstChild.nodeValue
value = values[0].firstChild.nodeValue
return_obj[key] = value
return return_obj
def _fill_instance_child(xmldoc, element_name, return_type):
'''Converts a child of the current dom element to the specified type.
'''
xmlelements = _get_child_nodes(
xmldoc, _get_serialization_name(element_name))
if not xmlelements:
return None
return_obj = return_type()
_fill_data_to_return_object(xmlelements[0], return_obj)
return return_obj
def _fill_instance_element(element, return_type):
"""Converts a DOM element into the specified object"""
return _parse_response_body_from_xml_node(element, return_type)
def _fill_data_minidom(xmldoc, element_name, data_member):
xmlelements = _get_child_nodes(
xmldoc, _get_serialization_name(element_name))
if not xmlelements or not xmlelements[0].childNodes:
return None
value = xmlelements[0].firstChild.nodeValue
if data_member is None:
return value
elif isinstance(data_member, datetime):
return _to_datetime(value)
elif type(data_member) is bool:
return value.lower() != 'false'
else:
return type(data_member)(value)
def _get_node_value(xmlelement, data_type):
value = xmlelement.firstChild.nodeValue
if data_type is datetime:
return _to_datetime(value)
elif data_type is bool:
return value.lower() != 'false'
else:
return data_type(value)
def _get_request_body_bytes_only(param_name, param_value):
'''Validates the request body passed in and converts it to bytes
if our policy allows it.'''
if param_value is None:
return b''
if isinstance(param_value, bytes):
return param_value
# Previous versions of the SDK allowed data types other than bytes to be
# passed in, and they would be auto-converted to bytes. We preserve this
# behavior when running under 2.7, but issue a warning.
# Python 3 support is new, so we reject anything that's not bytes.
if sys.version_info < (3,):
warnings.warn(_WARNING_VALUE_SHOULD_BE_BYTES.format(param_name))
return _get_request_body(param_value)
raise TypeError(_ERROR_VALUE_SHOULD_BE_BYTES.format(param_name))
def _get_request_body(request_body):
'''Converts an object into a request body. If it's None
we return an empty byte string; if it's one of our objects we
convert it to XML and return that. Otherwise we just use the
object directly'''
if request_body is None:
return b''
if isinstance(request_body, WindowsAzureData):
request_body = _convert_class_to_xml(request_body)
if isinstance(request_body, bytes):
return request_body
if isinstance(request_body, _unicode_type):
return request_body.encode('utf-8')
request_body = str(request_body)
if isinstance(request_body, _unicode_type):
return request_body.encode('utf-8')
return request_body
def _parse_enum_results_list(response, return_type, resp_type, item_type):
"""resp_body is the XML we received
resp_type is a string, such as Containers,
return_type is the type we're constructing, such as ContainerEnumResults
item_type is the type object of the item to be created, such as Container
This function then returns a ContainerEnumResults object with the
containers member populated with the results.
"""
# parsing something like:
# <EnumerationResults ... >
# <Queues>
# <Queue>
# <Something />
# <SomethingElse />
# </Queue>
# </Queues>
# </EnumerationResults>
respbody = response.body
return_obj = return_type()
doc = minidom.parseString(respbody)
items = []
for enum_results in _get_child_nodes(doc, 'EnumerationResults'):
# path is something like Queues, Queue
for child in _get_children_from_path(enum_results,
resp_type,
resp_type[:-1]):
items.append(_fill_instance_element(child, item_type))
for name, value in vars(return_obj).items():
# queues, Queues: this is the list itself, which we populated
# above
if name == resp_type.lower():
# the list itself.
continue
value = _fill_data_minidom(enum_results, name, value)
if value is not None:
setattr(return_obj, name, value)
setattr(return_obj, resp_type.lower(), items)
return return_obj
def _parse_simple_list(response, type, item_type, list_name):
respbody = response.body
res = type()
res_items = []
doc = minidom.parseString(respbody)
type_name = type.__name__
item_name = item_type.__name__
for item in _get_children_from_path(doc, type_name, item_name):
res_items.append(_fill_instance_element(item, item_type))
setattr(res, list_name, res_items)
return res
def _parse_response(response, return_type):
'''
Parse the HTTPResponse's body and fill all the data into a class of
return_type.
'''
return _parse_response_body_from_xml_text(response.body, return_type)
def _parse_service_resources_response(response, return_type):
'''
Parse the HTTPResponse's body and fill all the data into a class of
return_type.
'''
return _parse_response_body_from_service_resources_xml_text(response.body, return_type)
def _fill_data_to_return_object(node, return_obj):
members = dict(vars(return_obj))
for name, value in members.items():
if isinstance(value, _list_of):
setattr(return_obj,
name,
_fill_list_of(node,
value.list_type,
value.xml_element_name))
elif isinstance(value, _scalar_list_of):
setattr(return_obj,
name,
_fill_scalar_list_of(node,
value.list_type,
_get_serialization_name(name),
value.xml_element_name))
elif isinstance(value, _dict_of):
setattr(return_obj,
name,
_fill_dict_of(node,
_get_serialization_name(name),
value.pair_xml_element_name,
value.key_xml_element_name,
value.value_xml_element_name))
elif isinstance(value, _xml_attribute):
real_value = None
if node.hasAttribute(value.xml_element_name):
real_value = node.getAttribute(value.xml_element_name)
if real_value is not None:
setattr(return_obj, name, real_value)
elif isinstance(value, WindowsAzureData):
setattr(return_obj,
name,
_fill_instance_child(node, name, value.__class__))
elif isinstance(value, dict):
setattr(return_obj,
name,
_fill_dict(node, _get_serialization_name(name)))
elif isinstance(value, _Base64String):
value = _fill_data_minidom(node, name, '')
if value is not None:
value = _decode_base64_to_text(value)
# always set the attribute, so we don't end up returning an object
# with type _Base64String
setattr(return_obj, name, value)
else:
value = _fill_data_minidom(node, name, value)
if value is not None:
setattr(return_obj, name, value)
def _parse_response_body_from_xml_node(node, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
return_obj = return_type()
_fill_data_to_return_object(node, return_obj)
return return_obj
def _parse_response_body_from_xml_text(respbody, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
doc = minidom.parseString(respbody)
return_obj = return_type()
xml_name = return_type._xml_name if hasattr(return_type, '_xml_name') else return_type.__name__
for node in _get_child_nodes(doc, xml_name):
_fill_data_to_return_object(node, return_obj)
return return_obj
def _parse_response_body_from_service_resources_xml_text(respbody, return_type):
'''
parse the xml and fill all the data into a class of return_type
'''
doc = minidom.parseString(respbody)
return_obj = _list_of(return_type)
for node in _get_children_from_path(doc, "ServiceResources", "ServiceResource"):
local_obj = return_type()
_fill_data_to_return_object(node, local_obj)
return_obj.append(local_obj)
return return_obj
class _dict_of(dict):
"""a dict which carries with it the xml element names for key,val.
Used for deserializaion and construction of the lists"""
def __init__(self, pair_xml_element_name, key_xml_element_name,
value_xml_element_name):
self.pair_xml_element_name = pair_xml_element_name
self.key_xml_element_name = key_xml_element_name
self.value_xml_element_name = value_xml_element_name
super(_dict_of, self).__init__()
class _list_of(list):
"""a list which carries with it the type that's expected to go in it.
Used for deserializaion and construction of the lists"""
def __init__(self, list_type, xml_element_name=None):
self.list_type = list_type
if xml_element_name is None:
self.xml_element_name = list_type.__name__
else:
self.xml_element_name = xml_element_name
super(_list_of, self).__init__()
class _scalar_list_of(list):
"""a list of scalar types which carries with it the type that's
expected to go in it along with its xml element name.
Used for deserializaion and construction of the lists"""
def __init__(self, list_type, xml_element_name):
self.list_type = list_type
self.xml_element_name = xml_element_name
super(_scalar_list_of, self).__init__()
class _xml_attribute:
"""a accessor to XML attributes
expected to go in it along with its xml element name.
Used for deserialization and construction"""
def __init__(self, xml_element_name):
self.xml_element_name = xml_element_name
def _update_request_uri_query_local_storage(request, use_local_storage):
''' create correct uri and query for the request '''
uri, query = _update_request_uri_query(request)
if use_local_storage:
return '/' + DEV_ACCOUNT_NAME + uri, query
return uri, query
def _update_request_uri_query(request):
'''pulls the query string out of the URI and moves it into
the query portion of the request object. If there are already
query parameters on the request the parameters in the URI will
appear after the existing parameters'''
if '?' in request.path:
request.path, _, query_string = request.path.partition('?')
if query_string:
query_params = query_string.split('&')
for query in query_params:
if '=' in query:
name, _, value = query.partition('=')
request.query.append((name, value))
request.path = url_quote(request.path, '/()$=\',')
# add encoded queries to request.path.
if request.query:
request.path += '?'
for name, value in request.query:
if value is not None:
request.path += name + '=' + url_quote(value, '/()$=\',') + '&'
request.path = request.path[:-1]
return request.path, request.query
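# Worked example (hypothetical request): with request.path set to
# '/mytable?timeout=30' and request.query already holding
# [('NextRowKey', '5')], the function above leaves
# request.query == [('NextRowKey', '5'), ('timeout', '30')] and
# request.path == '/mytable?NextRowKey=5&timeout=30', so URI parameters
# land after the pre-existing ones, as the docstring promises.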
def _dont_fail_on_exist(error):
''' don't throw exception if the resource exists.
This is called by create_* APIs with fail_on_exist=False'''
if isinstance(error, WindowsAzureConflictError):
return False
else:
raise error
def _dont_fail_not_exist(error):
''' don't throw exception if the resource doesn't exist.
This is called by delete_* APIs with fail_not_exist=False'''
if isinstance(error, WindowsAzureMissingResourceError):
return False
else:
raise error
def _general_error_handler(http_error):
''' Simple error handler for azure.'''
if http_error.status == 409:
raise WindowsAzureConflictError(
_ERROR_CONFLICT.format(str(http_error)))
elif http_error.status == 404:
raise WindowsAzureMissingResourceError(
_ERROR_NOT_FOUND.format(str(http_error)))
else:
if http_error.respbody is not None:
raise WindowsAzureError(
_ERROR_UNKNOWN.format(str(http_error)) + '\n' + \
http_error.respbody.decode('utf-8'))
else:
raise WindowsAzureError(_ERROR_UNKNOWN.format(str(http_error)))
def _parse_response_for_dict(response):
''' Extracts name-values from the response header. Filters out the
standard http headers.'''
if response is None:
return None
http_headers = ['server', 'date', 'location', 'host',
'via', 'proxy-connection', 'connection']
return_dict = HeaderDict()
if response.headers:
for name, value in response.headers:
if not name.lower() in http_headers:
return_dict[name] = value
return return_dict
def _parse_response_for_dict_prefix(response, prefixes):
''' Extracts name-values for names starting with prefix from the response
header. Filters out the standard http headers.'''
if response is None:
return None
return_dict = {}
orig_dict = _parse_response_for_dict(response)
if orig_dict:
for name, value in orig_dict.items():
for prefix_value in prefixes:
if name.lower().startswith(prefix_value.lower()):
return_dict[name] = value
break
return return_dict
else:
return None
def _parse_response_for_dict_filter(response, filter):
''' Extracts name-values for names in filter from the response header.
Filters out the standard http headers.'''
if response is None:
return None
return_dict = {}
orig_dict = _parse_response_for_dict(response)
if orig_dict:
for name, value in orig_dict.items():
if name.lower() in filter:
return_dict[name] = value
return return_dict
else:
return None
def _sign_string(key, string_to_sign, key_is_base64=True):
if key_is_base64:
key = _decode_base64_to_bytes(key)
else:
if isinstance(key, _unicode_type):
key = key.encode('utf-8')
if isinstance(string_to_sign, _unicode_type):
string_to_sign = string_to_sign.encode('utf-8')
signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)
digest = signed_hmac_sha256.digest()
encoded_digest = _encode_base64(digest)
return encoded_digest
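To illustrate the signing helper above: _sign_string base64-decodes the account key, computes an HMAC-SHA256 over the canonicalized string, and base64-encodes the digest. The key and string-to-sign below are throwaway placeholders, not real credentials:

fake_key = _encode_base64(b'not-a-real-key')   # account keys arrive base64-encoded
sig = _sign_string(fake_key, 'GET\n\n\nx-ms-date:placeholder')
# sig is the base64 digest that goes into the Authorization header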

View File

@ -1,81 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{25b2c65a-0553-4452-8907-8b5b17544e68}</ProjectGuid>
<ProjectHome>
</ProjectHome>
<StartupFile>storage\blobservice.py</StartupFile>
<SearchPath>..</SearchPath>
<WorkingDirectory>.</WorkingDirectory>
<OutputPath>.</OutputPath>
<Name>azure</Name>
<RootNamespace>azure</RootNamespace>
<IsWindowsApplication>False</IsWindowsApplication>
<LaunchProvider>Standard Python launcher</LaunchProvider>
<CommandLineArguments />
<InterpreterPath />
<InterpreterArguments />
<InterpreterId>{2af0f10d-7135-4994-9156-5d01c9c11b7e}</InterpreterId>
<InterpreterVersion>2.7</InterpreterVersion>
<SccProjectName>SAK</SccProjectName>
<SccProvider>SAK</SccProvider>
<SccAuxPath>SAK</SccAuxPath>
<SccLocalPath>SAK</SccLocalPath>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
<DebugSymbols>true</DebugSymbols>
<EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Release' ">
<DebugSymbols>true</DebugSymbols>
<EnableUnmanagedDebugging>false</EnableUnmanagedDebugging>
</PropertyGroup>
<ItemGroup>
<Compile Include="http\batchclient.py" />
<Compile Include="http\httpclient.py" />
<Compile Include="http\requestsclient.py" />
<Compile Include="http\winhttp.py" />
<Compile Include="http\__init__.py" />
<Compile Include="servicemanagement\schedulermanagementservice.py" />
<Compile Include="servicemanagement\servicebusmanagementservice.py" />
<Compile Include="servicemanagement\servicemanagementclient.py" />
<Compile Include="servicemanagement\servicemanagementservice.py" />
<Compile Include="servicemanagement\sqldatabasemanagementservice.py" />
<Compile Include="servicemanagement\websitemanagementservice.py" />
<Compile Include="servicemanagement\__init__.py" />
<Compile Include="servicebus\servicebusservice.py" />
<Compile Include="storage\blobservice.py" />
<Compile Include="storage\queueservice.py" />
<Compile Include="storage\cloudstorageaccount.py" />
<Compile Include="storage\tableservice.py" />
<Compile Include="storage\sharedaccesssignature.py" />
<Compile Include="__init__.py" />
<Compile Include="servicebus\__init__.py" />
<Compile Include="storage\storageclient.py" />
<Compile Include="storage\__init__.py" />
</ItemGroup>
<ItemGroup>
<Folder Include="http" />
<Folder Include="servicemanagement" />
<Folder Include="servicebus" />
<Folder Include="storage" />
</ItemGroup>
<ItemGroup>
<InterpreterReference Include="{2af0f10d-7135-4994-9156-5d01c9c11b7e}\2.6" />
<InterpreterReference Include="{2af0f10d-7135-4994-9156-5d01c9c11b7e}\2.7" />
<InterpreterReference Include="{2af0f10d-7135-4994-9156-5d01c9c11b7e}\3.3" />
<InterpreterReference Include="{2af0f10d-7135-4994-9156-5d01c9c11b7e}\3.4" />
<InterpreterReference Include="{9a7a9026-48c1-4688-9d5d-e5699d47d074}\2.7" />
<InterpreterReference Include="{9a7a9026-48c1-4688-9d5d-e5699d47d074}\3.3" />
<InterpreterReference Include="{9a7a9026-48c1-4688-9d5d-e5699d47d074}\3.4" />
</ItemGroup>
<PropertyGroup>
<VisualStudioVersion Condition="'$(VisualStudioVersion)' == ''">10.0</VisualStudioVersion>
<VSToolsPath Condition="'$(VSToolsPath)' == ''">$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)</VSToolsPath>
<PtvsTargetsFile>$(VSToolsPath)\Python Tools\Microsoft.PythonTools.targets</PtvsTargetsFile>
</PropertyGroup>
<Import Condition="Exists($(PtvsTargetsFile))" Project="$(PtvsTargetsFile)" />
<Import Condition="!Exists($(PtvsTargetsFile))" Project="$(MSBuildToolsPath)\Microsoft.Common.targets" />
</Project>

View File

@ -1,73 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
HTTP_RESPONSE_NO_CONTENT = 204
class HTTPError(Exception):
''' HTTP Exception when response status code >= 300 '''
def __init__(self, status, message, respheader, respbody):
'''Creates a new HTTPError with the specified status, message,
response headers and body'''
self.status = status
self.respheader = respheader
self.respbody = respbody
Exception.__init__(self, message)
class HTTPResponse(object):
"""Represents a response from an HTTP request. An HTTPResponse has the
following attributes:
status: the status code of the response
message: the message
headers: the returned headers, as a list of (name, value) pairs
body: the body of the response
"""
def __init__(self, status, message, headers, body):
self.status = status
self.message = message
self.headers = headers
self.body = body
class HTTPRequest(object):
'''Represents an HTTP Request. An HTTP Request consists of the following
attributes:
host: the host name to connect to
method: the method to use to connect (string such as GET, POST, PUT, etc.)
path: the uri fragment
query: query parameters specified as a list of (name, value) pairs
headers: header values specified as (name, value) pairs
body: the body of the request.
protocol_override:
specify to use this protocol instead of the global one stored in
_HTTPClient.
'''
def __init__(self):
self.host = ''
self.method = ''
self.path = ''
self.query = [] # list of (name, value)
self.headers = [] # list of (header name, header value)
self.body = ''
self.protocol_override = None
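A small sketch of populating the request object above (host, path, and header values are placeholders):

req = HTTPRequest()
req.method = 'GET'
req.host = 'example.blob.core.windows.net'
req.path = '/mycontainer?comp=list'
req.headers.append(('x-ms-version', '2014-02-14'))
# _update_request_uri_query(req) would then split '?comp=list' out of
# req.path and append ('comp', 'list') to req.query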

View File

@ -1,339 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import sys
import uuid
from azure import (
_update_request_uri_query,
WindowsAzureError,
WindowsAzureBatchOperationError,
_get_children_from_path,
url_unquote,
_ERROR_CANNOT_FIND_PARTITION_KEY,
_ERROR_CANNOT_FIND_ROW_KEY,
_ERROR_INCORRECT_TABLE_IN_BATCH,
_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH,
_ERROR_DUPLICATE_ROW_KEY_IN_BATCH,
_ERROR_BATCH_COMMIT_FAIL,
)
from azure.http import HTTPError, HTTPRequest, HTTPResponse
from azure.http.httpclient import _HTTPClient
from azure.storage import (
_update_storage_table_header,
METADATA_NS,
_sign_storage_table_request,
)
from xml.dom import minidom
_DATASERVICES_NS = 'http://schemas.microsoft.com/ado/2007/08/dataservices'
if sys.version_info < (3,):
def _new_boundary():
return str(uuid.uuid1())
else:
def _new_boundary():
return str(uuid.uuid1()).encode('utf-8')
class _BatchClient(_HTTPClient):
'''
This is the class used for batch operations on the storage table
service. It only supports one changeset.
'''
def __init__(self, service_instance, account_key, account_name,
protocol='http'):
_HTTPClient.__init__(self, service_instance, account_name=account_name,
account_key=account_key, protocol=protocol)
self.is_batch = False
self.batch_requests = []
self.batch_table = ''
self.batch_partition_key = ''
self.batch_row_keys = []
def get_request_table(self, request):
'''
Extracts table name from request.uri. The request.uri has either
"/mytable(...)" or "/mytable" format.
request: the request to insert, update or delete entity
'''
if '(' in request.path:
pos = request.path.find('(')
return request.path[1:pos]
else:
return request.path[1:]
def get_request_partition_key(self, request):
'''
Extracts PartitionKey from request.body if it is a POST request or from
request.path if it is not a POST request. Only an insert operation
request is a POST request, and the PartitionKey is in the request body.
request: the request to insert, update or delete entity
'''
if request.method == 'POST':
doc = minidom.parseString(request.body)
part_key = _get_children_from_path(
doc, 'entry', 'content', (METADATA_NS, 'properties'),
(_DATASERVICES_NS, 'PartitionKey'))
if not part_key:
raise WindowsAzureError(_ERROR_CANNOT_FIND_PARTITION_KEY)
return part_key[0].firstChild.nodeValue
else:
uri = url_unquote(request.path)
pos1 = uri.find('PartitionKey=\'')
pos2 = uri.find('\',', pos1)
if pos1 == -1 or pos2 == -1:
raise WindowsAzureError(_ERROR_CANNOT_FIND_PARTITION_KEY)
return uri[pos1 + len('PartitionKey=\''):pos2]
def get_request_row_key(self, request):
'''
Extracts RowKey from request.body if it is a POST request or from
request.path if it is not a POST request. Only an insert operation
request is a POST request, and the RowKey is in the request body.
request: the request to insert, update or delete entity
'''
if request.method == 'POST':
doc = minidom.parseString(request.body)
row_key = _get_children_from_path(
doc, 'entry', 'content', (METADATA_NS, 'properties'),
(_DATASERVICES_NS, 'RowKey'))
if not row_key:
raise WindowsAzureError(_ERROR_CANNOT_FIND_ROW_KEY)
return row_key[0].firstChild.nodeValue
else:
uri = url_unquote(request.path)
pos1 = uri.find('RowKey=\'')
pos2 = uri.find('\')', pos1)
if pos1 == -1 or pos2 == -1:
raise WindowsAzureError(_ERROR_CANNOT_FIND_ROW_KEY)
row_key = uri[pos1 + len('RowKey=\''):pos2]
return row_key
def validate_request_table(self, request):
'''
Validates that all requests have the same table name. Sets the table
name if it is the first request for the batch operation.
request: the request to insert, update or delete entity
'''
if self.batch_table:
if self.get_request_table(request) != self.batch_table:
raise WindowsAzureError(_ERROR_INCORRECT_TABLE_IN_BATCH)
else:
self.batch_table = self.get_request_table(request)
def validate_request_partition_key(self, request):
'''
Validates that all requests have the same PartitionKey. Sets the
PartitionKey if it is the first request for the batch operation.
request: the request to insert, update or delete entity
'''
if self.batch_partition_key:
if self.get_request_partition_key(request) != \
self.batch_partition_key:
raise WindowsAzureError(_ERROR_INCORRECT_PARTITION_KEY_IN_BATCH)
else:
self.batch_partition_key = self.get_request_partition_key(request)
def validate_request_row_key(self, request):
'''
Validates that all requests have different RowKeys and adds each RowKey
to the existing RowKey list.
request: the request to insert, update or delete entity
'''
if self.batch_row_keys:
if self.get_request_row_key(request) in self.batch_row_keys:
raise WindowsAzureError(_ERROR_DUPLICATE_ROW_KEY_IN_BATCH)
else:
self.batch_row_keys.append(self.get_request_row_key(request))
def begin_batch(self):
'''
Starts the batch operation. Initializes the batch variables:
is_batch: batch operation flag.
batch_table: the table name of the batch operation
batch_partition_key: the PartitionKey of the batch requests.
batch_row_keys: the RowKey list of adding requests.
batch_requests: the list of the requests.
'''
self.is_batch = True
self.batch_table = ''
self.batch_partition_key = ''
self.batch_row_keys = []
self.batch_requests = []
def insert_request_to_batch(self, request):
'''
Adds request to batch operation.
request: the request to insert, update or delete entity
'''
self.validate_request_table(request)
self.validate_request_partition_key(request)
self.validate_request_row_key(request)
self.batch_requests.append(request)
def commit_batch(self):
''' Resets batch flag and commits the batch requests. '''
if self.is_batch:
self.is_batch = False
self.commit_batch_requests()
def commit_batch_requests(self):
''' Commits the batch requests. '''
batch_boundary = b'batch_' + _new_boundary()
changeset_boundary = b'changeset_' + _new_boundary()
# Commits the batch only if the requests list is not empty.
if self.batch_requests:
request = HTTPRequest()
request.method = 'POST'
request.host = self.batch_requests[0].host
request.path = '/$batch'
request.headers = [
('Content-Type', 'multipart/mixed; boundary=' + \
batch_boundary.decode('utf-8')),
('Accept', 'application/atom+xml,application/xml'),
('Accept-Charset', 'UTF-8')]
request.body = b'--' + batch_boundary + b'\n'
request.body += b'Content-Type: multipart/mixed; boundary='
request.body += changeset_boundary + b'\n\n'
content_id = 1
# Adds each request body to the POST data.
for batch_request in self.batch_requests:
request.body += b'--' + changeset_boundary + b'\n'
request.body += b'Content-Type: application/http\n'
request.body += b'Content-Transfer-Encoding: binary\n\n'
request.body += batch_request.method.encode('utf-8')
request.body += b' http://'
request.body += batch_request.host.encode('utf-8')
request.body += batch_request.path.encode('utf-8')
request.body += b' HTTP/1.1\n'
request.body += b'Content-ID: '
request.body += str(content_id).encode('utf-8') + b'\n'
content_id += 1
# Add different headers for different type requests.
if not batch_request.method == 'DELETE':
request.body += \
b'Content-Type: application/atom+xml;type=entry\n'
for name, value in batch_request.headers:
if name == 'If-Match':
request.body += name.encode('utf-8') + b': '
request.body += value.encode('utf-8') + b'\n'
break
request.body += b'Content-Length: '
request.body += str(len(batch_request.body)).encode('utf-8')
request.body += b'\n\n'
request.body += batch_request.body + b'\n'
else:
for name, value in batch_request.headers:
# If-Match should be already included in
# batch_request.headers, but in case it is missing,
# just add it.
if name == 'If-Match':
request.body += name.encode('utf-8') + b': '
request.body += value.encode('utf-8') + b'\n\n'
break
else:
request.body += b'If-Match: *\n\n'
request.body += b'--' + changeset_boundary + b'--' + b'\n'
request.body += b'--' + batch_boundary + b'--'
request.path, request.query = _update_request_uri_query(request)
request.headers = _update_storage_table_header(request)
auth = _sign_storage_table_request(request,
self.account_name,
self.account_key)
request.headers.append(('Authorization', auth))
# Submit the whole request as batch request.
response = self.perform_request(request)
if response.status >= 300:
raise HTTPError(response.status,
_ERROR_BATCH_COMMIT_FAIL,
self.respheader,
response.body)
# http://www.odata.org/documentation/odata-version-2-0/batch-processing/
# The body of a ChangeSet response is either a response for all the
# successfully processed change request within the ChangeSet,
# formatted exactly as it would have appeared outside of a batch,
# or a single response indicating a failure of the entire ChangeSet.
responses = self._parse_batch_response(response.body)
if responses and responses[0].status >= 300:
self._report_batch_error(responses[0])
def cancel_batch(self):
''' Resets the batch flag. '''
self.is_batch = False
def _parse_batch_response(self, body):
parts = body.split(b'--changesetresponse_')
responses = []
for part in parts:
httpLocation = part.find(b'HTTP/')
if httpLocation > 0:
response = self._parse_batch_response_part(part[httpLocation:])
responses.append(response)
return responses
def _parse_batch_response_part(self, part):
lines = part.splitlines()
# First line is the HTTP status/reason
status, _, reason = lines[0].partition(b' ')[2].partition(b' ')
# Followed by headers and body
headers = []
body = b''
isBody = False
for line in lines[1:]:
if line == b'' and not isBody:
isBody = True
elif isBody:
body += line
else:
headerName, _, headerVal = line.partition(b':')
headers.append((headerName.lower(), headerVal))
return HTTPResponse(int(status), reason.strip(), headers, body)
def _report_batch_error(self, response):
xml = response.body.decode('utf-8')
doc = minidom.parseString(xml)
n = _get_children_from_path(doc, (METADATA_NS, 'error'), 'code')
code = n[0].firstChild.nodeValue if n and n[0].firstChild else ''
n = _get_children_from_path(doc, (METADATA_NS, 'error'), 'message')
message = n[0].firstChild.nodeValue if n and n[0].firstChild else xml
raise WindowsAzureBatchOperationError(message, code)
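A hedged sketch of the intended call pattern for _BatchClient above (client construction and the entity requests themselves are elided; entity_requests stands in for prepared insert/update/delete HTTPRequest objects):

client.begin_batch()                          # reset batch state
for request in entity_requests:
    client.insert_request_to_batch(request)   # validates table, partition and row keys
client.commit_batch()                         # builds the multipart body, POSTs /$batch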

View File

@ -1,251 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import base64
import os
import sys
if sys.version_info < (3,):
from httplib import (
HTTPSConnection,
HTTPConnection,
HTTP_PORT,
HTTPS_PORT,
)
from urlparse import urlparse
else:
from http.client import (
HTTPSConnection,
HTTPConnection,
HTTP_PORT,
HTTPS_PORT,
)
from urllib.parse import urlparse
from azure.http import HTTPError, HTTPResponse
from azure import _USER_AGENT_STRING, _update_request_uri_query
DEBUG_REQUESTS = False
DEBUG_RESPONSES = False
class _HTTPClient(object):
'''
Takes the request, sends it to the cloud service and returns the response.
'''
def __init__(self, service_instance, cert_file=None, account_name=None,
account_key=None, protocol='https', request_session=None):
'''
service_instance: service client instance.
cert_file:
certificate file name/location. This is only used in hosted
service management.
account_name: the storage account.
account_key:
the storage account access key.
request_session:
session object created with requests library (or compatible).
'''
self.service_instance = service_instance
self.status = None
self.respheader = None
self.message = None
self.cert_file = cert_file
self.account_name = account_name
self.account_key = account_key
self.protocol = protocol
self.proxy_host = None
self.proxy_port = None
self.proxy_user = None
self.proxy_password = None
self.request_session = request_session
if request_session:
self.use_httplib = True
else:
self.use_httplib = self.should_use_httplib()
def should_use_httplib(self):
if sys.platform.lower().startswith('win') and self.cert_file:
# On Windows, auto-detect between Windows Store Certificate
# (winhttp) and OpenSSL .pem certificate file (httplib).
#
# We used to only support certificates installed in the Windows
# Certificate Store.
# cert_file example: CURRENT_USER\my\CertificateName
#
# We now support using an OpenSSL .pem certificate file,
# for a consistent experience across all platforms.
# cert_file example: account\certificate.pem
#
# When using OpenSSL .pem certificate file on Windows, make sure
# you are on CPython 2.7.4 or later.
# If it's not an existing file on disk, then treat it as a path in
# the Windows Certificate Store, which means we can't use httplib.
if not os.path.isfile(self.cert_file):
return False
return True
def set_proxy(self, host, port, user, password):
'''
Sets the proxy server host and port for the HTTP CONNECT Tunnelling.
host: Address of the proxy. Ex: '192.168.0.100'
port: Port of the proxy. Ex: 6000
user: User for proxy authorization.
password: Password for proxy authorization.
'''
self.proxy_host = host
self.proxy_port = port
self.proxy_user = user
self.proxy_password = password
def get_uri(self, request):
''' Return the target uri for the request.'''
protocol = request.protocol_override \
if request.protocol_override else self.protocol
port = HTTP_PORT if protocol == 'http' else HTTPS_PORT
return protocol + '://' + request.host + ':' + str(port) + request.path
def get_connection(self, request):
''' Create connection for the request. '''
protocol = request.protocol_override \
if request.protocol_override else self.protocol
target_host = request.host
target_port = HTTP_PORT if protocol == 'http' else HTTPS_PORT
if self.request_session:
import azure.http.requestsclient
connection = azure.http.requestsclient._RequestsConnection(
target_host, protocol, self.request_session)
#TODO: proxy stuff
elif not self.use_httplib:
import azure.http.winhttp
connection = azure.http.winhttp._HTTPConnection(
target_host, cert_file=self.cert_file, protocol=protocol)
proxy_host = self.proxy_host
proxy_port = self.proxy_port
else:
if ':' in target_host:
target_host, _, target_port = target_host.rpartition(':')
if self.proxy_host:
proxy_host = target_host
proxy_port = target_port
host = self.proxy_host
port = self.proxy_port
else:
host = target_host
port = target_port
if protocol == 'http':
connection = HTTPConnection(host, int(port))
else:
connection = HTTPSConnection(
host, int(port), cert_file=self.cert_file)
if self.proxy_host:
headers = None
if self.proxy_user and self.proxy_password:
auth = base64.encodestring(
"{0}:{1}".format(self.proxy_user, self.proxy_password))
headers = {'Proxy-Authorization': 'Basic {0}'.format(auth)}
connection.set_tunnel(proxy_host, int(proxy_port), headers)
return connection
def send_request_headers(self, connection, request_headers):
if self.use_httplib:
if self.proxy_host:
for i in connection._buffer:
if i.startswith("Host: "):
connection._buffer.remove(i)
connection.putheader(
'Host', "{0}:{1}".format(connection._tunnel_host,
connection._tunnel_port))
for name, value in request_headers:
if value:
connection.putheader(name, value)
connection.putheader('User-Agent', _USER_AGENT_STRING)
connection.endheaders()
def send_request_body(self, connection, request_body):
if request_body:
assert isinstance(request_body, bytes)
connection.send(request_body)
elif (not isinstance(connection, HTTPSConnection) and
not isinstance(connection, HTTPConnection)):
connection.send(None)
def perform_request(self, request):
''' Sends request to cloud service server and return the response. '''
connection = self.get_connection(request)
try:
connection.putrequest(request.method, request.path)
if not self.use_httplib:
if self.proxy_host and self.proxy_user:
connection.set_proxy_credentials(
self.proxy_user, self.proxy_password)
self.send_request_headers(connection, request.headers)
self.send_request_body(connection, request.body)
if DEBUG_REQUESTS and request.body:
print('request:')
try:
print(request.body)
except:
pass
resp = connection.getresponse()
self.status = int(resp.status)
self.message = resp.reason
self.respheader = headers = resp.getheaders()
# for consistency across platforms, make header names lowercase
for i, value in enumerate(headers):
headers[i] = (value[0].lower(), value[1])
respbody = None
if resp.length is None:
respbody = resp.read()
elif resp.length > 0:
respbody = resp.read(resp.length)
if DEBUG_RESPONSES and respbody:
print('response:')
try:
print(respbody)
except:
pass
response = HTTPResponse(
int(resp.status), resp.reason, headers, respbody)
if self.status == 307:
new_url = urlparse(dict(headers)['location'])
request.host = new_url.hostname
request.path = new_url.path
request.path, request.query = _update_request_uri_query(request)
return self.perform_request(request)
if self.status >= 300:
raise HTTPError(self.status, self.message,
self.respheader, respbody)
return response
finally:
connection.close()
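For reference, a minimal sketch of driving _HTTPClient directly; the account name, key and proxy values are placeholders, and service_instance is left as None since this class only stores it:
from azure.http.httpclient import _HTTPClient
client = _HTTPClient(None, account_name='myaccount', account_key='<key>')  # placeholders
client.set_proxy('192.168.0.100', 6000, 'user', 'password')  # HTTP CONNECT tunnelling
Requests then flow through perform_request, which follows 307 redirects and raises HTTPError for any status >= 300.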

View File

@ -1,74 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
class _Response(object):
''' Response class corresponding to the response returned from httplib
HTTPConnection. '''
def __init__(self, response):
self.status = response.status_code
self.reason = response.reason
self.respbody = response.content
self.length = len(response.content)
self.headers = []
for key, name in response.headers.items():
self.headers.append((key.lower(), name))
def getheaders(self):
'''Returns response headers.'''
return self.headers
def read(self, _length):
'''Returns response body. '''
return self.respbody[:_length]
class _RequestsConnection(object):
def __init__(self, host, protocol, session):
self.host = host
self.protocol = protocol
self.session = session
self.headers = {}
self.method = None
self.body = None
self.response = None
self.uri = None
def close(self):
pass
def set_tunnel(self, host, port=None, headers=None):
pass
def set_proxy_credentials(self, user, password):
pass
def putrequest(self, method, uri):
self.method = method
self.uri = self.protocol + '://' + self.host + uri
def putheader(self, name, value):
self.headers[name] = value
def endheaders(self):
pass
def send(self, request_body):
self.response = self.session.request(self.method, self.uri, data=request_body, headers=self.headers)
def getresponse(self):
return _Response(self.response)
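A short usage sketch of the requests-backed connection above, assuming the requests library is installed and network access is available; example.com is a placeholder host:
import requests
from azure.http.requestsclient import _RequestsConnection
conn = _RequestsConnection('example.com', 'https', requests.Session())
conn.putrequest('GET', '/')
conn.putheader('Accept', '*/*')
conn.endheaders()  # no-op; kept for httplib interface parity
conn.send(None)    # performs the actual session.request call
resp = conn.getresponse()
print(resp.status, resp.reason)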

View File

@ -1,471 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from ctypes import (
c_void_p,
c_long,
c_ulong,
c_longlong,
c_ulonglong,
c_short,
c_ushort,
c_wchar_p,
c_byte,
byref,
Structure,
Union,
POINTER,
WINFUNCTYPE,
HRESULT,
oledll,
WinDLL,
)
import ctypes
import sys
if sys.version_info >= (3,):
def unicode(text):
return text
#------------------------------------------------------------------------------
# Constants that are used in COM operations
VT_EMPTY = 0
VT_NULL = 1
VT_I2 = 2
VT_I4 = 3
VT_BSTR = 8
VT_BOOL = 11
VT_I1 = 16
VT_UI1 = 17
VT_UI2 = 18
VT_UI4 = 19
VT_I8 = 20
VT_UI8 = 21
VT_ARRAY = 8192
HTTPREQUEST_PROXYSETTING_PROXY = 2
HTTPREQUEST_SETCREDENTIALS_FOR_PROXY = 1
HTTPREQUEST_PROXY_SETTING = c_long
HTTPREQUEST_SETCREDENTIALS_FLAGS = c_long
#------------------------------------------------------------------------------
# Com related APIs that are used.
_ole32 = oledll.ole32
_oleaut32 = WinDLL('oleaut32')
_CLSIDFromString = _ole32.CLSIDFromString
_CoInitialize = _ole32.CoInitialize
_CoInitialize.argtypes = [c_void_p]
_CoCreateInstance = _ole32.CoCreateInstance
_SysAllocString = _oleaut32.SysAllocString
_SysAllocString.restype = c_void_p
_SysAllocString.argtypes = [c_wchar_p]
_SysFreeString = _oleaut32.SysFreeString
_SysFreeString.argtypes = [c_void_p]
# SAFEARRAY*
# SafeArrayCreateVector(_In_ VARTYPE vt,_In_ LONG lLbound,_In_ ULONG
# cElements);
_SafeArrayCreateVector = _oleaut32.SafeArrayCreateVector
_SafeArrayCreateVector.restype = c_void_p
_SafeArrayCreateVector.argtypes = [c_ushort, c_long, c_ulong]
# HRESULT
# SafeArrayAccessData(_In_ SAFEARRAY *psa, _Out_ void **ppvData);
_SafeArrayAccessData = _oleaut32.SafeArrayAccessData
_SafeArrayAccessData.argtypes = [c_void_p, POINTER(c_void_p)]
# HRESULT
# SafeArrayUnaccessData(_In_ SAFEARRAY *psa);
_SafeArrayUnaccessData = _oleaut32.SafeArrayUnaccessData
_SafeArrayUnaccessData.argtypes = [c_void_p]
# HRESULT
# SafeArrayGetUBound(_In_ SAFEARRAY *psa, _In_ UINT nDim, _Out_ LONG
# *plUbound);
_SafeArrayGetUBound = _oleaut32.SafeArrayGetUBound
_SafeArrayGetUBound.argtypes = [c_void_p, c_ulong, POINTER(c_long)]
#------------------------------------------------------------------------------
class BSTR(c_wchar_p):
''' BSTR class in python. '''
def __init__(self, value):
super(BSTR, self).__init__(_SysAllocString(value))
def __del__(self):
_SysFreeString(self)
class VARIANT(Structure):
'''
VARIANT structure in Python. It does not match the definition in
MSDN exactly; only the fields that are used are mapped. Field names
are also slightly different.
'''
class _tagData(Union):
class _tagRecord(Structure):
_fields_ = [('pvoid', c_void_p), ('precord', c_void_p)]
_fields_ = [('llval', c_longlong),
('ullval', c_ulonglong),
('lval', c_long),
('ulval', c_ulong),
('ival', c_short),
('boolval', c_ushort),
('bstrval', BSTR),
('parray', c_void_p),
('record', _tagRecord)]
_fields_ = [('vt', c_ushort),
('wReserved1', c_ushort),
('wReserved2', c_ushort),
('wReserved3', c_ushort),
('vdata', _tagData)]
@staticmethod
def create_empty():
variant = VARIANT()
variant.vt = VT_EMPTY
variant.vdata.llval = 0
return variant
@staticmethod
def create_safearray_from_str(text):
variant = VARIANT()
variant.vt = VT_ARRAY | VT_UI1
length = len(text)
variant.vdata.parray = _SafeArrayCreateVector(VT_UI1, 0, length)
pvdata = c_void_p()
_SafeArrayAccessData(variant.vdata.parray, byref(pvdata))
ctypes.memmove(pvdata, text, length)
_SafeArrayUnaccessData(variant.vdata.parray)
return variant
@staticmethod
def create_bstr_from_str(text):
variant = VARIANT()
variant.vt = VT_BSTR
variant.vdata.bstrval = BSTR(text)
return variant
@staticmethod
def create_bool_false():
variant = VARIANT()
variant.vt = VT_BOOL
variant.vdata.boolval = 0
return variant
def is_safearray_of_bytes(self):
return self.vt == VT_ARRAY | VT_UI1
def str_from_safearray(self):
assert self.vt == VT_ARRAY | VT_UI1
pvdata = c_void_p()
count = c_long()
_SafeArrayGetUBound(self.vdata.parray, 1, byref(count))
count = c_long(count.value + 1)
_SafeArrayAccessData(self.vdata.parray, byref(pvdata))
text = ctypes.string_at(pvdata, count)
_SafeArrayUnaccessData(self.vdata.parray)
return text
def __del__(self):
_VariantClear(self)
# HRESULT VariantClear(_Inout_ VARIANTARG *pvarg);
_VariantClear = _oleaut32.VariantClear
_VariantClear.argtypes = [POINTER(VARIANT)]
class GUID(Structure):
''' GUID structure in python. '''
_fields_ = [("data1", c_ulong),
("data2", c_ushort),
("data3", c_ushort),
("data4", c_byte * 8)]
def __init__(self, name=None):
if name is not None:
_CLSIDFromString(unicode(name), byref(self))
class _WinHttpRequest(c_void_p):
'''
Maps the COM API to Python class functions. Not all methods in
IWinHttpWebRequest are mapped - only the methods we use.
'''
_AddRef = WINFUNCTYPE(c_long) \
(1, 'AddRef')
_Release = WINFUNCTYPE(c_long) \
(2, 'Release')
_SetProxy = WINFUNCTYPE(HRESULT,
HTTPREQUEST_PROXY_SETTING,
VARIANT,
VARIANT) \
(7, 'SetProxy')
_SetCredentials = WINFUNCTYPE(HRESULT,
BSTR,
BSTR,
HTTPREQUEST_SETCREDENTIALS_FLAGS) \
(8, 'SetCredentials')
_Open = WINFUNCTYPE(HRESULT, BSTR, BSTR, VARIANT) \
(9, 'Open')
_SetRequestHeader = WINFUNCTYPE(HRESULT, BSTR, BSTR) \
(10, 'SetRequestHeader')
_GetResponseHeader = WINFUNCTYPE(HRESULT, BSTR, POINTER(c_void_p)) \
(11, 'GetResponseHeader')
_GetAllResponseHeaders = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \
(12, 'GetAllResponseHeaders')
_Send = WINFUNCTYPE(HRESULT, VARIANT) \
(13, 'Send')
_Status = WINFUNCTYPE(HRESULT, POINTER(c_long)) \
(14, 'Status')
_StatusText = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \
(15, 'StatusText')
_ResponseText = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \
(16, 'ResponseText')
_ResponseBody = WINFUNCTYPE(HRESULT, POINTER(VARIANT)) \
(17, 'ResponseBody')
_ResponseStream = WINFUNCTYPE(HRESULT, POINTER(VARIANT)) \
(18, 'ResponseStream')
_WaitForResponse = WINFUNCTYPE(HRESULT, VARIANT, POINTER(c_ushort)) \
(21, 'WaitForResponse')
_Abort = WINFUNCTYPE(HRESULT) \
(22, 'Abort')
_SetTimeouts = WINFUNCTYPE(HRESULT, c_long, c_long, c_long, c_long) \
(23, 'SetTimeouts')
_SetClientCertificate = WINFUNCTYPE(HRESULT, BSTR) \
(24, 'SetClientCertificate')
def open(self, method, url):
'''
Opens the request.
method: the request VERB 'GET', 'POST', etc.
url: the url to connect
'''
_WinHttpRequest._SetTimeouts(self, 0, 65000, 65000, 65000)
flag = VARIANT.create_bool_false()
_method = BSTR(method)
_url = BSTR(url)
_WinHttpRequest._Open(self, _method, _url, flag)
def set_request_header(self, name, value):
''' Sets the request header. '''
_name = BSTR(name)
_value = BSTR(value)
_WinHttpRequest._SetRequestHeader(self, _name, _value)
def get_all_response_headers(self):
''' Gets back all response headers. '''
bstr_headers = c_void_p()
_WinHttpRequest._GetAllResponseHeaders(self, byref(bstr_headers))
bstr_headers = ctypes.cast(bstr_headers, c_wchar_p)
headers = bstr_headers.value
_SysFreeString(bstr_headers)
return headers
def send(self, request=None):
''' Sends the request body. '''
# Send VT_EMPTY if it is a GET or HEAD request.
if request is None:
var_empty = VARIANT.create_empty()
_WinHttpRequest._Send(self, var_empty)
else: # Sends request body as SAFEArray.
_request = VARIANT.create_safearray_from_str(request)
_WinHttpRequest._Send(self, _request)
def status(self):
''' Gets status of response. '''
status = c_long()
_WinHttpRequest._Status(self, byref(status))
return int(status.value)
def status_text(self):
''' Gets status text of response. '''
bstr_status_text = c_void_p()
_WinHttpRequest._StatusText(self, byref(bstr_status_text))
bstr_status_text = ctypes.cast(bstr_status_text, c_wchar_p)
status_text = bstr_status_text.value
_SysFreeString(bstr_status_text)
return status_text
def response_body(self):
'''
Gets the response body as a SAFEARRAY and converts it to str.
If the body is XML, it may start with a 3-byte UTF-8 BOM before
<?xml, which is stripped.
'''
var_respbody = VARIANT()
_WinHttpRequest._ResponseBody(self, byref(var_respbody))
if var_respbody.is_safearray_of_bytes():
respbody = var_respbody.str_from_safearray()
if respbody[3:].startswith(b'<?xml') and\
respbody.startswith(b'\xef\xbb\xbf'):
respbody = respbody[3:]
return respbody
else:
return ''
def set_client_certificate(self, certificate):
'''Sets client certificate for the request. '''
_certificate = BSTR(certificate)
_WinHttpRequest._SetClientCertificate(self, _certificate)
def set_tunnel(self, host, port):
''' Sets up the host and the port for the HTTP CONNECT Tunnelling.'''
url = host
if port:
url = url + u':' + port
var_host = VARIANT.create_bstr_from_str(url)
var_empty = VARIANT.create_empty()
_WinHttpRequest._SetProxy(
self, HTTPREQUEST_PROXYSETTING_PROXY, var_host, var_empty)
def set_proxy_credentials(self, user, password):
_WinHttpRequest._SetCredentials(
self, BSTR(user), BSTR(password),
HTTPREQUEST_SETCREDENTIALS_FOR_PROXY)
def __del__(self):
if self.value is not None:
_WinHttpRequest._Release(self)
class _Response(object):
''' Response class corresponding to the response returned from httplib
HTTPConnection. '''
def __init__(self, _status, _status_text, _length, _headers, _respbody):
self.status = _status
self.reason = _status_text
self.length = _length
self.headers = _headers
self.respbody = _respbody
def getheaders(self):
'''Returns response headers.'''
return self.headers
def read(self, _length):
'''Returns response body. '''
return self.respbody[:_length]
class _HTTPConnection(object):
''' Class corresponding to httplib HTTPConnection class. '''
def __init__(self, host, cert_file=None, key_file=None, protocol='http'):
''' Initializes the IWinHttpWebRequest COM object. '''
self.host = unicode(host)
self.cert_file = cert_file
self._httprequest = _WinHttpRequest()
self.protocol = protocol
clsid = GUID('{2087C2F4-2CEF-4953-A8AB-66779B670495}')
iid = GUID('{016FE2EC-B2C8-45F8-B23B-39E53A75396B}')
_CoInitialize(None)
_CoCreateInstance(byref(clsid), 0, 1, byref(iid),
byref(self._httprequest))
def close(self):
pass
def set_tunnel(self, host, port=None, headers=None):
''' Sets up the host and the port for the HTTP CONNECT Tunnelling. '''
self._httprequest.set_tunnel(unicode(host), unicode(str(port)))
def set_proxy_credentials(self, user, password):
self._httprequest.set_proxy_credentials(
unicode(user), unicode(password))
def putrequest(self, method, uri):
''' Connects to host and sends the request. '''
protocol = unicode(self.protocol + '://')
url = protocol + self.host + unicode(uri)
self._httprequest.open(unicode(method), url)
# sets certificate for the connection if cert_file is set.
if self.cert_file is not None:
self._httprequest.set_client_certificate(unicode(self.cert_file))
def putheader(self, name, value):
''' Sends the headers of request. '''
if sys.version_info < (3,):
name = str(name).decode('utf-8')
value = str(value).decode('utf-8')
self._httprequest.set_request_header(name, value)
def endheaders(self):
''' No operation. Exists only to provide the same interface as httplib
HTTPConnection.'''
pass
def send(self, request_body):
''' Sends request body. '''
if not request_body:
self._httprequest.send()
else:
self._httprequest.send(request_body)
def getresponse(self):
''' Gets the response and generates the _Response object'''
status = self._httprequest.status()
status_text = self._httprequest.status_text()
resp_headers = self._httprequest.get_all_response_headers()
fixed_headers = []
for resp_header in resp_headers.split('\n'):
if (resp_header.startswith('\t') or\
resp_header.startswith(' ')) and fixed_headers:
# append to previous header
fixed_headers[-1] += resp_header
else:
fixed_headers.append(resp_header)
headers = []
for resp_header in fixed_headers:
if ':' in resp_header:
pos = resp_header.find(':')
headers.append(
(resp_header[:pos].lower(), resp_header[pos + 1:].strip()))
body = self._httprequest.response_body()
length = len(body)
return _Response(status, status_text, length, headers, body)
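A Windows-only usage sketch, assuming the WinHttpRequest COM object is available (it ships with Windows); example.com is a placeholder host:
from azure.http.winhttp import _HTTPConnection
conn = _HTTPConnection('example.com', protocol='https')
conn.putrequest('GET', '/')  # opens the request via the COM object
conn.endheaders()
conn.send(None)              # sends VT_EMPTY for a body-less request
resp = conn.getresponse()
print(resp.status, resp.reason)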

View File

@ -1,852 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import ast
import json
import sys
from datetime import datetime
from xml.dom import minidom
from azure import (
WindowsAzureData,
WindowsAzureError,
xml_escape,
_create_entry,
_general_error_handler,
_get_entry_properties,
_get_child_nodes,
_get_children_from_path,
_get_first_child_node_value,
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE,
_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK,
_ERROR_QUEUE_NOT_FOUND,
_ERROR_TOPIC_NOT_FOUND,
)
from azure.http import HTTPError
# default rule name for subscription
DEFAULT_RULE_NAME = '$Default'
#-----------------------------------------------------------------------------
# Constants for Azure app environment settings.
AZURE_SERVICEBUS_NAMESPACE = 'AZURE_SERVICEBUS_NAMESPACE'
AZURE_SERVICEBUS_ACCESS_KEY = 'AZURE_SERVICEBUS_ACCESS_KEY'
AZURE_SERVICEBUS_ISSUER = 'AZURE_SERVICEBUS_ISSUER'
# namespace used for converting rules to objects
XML_SCHEMA_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'
class Queue(WindowsAzureData):
''' Queue class corresponding to Queue Description:
http://msdn.microsoft.com/en-us/library/windowsazure/hh780773'''
def __init__(self, lock_duration=None, max_size_in_megabytes=None,
requires_duplicate_detection=None, requires_session=None,
default_message_time_to_live=None,
dead_lettering_on_message_expiration=None,
duplicate_detection_history_time_window=None,
max_delivery_count=None, enable_batched_operations=None,
size_in_bytes=None, message_count=None):
self.lock_duration = lock_duration
self.max_size_in_megabytes = max_size_in_megabytes
self.requires_duplicate_detection = requires_duplicate_detection
self.requires_session = requires_session
self.default_message_time_to_live = default_message_time_to_live
self.dead_lettering_on_message_expiration = \
dead_lettering_on_message_expiration
self.duplicate_detection_history_time_window = \
duplicate_detection_history_time_window
self.max_delivery_count = max_delivery_count
self.enable_batched_operations = enable_batched_operations
self.size_in_bytes = size_in_bytes
self.message_count = message_count
class Topic(WindowsAzureData):
''' Topic class corresponding to Topic Description:
http://msdn.microsoft.com/en-us/library/windowsazure/hh780749. '''
def __init__(self, default_message_time_to_live=None,
max_size_in_megabytes=None, requires_duplicate_detection=None,
duplicate_detection_history_time_window=None,
enable_batched_operations=None, size_in_bytes=None):
self.default_message_time_to_live = default_message_time_to_live
self.max_size_in_megabytes = max_size_in_megabytes
self.requires_duplicate_detection = requires_duplicate_detection
self.duplicate_detection_history_time_window = \
duplicate_detection_history_time_window
self.enable_batched_operations = enable_batched_operations
self.size_in_bytes = size_in_bytes
@property
def max_size_in_mega_bytes(self):
import warnings
warnings.warn(
'This attribute has been changed to max_size_in_megabytes.')
return self.max_size_in_megabytes
@max_size_in_mega_bytes.setter
def max_size_in_mega_bytes(self, value):
self.max_size_in_megabytes = value
class Subscription(WindowsAzureData):
''' Subscription class corresponding to Subscription Description:
http://msdn.microsoft.com/en-us/library/windowsazure/hh780763. '''
def __init__(self, lock_duration=None, requires_session=None,
default_message_time_to_live=None,
dead_lettering_on_message_expiration=None,
dead_lettering_on_filter_evaluation_exceptions=None,
enable_batched_operations=None, max_delivery_count=None,
message_count=None):
self.lock_duration = lock_duration
self.requires_session = requires_session
self.default_message_time_to_live = default_message_time_to_live
self.dead_lettering_on_message_expiration = \
dead_lettering_on_message_expiration
self.dead_lettering_on_filter_evaluation_exceptions = \
dead_lettering_on_filter_evaluation_exceptions
self.enable_batched_operations = enable_batched_operations
self.max_delivery_count = max_delivery_count
self.message_count = message_count
class Rule(WindowsAzureData):
''' Rule class corresponding to Rule Description:
http://msdn.microsoft.com/en-us/library/windowsazure/hh780753. '''
def __init__(self, filter_type=None, filter_expression=None,
action_type=None, action_expression=None):
self.filter_type = filter_type
self.filter_expression = filter_expression
self.action_type = action_type
self.action_expression = action_expression
class Message(WindowsAzureData):
''' Message class used in the send message / get message APIs. '''
def __init__(self, body=None, service_bus_service=None, location=None,
custom_properties=None,
type='application/atom+xml;type=entry;charset=utf-8',
broker_properties=None):
self.body = body
self.location = location
self.broker_properties = broker_properties
self.custom_properties = custom_properties
self.type = type
self.service_bus_service = service_bus_service
self._topic_name = None
self._subscription_name = None
self._queue_name = None
if not service_bus_service:
return
# If location is set, extract the queue name for a queue message, or
# the topic and subscription names for a topic message.
if location:
if '/subscriptions/' in location:
pos = location.find(service_bus_service.host_base.lower())+1
pos1 = location.find('/subscriptions/')
self._topic_name = location[pos+len(service_bus_service.host_base):pos1]
pos = pos1 + len('/subscriptions/')
pos1 = location.find('/', pos)
self._subscription_name = location[pos:pos1]
elif '/messages/' in location:
pos = location.find(service_bus_service.host_base.lower())+1
pos1 = location.find('/messages/')
self._queue_name = location[pos+len(service_bus_service.host_base):pos1]
def delete(self):
''' Deletes itself if the queue name, or the topic and subscription
names, can be determined. '''
if self._queue_name:
self.service_bus_service.delete_queue_message(
self._queue_name,
self.broker_properties['SequenceNumber'],
self.broker_properties['LockToken'])
elif self._topic_name and self._subscription_name:
self.service_bus_service.delete_subscription_message(
self._topic_name,
self._subscription_name,
self.broker_properties['SequenceNumber'],
self.broker_properties['LockToken'])
else:
raise WindowsAzureError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_DELETE)
def unlock(self):
''' Unlocks itself if the queue name, or the topic and subscription
names, can be determined. '''
if self._queue_name:
self.service_bus_service.unlock_queue_message(
self._queue_name,
self.broker_properties['SequenceNumber'],
self.broker_properties['LockToken'])
elif self._topic_name and self._subscription_name:
self.service_bus_service.unlock_subscription_message(
self._topic_name,
self._subscription_name,
self.broker_properties['SequenceNumber'],
self.broker_properties['LockToken'])
else:
raise WindowsAzureError(_ERROR_MESSAGE_NOT_PEEK_LOCKED_ON_UNLOCK)
def add_headers(self, request):
''' Adds additional headers to the request for a message request. '''
# Adds custom properties
if self.custom_properties:
for name, value in self.custom_properties.items():
if sys.version_info < (3,) and isinstance(value, unicode):
request.headers.append(
(name, '"' + value.encode('utf-8') + '"'))
elif isinstance(value, str):
request.headers.append((name, '"' + str(value) + '"'))
elif isinstance(value, datetime):
request.headers.append(
(name, '"' + value.strftime('%a, %d %b %Y %H:%M:%S GMT') + '"'))
else:
request.headers.append((name, str(value).lower()))
# Adds content-type
request.headers.append(('Content-Type', self.type))
# Adds BrokerProperties
if self.broker_properties:
request.headers.append(
('BrokerProperties', str(self.broker_properties)))
return request.headers
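A small sketch of how add_headers renders custom properties, assuming this vendored azure package is importable; Namespace simply stands in for the real HTTPRequest object:
from argparse import Namespace
from azure.servicebus import Message
req = Namespace(headers=[])  # stand-in for azure.http.HTTPRequest
Message(b'hello world', custom_properties={'priority': 7}).add_headers(req)
print(req.headers)
# [('priority', '7'), ('Content-Type', 'application/atom+xml;type=entry;charset=utf-8')]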
def _create_message(response, service_instance):
''' Create message from response.
response: response from service bus cloud server.
service_instance: the service bus client.
'''
respbody = response.body
custom_properties = {}
broker_properties = None
message_type = None
message_location = None
# Gets all information from the response headers.
for name, value in response.headers:
if name.lower() == 'brokerproperties':
broker_properties = json.loads(value)
elif name.lower() == 'content-type':
message_type = value
elif name.lower() == 'location':
message_location = value
elif name.lower() not in ['content-type',
'brokerproperties',
'transfer-encoding',
'server',
'location',
'date']:
if '"' in value:
value = value[1:-1]
try:
custom_properties[name] = datetime.strptime(
value, '%a, %d %b %Y %H:%M:%S GMT')
except ValueError:
custom_properties[name] = value
else: # only int, float or boolean
if value.lower() == 'true':
custom_properties[name] = True
elif value.lower() == 'false':
custom_properties[name] = False
# int('3.1') doesn't work, so convert via float first
elif str(int(float(value))) == value:
custom_properties[name] = int(value)
else:
custom_properties[name] = float(value)
if message_type is None:
message = Message(
respbody, service_instance, message_location, custom_properties,
'application/atom+xml;type=entry;charset=utf-8', broker_properties)
else:
message = Message(respbody, service_instance, message_location,
custom_properties, message_type, broker_properties)
return message
# convert functions
def _convert_response_to_rule(response):
return _convert_xml_to_rule(response.body)
def _convert_xml_to_rule(xmlstr):
''' Converts response xml to rule object.
The format of xml for rule:
<entry xmlns='http://www.w3.org/2005/Atom'>
<content type='application/xml'>
<RuleDescription
xmlns:i="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
<Filter i:type="SqlFilterExpression">
<SqlExpression>MyProperty='XYZ'</SqlExpression>
</Filter>
<Action i:type="SqlFilterAction">
<SqlExpression>set MyProperty2 = 'ABC'</SqlExpression>
</Action>
</RuleDescription>
</content>
</entry>
'''
xmldoc = minidom.parseString(xmlstr)
rule = Rule()
for rule_desc in _get_children_from_path(xmldoc,
'entry',
'content',
'RuleDescription'):
for xml_filter in _get_child_nodes(rule_desc, 'Filter'):
filter_type = xml_filter.getAttributeNS(
XML_SCHEMA_NAMESPACE, 'type')
setattr(rule, 'filter_type', str(filter_type))
if xml_filter.childNodes:
for expr in _get_child_nodes(xml_filter, 'SqlExpression'):
setattr(rule, 'filter_expression',
expr.firstChild.nodeValue)
for xml_action in _get_child_nodes(rule_desc, 'Action'):
action_type = xml_action.getAttributeNS(
XML_SCHEMA_NAMESPACE, 'type')
setattr(rule, 'action_type', str(action_type))
if xml_action.childNodes:
action_expression = xml_action.childNodes[0].firstChild
if action_expression:
setattr(rule, 'action_expression',
action_expression.nodeValue)
# Extract id, updated and name values from the feed entry and set them on the rule.
for name, value in _get_entry_properties(xmlstr, True, '/rules').items():
setattr(rule, name, value)
return rule
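The namespaced-attribute lookup above is the subtle part; a self-contained sketch using only the standard library, with the docstring's sample XML trimmed to a hypothetical fragment:
from xml.dom import minidom
XSI = 'http://www.w3.org/2001/XMLSchema-instance'
xmlstr = ("<entry xmlns='http://www.w3.org/2005/Atom'><content type='application/xml'>"
'<RuleDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" '
'xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
'<Filter i:type="SqlFilterExpression">'
"<SqlExpression>MyProperty='XYZ'</SqlExpression></Filter>"
'</RuleDescription></content></entry>')
doc = minidom.parseString(xmlstr)
xml_filter = doc.getElementsByTagName('Filter')[0]
print(xml_filter.getAttributeNS(XSI, 'type'))  # SqlFilterExpression
print(xml_filter.getElementsByTagName('SqlExpression')[0].firstChild.nodeValue)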
def _convert_response_to_queue(response):
return _convert_xml_to_queue(response.body)
def _parse_bool(value):
if value.lower() == 'true':
return True
return False
def _convert_xml_to_queue(xmlstr):
''' Converts xml response to queue object.
The format of xml response for queue:
<QueueDescription
xmlns=\"http://schemas.microsoft.com/netservices/2010/10/servicebus/connect\">
<MaxSizeInBytes>10000</MaxSizeInBytes>
<DefaultMessageTimeToLive>PT5M</DefaultMessageTimeToLive>
<LockDuration>PT2M</LockDuration>
<RequiresGroupedReceives>False</RequiresGroupedReceives>
<SupportsDuplicateDetection>False</SupportsDuplicateDetection>
...
</QueueDescription>
'''
xmldoc = minidom.parseString(xmlstr)
queue = Queue()
invalid_queue = True
# Get the node for each attribute in the Queue class; if nothing is
# found then the response is not valid XML for a Queue.
for desc in _get_children_from_path(xmldoc,
'entry',
'content',
'QueueDescription'):
node_value = _get_first_child_node_value(desc, 'LockDuration')
if node_value is not None:
queue.lock_duration = node_value
invalid_queue = False
node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes')
if node_value is not None:
queue.max_size_in_megabytes = int(node_value)
invalid_queue = False
node_value = _get_first_child_node_value(
desc, 'RequiresDuplicateDetection')
if node_value is not None:
queue.requires_duplicate_detection = _parse_bool(node_value)
invalid_queue = False
node_value = _get_first_child_node_value(desc, 'RequiresSession')
if node_value is not None:
queue.requires_session = _parse_bool(node_value)
invalid_queue = False
node_value = _get_first_child_node_value(
desc, 'DefaultMessageTimeToLive')
if node_value is not None:
queue.default_message_time_to_live = node_value
invalid_queue = False
node_value = _get_first_child_node_value(
desc, 'DeadLetteringOnMessageExpiration')
if node_value is not None:
queue.dead_lettering_on_message_expiration = _parse_bool(node_value)
invalid_queue = False
node_value = _get_first_child_node_value(
desc, 'DuplicateDetectionHistoryTimeWindow')
if node_value is not None:
queue.duplicate_detection_history_time_window = node_value
invalid_queue = False
node_value = _get_first_child_node_value(
desc, 'EnableBatchedOperations')
if node_value is not None:
queue.enable_batched_operations = _parse_bool(node_value)
invalid_queue = False
node_value = _get_first_child_node_value(desc, 'MaxDeliveryCount')
if node_value is not None:
queue.max_delivery_count = int(node_value)
invalid_queue = False
node_value = _get_first_child_node_value(desc, 'MessageCount')
if node_value is not None:
queue.message_count = int(node_value)
invalid_queue = False
node_value = _get_first_child_node_value(desc, 'SizeInBytes')
if node_value is not None:
queue.size_in_bytes = int(node_value)
invalid_queue = False
if invalid_queue:
raise WindowsAzureError(_ERROR_QUEUE_NOT_FOUND)
# Extract id, updated and name values from the feed entry and set them on the queue.
for name, value in _get_entry_properties(xmlstr, True).items():
setattr(queue, name, value)
return queue
def _convert_response_to_topic(response):
return _convert_xml_to_topic(response.body)
def _convert_xml_to_topic(xmlstr):
'''Converts xml response to topic
The xml format for topic:
<entry xmlns='http://www.w3.org/2005/Atom'>
<content type='application/xml'>
<TopicDescription
xmlns:i="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
<DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive>
<MaxSizeInMegabytes>1024</MaxSizeInMegabytes>
<RequiresDuplicateDetection>false</RequiresDuplicateDetection>
<DuplicateDetectionHistoryTimeWindow>P7D</DuplicateDetectionHistoryTimeWindow>
<DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions>
</TopicDescription>
</content>
</entry>
'''
xmldoc = minidom.parseString(xmlstr)
topic = Topic()
invalid_topic = True
# Get the node for each attribute in the Topic class; if nothing is
# found then the response is not valid XML for a Topic.
for desc in _get_children_from_path(xmldoc,
'entry',
'content',
'TopicDescription'):
invalid_topic = True
node_value = _get_first_child_node_value(
desc, 'DefaultMessageTimeToLive')
if node_value is not None:
topic.default_message_time_to_live = node_value
invalid_topic = False
node_value = _get_first_child_node_value(desc, 'MaxSizeInMegabytes')
if node_value is not None:
topic.max_size_in_megabytes = int(node_value)
invalid_topic = False
node_value = _get_first_child_node_value(
desc, 'RequiresDuplicateDetection')
if node_value is not None:
topic.requires_duplicate_detection = _parse_bool(node_value)
invalid_topic = False
node_value = _get_first_child_node_value(
desc, 'DuplicateDetectionHistoryTimeWindow')
if node_value is not None:
topic.duplicate_detection_history_time_window = node_value
invalid_topic = False
node_value = _get_first_child_node_value(
desc, 'EnableBatchedOperations')
if node_value is not None:
topic.enable_batched_operations = _parse_bool(node_value)
invalid_topic = False
node_value = _get_first_child_node_value(desc, 'SizeInBytes')
if node_value is not None:
topic.size_in_bytes = int(node_value)
invalid_topic = False
if invalid_topic:
raise WindowsAzureError(_ERROR_TOPIC_NOT_FOUND)
# Extract id, updated and name values from the feed entry and set them on the topic.
for name, value in _get_entry_properties(xmlstr, True).items():
setattr(topic, name, value)
return topic
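The same pattern in miniature: pull element text out of the entry and coerce it, mirroring _get_first_child_node_value and _parse_bool. A self-contained sketch with a hypothetical fragment:
from xml.dom import minidom
xmlstr = ("<entry xmlns='http://www.w3.org/2005/Atom'><content type='application/xml'>"
'<TopicDescription xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
'<MaxSizeInMegabytes>1024</MaxSizeInMegabytes>'
'<RequiresDuplicateDetection>false</RequiresDuplicateDetection>'
'</TopicDescription></content></entry>')
doc = minidom.parseString(xmlstr)
print(int(doc.getElementsByTagName('MaxSizeInMegabytes')[0].firstChild.nodeValue))  # 1024
print(doc.getElementsByTagName('RequiresDuplicateDetection')[0].firstChild.nodeValue.lower() == 'true')  # False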
def _convert_response_to_subscription(response):
return _convert_xml_to_subscription(response.body)
def _convert_xml_to_subscription(xmlstr):
'''Converts xml response to subscription
The xml format for subscription:
<entry xmlns='http://www.w3.org/2005/Atom'>
<content type='application/xml'>
<SubscriptionDescription
xmlns:i="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">
<LockDuration>PT5M</LockDuration>
<RequiresSession>false</RequiresSession>
<DefaultMessageTimeToLive>P10675199DT2H48M5.4775807S</DefaultMessageTimeToLive>
<DeadLetteringOnMessageExpiration>false</DeadLetteringOnMessageExpiration>
<DeadLetteringOnFilterEvaluationExceptions>true</DeadLetteringOnFilterEvaluationExceptions>
</SubscriptionDescription>
</content>
</entry>
'''
xmldoc = minidom.parseString(xmlstr)
subscription = Subscription()
for desc in _get_children_from_path(xmldoc,
'entry',
'content',
'SubscriptionDescription'):
node_value = _get_first_child_node_value(desc, 'LockDuration')
if node_value is not None:
subscription.lock_duration = node_value
node_value = _get_first_child_node_value(
desc, 'RequiresSession')
if node_value is not None:
subscription.requires_session = _parse_bool(node_value)
node_value = _get_first_child_node_value(
desc, 'DefaultMessageTimeToLive')
if node_value is not None:
subscription.default_message_time_to_live = node_value
node_value = _get_first_child_node_value(
desc, 'DeadLetteringOnFilterEvaluationExceptions')
if node_value is not None:
subscription.dead_lettering_on_filter_evaluation_exceptions = \
_parse_bool(node_value)
node_value = _get_first_child_node_value(
desc, 'DeadLetteringOnMessageExpiration')
if node_value is not None:
subscription.dead_lettering_on_message_expiration = \
_parse_bool(node_value)
node_value = _get_first_child_node_value(
desc, 'EnableBatchedOperations')
if node_value is not None:
subscription.enable_batched_operations = _parse_bool(node_value)
node_value = _get_first_child_node_value(
desc, 'MaxDeliveryCount')
if node_value is not None:
subscription.max_delivery_count = int(node_value)
node_value = _get_first_child_node_value(
desc, 'MessageCount')
if node_value is not None:
subscription.message_count = int(node_value)
for name, value in _get_entry_properties(xmlstr,
True,
'/subscriptions').items():
setattr(subscription, name, value)
return subscription
def _convert_subscription_to_xml(subscription):
'''
Converts a subscription object to XML to send. The order of the fields
of the subscription in the XML is very important, so we can't simply
call convert_class_to_xml.
subscription: the subscription object to be converted.
'''
subscription_body = '<SubscriptionDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
if subscription:
if subscription.lock_duration is not None:
subscription_body += ''.join(
['<LockDuration>',
str(subscription.lock_duration),
'</LockDuration>'])
if subscription.requires_session is not None:
subscription_body += ''.join(
['<RequiresSession>',
str(subscription.requires_session).lower(),
'</RequiresSession>'])
if subscription.default_message_time_to_live is not None:
subscription_body += ''.join(
['<DefaultMessageTimeToLive>',
str(subscription.default_message_time_to_live),
'</DefaultMessageTimeToLive>'])
if subscription.dead_lettering_on_message_expiration is not None:
subscription_body += ''.join(
['<DeadLetteringOnMessageExpiration>',
str(subscription.dead_lettering_on_message_expiration).lower(),
'</DeadLetteringOnMessageExpiration>'])
if subscription.dead_lettering_on_filter_evaluation_exceptions is not None:
subscription_body += ''.join(
['<DeadLetteringOnFilterEvaluationExceptions>',
str(subscription.dead_lettering_on_filter_evaluation_exceptions).lower(),
'</DeadLetteringOnFilterEvaluationExceptions>'])
if subscription.enable_batched_operations is not None:
subscription_body += ''.join(
['<EnableBatchedOperations>',
str(subscription.enable_batched_operations).lower(),
'</EnableBatchedOperations>'])
if subscription.max_delivery_count is not None:
subscription_body += ''.join(
['<MaxDeliveryCount>',
str(subscription.max_delivery_count),
'</MaxDeliveryCount>'])
if subscription.message_count is not None:
subscription_body += ''.join(
['<MessageCount>',
str(subscription.message_count),
'</MessageCount>'])
subscription_body += '</SubscriptionDescription>'
return _create_entry(subscription_body)
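To see why the explicit concatenation is needed: the docstring notes that element order matters, presumably because the service validates it, while a generic class-to-XML converter would emit fields in arbitrary order. A minimal, self-contained sketch of the order-preserving approach, with hypothetical values:
fields = [('LockDuration', 'PT5M'),
('RequiresSession', 'false'),
('MaxDeliveryCount', '10')]
body = '<SubscriptionDescription xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
body += ''.join('<{0}>{1}</{0}>'.format(name, value) for name, value in fields)
body += '</SubscriptionDescription>'
print(body)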
def _convert_rule_to_xml(rule):
'''
Converts a rule object to XML to send. The order of the fields of the
rule in the XML is very important, so we can't simply call
convert_class_to_xml.
rule: the rule object to be converted.
'''
rule_body = '<RuleDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
if rule:
if rule.filter_type:
rule_body += ''.join(
['<Filter i:type="',
xml_escape(rule.filter_type),
'">'])
if rule.filter_type == 'CorrelationFilter':
rule_body += ''.join(
['<CorrelationId>',
xml_escape(rule.filter_expression),
'</CorrelationId>'])
else:
rule_body += ''.join(
['<SqlExpression>',
xml_escape(rule.filter_expression),
'</SqlExpression>'])
rule_body += '<CompatibilityLevel>20</CompatibilityLevel>'
rule_body += '</Filter>'
if rule.action_type:
rule_body += ''.join(
['<Action i:type="',
xml_escape(rule.action_type),
'">'])
if rule.action_type == 'SqlRuleAction':
rule_body += ''.join(
['<SqlExpression>',
xml_escape(rule.action_expression),
'</SqlExpression>'])
rule_body += '<CompatibilityLevel>20</CompatibilityLevel>'
rule_body += '</Action>'
rule_body += '</RuleDescription>'
return _create_entry(rule_body)
def _convert_topic_to_xml(topic):
'''
Converts a topic object to XML to send. The order of the fields of the
topic in the XML is very important, so we can't simply call
convert_class_to_xml.
topic: the topic object to be converted.
'''
topic_body = '<TopicDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
if topic:
if topic.default_message_time_to_live is not None:
topic_body += ''.join(
['<DefaultMessageTimeToLive>',
str(topic.default_message_time_to_live),
'</DefaultMessageTimeToLive>'])
if topic.max_size_in_megabytes is not None:
topic_body += ''.join(
['<MaxSizeInMegabytes>',
str(topic.max_size_in_megabytes),
'</MaxSizeInMegabytes>'])
if topic.requires_duplicate_detection is not None:
topic_body += ''.join(
['<RequiresDuplicateDetection>',
str(topic.requires_duplicate_detection).lower(),
'</RequiresDuplicateDetection>'])
if topic.duplicate_detection_history_time_window is not None:
topic_body += ''.join(
['<DuplicateDetectionHistoryTimeWindow>',
str(topic.duplicate_detection_history_time_window),
'</DuplicateDetectionHistoryTimeWindow>'])
if topic.enable_batched_operations is not None:
topic_body += ''.join(
['<EnableBatchedOperations>',
str(topic.enable_batched_operations).lower(),
'</EnableBatchedOperations>'])
if topic.size_in_bytes is not None:
topic_body += ''.join(
['<SizeInBytes>',
str(topic.size_in_bytes),
'</SizeInBytes>'])
topic_body += '</TopicDescription>'
return _create_entry(topic_body)
def _convert_queue_to_xml(queue):
'''
Converts a queue object to XML to send. The order of the fields of the
queue in the XML is very important, so we can't simply call
convert_class_to_xml.
queue: the queue object to be converted.
'''
queue_body = '<QueueDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect">'
if queue:
if queue.lock_duration:
queue_body += ''.join(
['<LockDuration>',
str(queue.lock_duration),
'</LockDuration>'])
if queue.max_size_in_megabytes is not None:
queue_body += ''.join(
['<MaxSizeInMegabytes>',
str(queue.max_size_in_megabytes),
'</MaxSizeInMegabytes>'])
if queue.requires_duplicate_detection is not None:
queue_body += ''.join(
['<RequiresDuplicateDetection>',
str(queue.requires_duplicate_detection).lower(),
'</RequiresDuplicateDetection>'])
if queue.requires_session is not None:
queue_body += ''.join(
['<RequiresSession>',
str(queue.requires_session).lower(),
'</RequiresSession>'])
if queue.default_message_time_to_live is not None:
queue_body += ''.join(
['<DefaultMessageTimeToLive>',
str(queue.default_message_time_to_live),
'</DefaultMessageTimeToLive>'])
if queue.dead_lettering_on_message_expiration is not None:
queue_body += ''.join(
['<DeadLetteringOnMessageExpiration>',
str(queue.dead_lettering_on_message_expiration).lower(),
'</DeadLetteringOnMessageExpiration>'])
if queue.duplicate_detection_history_time_window is not None:
queue_body += ''.join(
['<DuplicateDetectionHistoryTimeWindow>',
str(queue.duplicate_detection_history_time_window),
'</DuplicateDetectionHistoryTimeWindow>'])
if queue.max_delivery_count is not None:
queue_body += ''.join(
['<MaxDeliveryCount>',
str(queue.max_delivery_count),
'</MaxDeliveryCount>'])
if queue.enable_batched_operations is not None:
queue_body += ''.join(
['<EnableBatchedOperations>',
str(queue.enable_batched_operations).lower(),
'</EnableBatchedOperations>'])
if queue.size_in_bytes is not None:
queue_body += ''.join(
['<SizeInBytes>',
str(queue.size_in_bytes),
'</SizeInBytes>'])
if queue.message_count is not None:
queue_body += ''.join(
['<MessageCount>',
str(queue.message_count),
'</MessageCount>'])
queue_body += '</QueueDescription>'
return _create_entry(queue_body)
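A round-trip sketch, assuming this vendored azure package is on the path; _create_entry (imported at the top of this module) wraps the body in an Atom entry:
from azure.servicebus import Queue, _convert_queue_to_xml
queue = Queue(lock_duration='PT1M', max_size_in_megabytes=1024)
print(_convert_queue_to_xml(queue))  # Atom entry wrapping a QueueDescription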
def _service_bus_error_handler(http_error):
''' Simple error handler for service bus service. '''
return _general_error_handler(http_error)
from azure.servicebus.servicebusservice import ServiceBusService

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,70 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import (
MANAGEMENT_HOST,
_str
)
from azure.servicemanagement import (
CloudServices,
)
from azure.servicemanagement.servicemanagementclient import (
_ServiceManagementClient,
)
class SchedulerManagementService(_ServiceManagementClient):
''' Note that this class is preliminary work on Scheduler
management. Since it lacks a lot of features, the final version
may differ slightly from the current one.
'''
def __init__(self, subscription_id=None, cert_file=None,
host=MANAGEMENT_HOST, request_session=None):
'''
Initializes the scheduler management service.
subscription_id: Subscription to manage.
cert_file:
Path to .pem certificate file (httplib), or location of the
certificate in your Personal certificate store (winhttp) in the
CURRENT_USER\my\CertificateName format.
If a request_session is specified, then this is unused.
host: Live ServiceClient URL. Defaults to Azure public cloud.
request_session:
Session object to use for http requests. If this is specified, it
replaces the default use of httplib or winhttp. Also, the cert_file
parameter is unused when a session is passed in.
The session object handles authentication, and as such can support
multiple types of authentication: .pem certificate, oauth.
For example, you can pass in a Session instance from the requests
library. To use .pem certificate authentication with requests
library, set the path to the .pem file on the session.cert
attribute.
'''
super(SchedulerManagementService, self).__init__(
subscription_id, cert_file, host, request_session)
#--Operations for scheduler ----------------------------------------
def list_cloud_services(self):
'''
List the cloud services for scheduling defined on the account.
'''
return self._perform_get(self._get_list_cloud_services_path(),
CloudServices)
#--Helper functions --------------------------------------------------
def _get_list_cloud_services_path(self):
return self._get_path('cloudservices', None)
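A usage sketch with placeholder credentials; list_cloud_services issues a GET against the cloudservices path and deserializes the feed into a CloudServices object:
from azure.servicemanagement.schedulermanagementservice import SchedulerManagementService
sms = SchedulerManagementService('<subscription-id>', '<path/to/management.pem>')  # placeholders
result = sms.list_cloud_services()  # CloudServices feed object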

View File

@ -1,534 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import (
MANAGEMENT_HOST,
_convert_response_to_feeds,
_str,
_validate_not_none,
_convert_xml_to_windows_azure_object,
)
from azure.servicemanagement import (
_ServiceBusManagementXmlSerializer,
QueueDescription,
TopicDescription,
NotificationHubDescription,
RelayDescription,
MetricProperties,
MetricValues,
MetricRollups,
)
from azure.servicemanagement.servicemanagementclient import (
_ServiceManagementClient,
)
from functools import partial
X_MS_VERSION = '2012-03-01'
class ServiceBusManagementService(_ServiceManagementClient):
def __init__(self, subscription_id=None, cert_file=None,
host=MANAGEMENT_HOST, request_session=None):
'''
Initializes the service bus management service.
subscription_id: Subscription to manage.
cert_file:
Path to .pem certificate file (httplib), or location of the
certificate in your Personal certificate store (winhttp) in the
CURRENT_USER\my\CertificateName format.
If a request_session is specified, then this is unused.
host: Live ServiceClient URL. Defaults to Azure public cloud.
request_session:
Session object to use for http requests. If this is specified, it
replaces the default use of httplib or winhttp. Also, the cert_file
parameter is unused when a session is passed in.
The session object handles authentication, and as such can support
multiple types of authentication: .pem certificate, oauth.
For example, you can pass in a Session instance from the requests
library. To use .pem certificate authentication with requests
library, set the path to the .pem file on the session.cert
attribute.
'''
super(ServiceBusManagementService, self).__init__(
subscription_id, cert_file, host, request_session)
self.x_ms_version = X_MS_VERSION
# Operations for service bus ----------------------------------------
def get_regions(self):
'''
Get list of available service bus regions.
'''
response = self._perform_get(
self._get_path('services/serviceBus/Regions/', None),
None)
return _convert_response_to_feeds(
response,
_ServiceBusManagementXmlSerializer.xml_to_region)
def list_namespaces(self):
'''
List the service bus namespaces defined on the account.
'''
response = self._perform_get(
self._get_path('services/serviceBus/Namespaces/', None),
None)
return _convert_response_to_feeds(
response,
_ServiceBusManagementXmlSerializer.xml_to_namespace)
def get_namespace(self, name):
'''
Get details about a specific namespace.
name: Name of the service bus namespace.
'''
response = self._perform_get(
self._get_path('services/serviceBus/Namespaces', name),
None)
return _ServiceBusManagementXmlSerializer.xml_to_namespace(
response.body)
def create_namespace(self, name, region):
'''
Create a new service bus namespace.
name: Name of the service bus namespace to create.
region: Region to create the namespace in.
'''
_validate_not_none('name', name)
return self._perform_put(
self._get_path('services/serviceBus/Namespaces', name),
_ServiceBusManagementXmlSerializer.namespace_to_xml(region))
def delete_namespace(self, name):
'''
Delete a service bus namespace.
name: Name of the service bus namespace to delete.
'''
_validate_not_none('name', name)
return self._perform_delete(
self._get_path('services/serviceBus/Namespaces', name),
None)
def check_namespace_availability(self, name):
'''
Checks to see if the specified service bus namespace is available, or
if it has already been taken.
name: Name of the service bus namespace to validate.
'''
_validate_not_none('name', name)
response = self._perform_get(
self._get_path('services/serviceBus/CheckNamespaceAvailability',
None) + '/?namespace=' + _str(name), None)
return _ServiceBusManagementXmlSerializer.xml_to_namespace_availability(
response.body)
def list_queues(self, name):
'''
Enumerates the queues in the service namespace.
name: Name of the service bus namespace.
'''
_validate_not_none('name', name)
response = self._perform_get(
self._get_list_queues_path(name),
None)
return _convert_response_to_feeds(response,
partial(_convert_xml_to_windows_azure_object,
azure_type=QueueDescription))
def list_topics(self, name):
'''
Retrieves the topics in the service namespace.
name: Name of the service bus namespace.
'''
response = self._perform_get(
self._get_list_topics_path(name),
None)
return _convert_response_to_feeds(response,
partial(_convert_xml_to_windows_azure_object,
azure_type=TopicDescription))
def list_notification_hubs(self, name):
'''
Retrieves the notification hubs in the service namespace.
name: Name of the service bus namespace.
'''
response = self._perform_get(
self._get_list_notification_hubs_path(name),
None)
return _convert_response_to_feeds(response,
partial(_convert_xml_to_windows_azure_object,
azure_type=NotificationHubDescription))
def list_relays(self, name):
'''
Retrieves the relays in the service namespace.
name: Name of the service bus namespace.
'''
response = self._perform_get(
self._get_list_relays_path(name),
None)
return _convert_response_to_feeds(response,
partial(_convert_xml_to_windows_azure_object,
azure_type=RelayDescription))
def get_supported_metrics_queue(self, name, queue_name):
'''
Retrieves the list of supported metrics for this namespace and queue.
name: Name of the service bus namespace.
queue_name: Name of the service bus queue in this namespace.
'''
response = self._perform_get(
self._get_get_supported_metrics_queue_path(name, queue_name),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricProperties))
def get_supported_metrics_topic(self, name, topic_name):
'''
Retrieves the list of supported metrics for this namespace and topic.
name: Name of the service bus namespace.
topic_name: Name of the service bus topic in this namespace.
'''
response = self._perform_get(
self._get_get_supported_metrics_topic_path(name, topic_name),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricProperties))
def get_supported_metrics_notification_hub(self, name, hub_name):
'''
Retrieves the list of supported metrics for this namespace and notification hub.
name: Name of the service bus namespace.
hub_name: Name of the service bus notification hub in this namespace.
'''
response = self._perform_get(
self._get_get_supported_metrics_hub_path(name, hub_name),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricProperties))
def get_supported_metrics_relay(self, name, relay_name):
'''
Retrieves the list of supported metrics for this namespace and relay.
name: Name of the service bus namespace.
relay_name: Name of the service bus relay in this namespace.
'''
response = self._perform_get(
self._get_get_supported_metrics_relay_path(name, relay_name),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricProperties))
def get_metrics_data_queue(self, name, queue_name, metric, rollup, filter_expression):
'''
Retrieves metric values for this namespace and queue.
name: Name of the service bus namespace.
queue_name: Name of the service bus queue in this namespace.
metric: name of a supported metric
rollup: name of a supported rollup
filter_expression: filter, for instance "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'"
'''
response = self._perform_get(
self._get_get_metrics_data_queue_path(name, queue_name, metric, rollup, filter_expression),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricValues))
def get_metrics_data_topic(self, name, topic_name, metric, rollup, filter_expression):
'''
Retrieves metric values for this namespace and topic.
name: Name of the service bus namespace.
topic_name: Name of the service bus topic in this namespace.
metric: name of a supported metric
rollup: name of a supported rollup
filter_expression: filter, for instance "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'"
'''
response = self._perform_get(
self._get_get_metrics_data_topic_path(name, topic_name, metric, rollup, filter_expression),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricValues))
def get_metrics_data_notification_hub(self, name, hub_name, metric, rollup, filter_expression):
'''
Retrieves metric values for this namespace and notification hub.
name: Name of the service bus namespace.
hub_name: Name of the service bus notification hub in this namespace.
metric: name of a supported metric
rollup: name of a supported rollup
filter_expression: filter, for instance "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'"
'''
response = self._perform_get(
self._get_get_metrics_data_hub_path(name, hub_name, metric, rollup, filter_expression),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricValues))
def get_metrics_data_relay(self, name, relay_name, metric, rollup, filter_expression):
'''
Retrieves metric values for this namespace and relay.
name: Name of the service bus namespace.
relay_name: Name of the service bus relay in this namespace.
metric: name of a supported metric
rollup: name of a supported rollup
filter_expression: filter, for instance "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'"
'''
response = self._perform_get(
self._get_get_metrics_data_relay_path(name, relay_name, metric, rollup, filter_expression),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricValues))
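# Example (sketch): fetching rollup values for one queue metric, using the
# OData filter syntax shown in the docstrings above. The metric name
# 'incoming' and rollup 'PT5M' are illustrative, and the attribute names on
# the returned items are assumptions.
#
#     values = sms.get_metrics_data_queue(
#         'mynamespace', 'myqueue', 'incoming', 'PT5M',
#         "$filter=Timestamp gt datetime'2014-10-01T00:00:00Z'")
#     for value in values:
#         print(value.timestamp, value.total)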
def get_metrics_rollups_queue(self, name, queue_name, metric):
'''
This operation gets rollup data for a Service Bus queue metric.
Rollup data includes the time granularity for the telemetry aggregation as well as
the retention settings for each time granularity.
name: Name of the service bus namespace.
queue_name: Name of the service bus queue in this namespace.
metric: name of a supported metric
'''
response = self._perform_get(
self._get_get_metrics_rollup_queue_path(name, queue_name, metric),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricRollups))
def get_metrics_rollups_topic(self, name, topic_name, metric):
'''
This operation gets rollup data for a Service Bus topic metric.
Rollup data includes the time granularity for the telemetry aggregation as well as
the retention settings for each time granularity.
name: Name of the service bus namespace.
topic_name: Name of the service bus topic in this namespace.
metric: name of a supported metric
'''
response = self._perform_get(
self._get_get_metrics_rollup_topic_path(name, topic_name, metric),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricRollups))
def get_metrics_rollups_notification_hub(self, name, hub_name, metric):
'''
This operation gets rollup data for a Service Bus notification hub metric.
Rollup data includes the time granularity for the telemetry aggregation as well as
the retention settings for each time granularity.
name: Name of the service bus namespace.
hub_name: Name of the service bus notification hub in this namespace.
metric: name of a supported metric
'''
response = self._perform_get(
self._get_get_metrics_rollup_hub_path(name, hub_name, metric),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricRollups))
def get_metrics_rollups_relay(self, name, relay_name, metric):
'''
This operation gets rollup data for a Service Bus relay metric.
Rollup data includes the time granularity for the telemetry aggregation as well as
the retention settings for each time granularity.
name: Name of the service bus namespace.
relay_name: Name of the service bus relay in this namespace.
metric: name of a supported metric
'''
response = self._perform_get(
self._get_get_metrics_rollup_relay_path(name, relay_name, metric),
None)
return _convert_response_to_feeds(response,
partial(_ServiceBusManagementXmlSerializer.xml_to_metrics,
object_type=MetricRollups))
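# Example (sketch): discovering supported metrics, then the rollups kept
# for each, before asking for data. The 'name' attribute on the feed items
# is an assumption.
#
#     for metric in sms.get_supported_metrics_queue('mynamespace', 'myqueue'):
#         rollups = sms.get_metrics_rollups_queue(
#             'mynamespace', 'myqueue', metric.name)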
#--Helper functions --------------------------------------------------
def _get_list_queues_path(self, namespace_name):
return self._get_path('services/serviceBus/Namespaces/',
namespace_name) + '/Queues'
def _get_list_topics_path(self, namespace_name):
return self._get_path('services/serviceBus/Namespaces/',
namespace_name) + '/Topics'
def _get_list_notification_hubs_path(self, namespace_name):
return self._get_path('services/serviceBus/Namespaces/',
namespace_name) + '/NotificationHubs'
def _get_list_relays_path(self, namespace_name):
return self._get_path('services/serviceBus/Namespaces/',
namespace_name) + '/Relays'
def _get_get_supported_metrics_queue_path(self, namespace_name, queue_name):
return self._get_path('services/serviceBus/Namespaces/',
namespace_name) + '/Queues/' + _str(queue_name) + '/Metrics'
def _get_get_supported_metrics_topic_path(self, namespace_name, topic_name):
return self._get_path('services/serviceBus/Namespaces/',
namespace_name) + '/Topics/' + _str(topic_name) + '/Metrics'
def _get_get_supported_metrics_hub_path(self, namespace_name, hub_name):
return self._get_path('services/serviceBus/Namespaces/',
namespace_name) + '/NotificationHubs/' + _str(hub_name) + '/Metrics'
def _get_get_supported_metrics_relay_path(self, namespace_name, relay_name):
return self._get_path('services/serviceBus/Namespaces/',
namespace_name) + '/Relays/' + _str(relay_name) + '/Metrics'
def _get_get_metrics_data_queue_path(self, namespace_name, queue_name, metric, rollup, filter_expr):
return "".join([
self._get_path('services/serviceBus/Namespaces/', namespace_name),
'/Queues/',
_str(queue_name),
'/Metrics/',
_str(metric),
'/Rollups/',
_str(rollup),
'/Values?',
filter_expr
])
def _get_get_metrics_data_topic_path(self, namespace_name, topic_name, metric, rollup, filter_expr):
return "".join([
self._get_path('services/serviceBus/Namespaces/', namespace_name),
'/Topics/',
_str(topic_name),
'/Metrics/',
_str(metric),
'/Rollups/',
_str(rollup),
'/Values?',
filter_expr
])
def _get_get_metrics_data_hub_path(self, namespace_name, hub_name, metric, rollup, filter_expr):
return "".join([
self._get_path('services/serviceBus/Namespaces/', namespace_name),
'/NotificationHubs/',
_str(hub_name),
'/Metrics/',
_str(metric),
'/Rollups/',
_str(rollup),
'/Values?',
filter_expr
])
def _get_get_metrics_data_relay_path(self, namespace_name, relay_name, metric, rollup, filter_expr):
return "".join([
self._get_path('services/serviceBus/Namespaces/', namespace_name),
'/Relays/',
_str(relay_name),
'/Metrics/',
_str(metric),
'/Rollups/',
_str(rollup),
'/Values?',
filter_expr
])
def _get_get_metrics_rollup_queue_path(self, namespace_name, queue_name, metric):
return "".join([
self._get_path('services/serviceBus/Namespaces/', namespace_name),
'/Queues/',
_str(queue_name),
'/Metrics/',
_str(metric),
'/Rollups',
])
def _get_get_metrics_rollup_topic_path(self, namespace_name, topic_name, metric):
return "".join([
self._get_path('services/serviceBus/Namespaces/', namespace_name),
'/Topics/',
_str(topic_name),
'/Metrics/',
_str(metric),
'/Rollups',
])
def _get_get_metrics_rollup_hub_path(self, namespace_name, hub_name, metric):
return "".join([
self._get_path('services/serviceBus/Namespaces/', namespace_name),
'/NotificationHubs/',
_str(hub_name),
'/Metrics/',
_str(metric),
'/Rollups',
])
def _get_get_metrics_rollup_relay_path(self, namespace_name, relay_name, metric):
return "".join([
self._get_path('services/serviceBus/Namespaces/', namespace_name),
'/Relays/',
_str(relay_name),
'/Metrics/',
_str(metric),
'/Rollups',
])

View File

@ -1,258 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import os
from azure import (
WindowsAzureError,
MANAGEMENT_HOST,
_get_request_body,
_parse_response,
_str,
_update_request_uri_query,
)
from azure.http import (
HTTPError,
HTTPRequest,
)
from azure.http.httpclient import _HTTPClient
from azure.servicemanagement import (
AZURE_MANAGEMENT_CERTFILE,
AZURE_MANAGEMENT_SUBSCRIPTIONID,
_management_error_handler,
parse_response_for_async_op,
X_MS_VERSION,
)
class _ServiceManagementClient(object):
def __init__(self, subscription_id=None, cert_file=None,
host=MANAGEMENT_HOST, request_session=None):
self.requestid = None
self.subscription_id = subscription_id
self.cert_file = cert_file
self.host = host
self.request_session = request_session
self.x_ms_version = X_MS_VERSION
self.content_type = 'application/atom+xml;type=entry;charset=utf-8'
if not self.cert_file and not request_session:
if AZURE_MANAGEMENT_CERTFILE in os.environ:
self.cert_file = os.environ[AZURE_MANAGEMENT_CERTFILE]
if not self.subscription_id:
if AZURE_MANAGEMENT_SUBSCRIPTIONID in os.environ:
self.subscription_id = os.environ[
AZURE_MANAGEMENT_SUBSCRIPTIONID]
if not self.request_session:
if not self.cert_file or not self.subscription_id:
raise WindowsAzureError(
'You need to provide subscription id and certificate file')
self._httpclient = _HTTPClient(
service_instance=self, cert_file=self.cert_file,
request_session=self.request_session)
self._filter = self._httpclient.perform_request
def with_filter(self, filter):
'''Returns a new service which will process requests with the
specified filter. Filtering operations can include logging, automatic
retrying, etc... The filter is a lambda which receives the HTTPRequest
and another lambda. The filter can perform any pre-processing on the
request, pass it off to the next lambda, and then perform any
post-processing on the response.'''
res = type(self)(self.subscription_id, self.cert_file, self.host,
self.request_session)
old_filter = self._filter
def new_filter(request):
return filter(request, old_filter)
res._filter = new_filter
return res
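# Example (sketch): a logging filter composed with with_filter(). The
# filter receives the request and the next callable in the chain, exactly
# as described in the docstring; the 'status' attribute on the response is
# an assumption.
#
#     def log_filter(request, next_filter):
#         print('>>', request.method, request.path)
#         response = next_filter(request)
#         print('<<', response.status)
#         return response
#
#     logged_client = client.with_filter(log_filter)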
def set_proxy(self, host, port, user=None, password=None):
'''
Sets the proxy server host and port for the HTTP CONNECT Tunnelling.
host: Address of the proxy. Ex: '192.168.0.100'
port: Port of the proxy. Ex: 6000
user: User for proxy authorization.
password: Password for proxy authorization.
'''
self._httpclient.set_proxy(host, port, user, password)
def perform_get(self, path, x_ms_version=None):
'''
Performs a GET request and returns the response.
path:
Path to the resource.
Ex: '/<subscription-id>/services/hostedservices/<service-name>'
x_ms_version:
If specified, this is used for the x-ms-version header.
Otherwise, self.x_ms_version is used.
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self.host
request.path = path
request.path, request.query = _update_request_uri_query(request)
request.headers = self._update_management_header(request, x_ms_version)
response = self._perform_request(request)
return response
def perform_put(self, path, body, x_ms_version=None):
'''
Performs a PUT request and returns the response.
path:
Path to the resource.
Ex: '/<subscription-id>/services/hostedservices/<service-name>'
body:
Body for the PUT request.
x_ms_version:
If specified, this is used for the x-ms-version header.
Otherwise, self.x_ms_version is used.
'''
request = HTTPRequest()
request.method = 'PUT'
request.host = self.host
request.path = path
request.body = _get_request_body(body)
request.path, request.query = _update_request_uri_query(request)
request.headers = self._update_management_header(request, x_ms_version)
response = self._perform_request(request)
return response
def perform_post(self, path, body, x_ms_version=None):
'''
Performs a POST request and returns the response.
path:
Path to the resource.
Ex: '/<subscription-id>/services/hostedservices/<service-name>'
body:
Body for the POST request.
x_ms_version:
If specified, this is used for the x-ms-version header.
Otherwise, self.x_ms_version is used.
'''
request = HTTPRequest()
request.method = 'POST'
request.host = self.host
request.path = path
request.body = _get_request_body(body)
request.path, request.query = _update_request_uri_query(request)
request.headers = self._update_management_header(request, x_ms_version)
response = self._perform_request(request)
return response
def perform_delete(self, path, x_ms_version=None):
'''
Performs a DELETE request and returns the response.
path:
Path to the resource.
Ex: '/<subscription-id>/services/hostedservices/<service-name>'
x_ms_version:
If specified, this is used for the x-ms-version header.
Otherwise, self.x_ms_version is used.
'''
request = HTTPRequest()
request.method = 'DELETE'
request.host = self.host
request.path = path
request.path, request.query = _update_request_uri_query(request)
request.headers = self._update_management_header(request, x_ms_version)
response = self._perform_request(request)
return response
#--Helper functions --------------------------------------------------
def _perform_request(self, request):
try:
resp = self._filter(request)
except HTTPError as ex:
return _management_error_handler(ex)
return resp
def _update_management_header(self, request, x_ms_version):
''' Add additional headers for management. '''
if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']:
request.headers.append(('Content-Length', str(len(request.body))))
# append additional headers based on the service
request.headers.append(('x-ms-version', x_ms_version or self.x_ms_version))
# if it is not GET or HEAD request, must set content-type.
if request.method not in ['GET', 'HEAD']:
for name, _ in request.headers:
if 'content-type' == name.lower():
break
else:
request.headers.append(
('Content-Type',
self.content_type))
return request.headers
def _perform_get(self, path, response_type, x_ms_version=None):
response = self.perform_get(path, x_ms_version)
if response_type is not None:
return _parse_response(response, response_type)
return response
def _perform_put(self, path, body, async=False, x_ms_version=None):
response = self.perform_put(path, body, x_ms_version)
if async:
return parse_response_for_async_op(response)
return None
def _perform_post(self, path, body, response_type=None, async=False,
x_ms_version=None):
response = self.perform_post(path, body, x_ms_version)
if response_type is not None:
return _parse_response(response, response_type)
if async:
return parse_response_for_async_op(response)
return None
def _perform_delete(self, path, async=False, x_ms_version=None):
response = self.perform_delete(path, x_ms_version)
if async:
return parse_response_for_async_op(response)
return None
def _get_path(self, resource, name):
path = '/' + self.subscription_id + '/' + resource
if name is not None:
path += '/' + _str(name)
return path

View File

@ -1,390 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import (
MANAGEMENT_HOST,
_parse_service_resources_response,
_validate_not_none,
)
from azure.servicemanagement import (
EventLog,
ServerQuota,
Servers,
ServiceObjective,
Database,
FirewallRule,
_SqlManagementXmlSerializer,
)
from azure.servicemanagement.servicemanagementclient import (
_ServiceManagementClient,
)
class SqlDatabaseManagementService(_ServiceManagementClient):
''' Note that this class is preliminary work on SQL Database
management. Since it lacks many features, the final version
may differ slightly from the current one.
'''
def __init__(self, subscription_id=None, cert_file=None,
host=MANAGEMENT_HOST, request_session=None):
'''
Initializes the sql database management service.
subscription_id: Subscription to manage.
cert_file:
Path to .pem certificate file (httplib), or location of the
certificate in your Personal certificate store (winhttp) in the
CURRENT_USER\my\CertificateName format.
If a request_session is specified, then this is unused.
host: Live ServiceClient URL. Defaults to Azure public cloud.
request_session:
Session object to use for http requests. If this is specified, it
replaces the default use of httplib or winhttp. Also, the cert_file
parameter is unused when a session is passed in.
The session object handles authentication, and as such can support
multiple types of authentication: .pem certificate, oauth.
For example, you can pass in a Session instance from the requests
library. To use .pem certificate authentication with requests
library, set the path to the .pem file on the session.cert
attribute.
'''
super(SqlDatabaseManagementService, self).__init__(
subscription_id, cert_file, host, request_session)
self.content_type = 'application/xml'
#--Operations for sql servers ----------------------------------------
def create_server(self, admin_login, admin_password, location):
'''
Create a new Azure SQL Database server.
admin_login: The administrator login name for the new server.
admin_password: The administrator login password for the new server.
location: The region to deploy the new server.
'''
_validate_not_none('admin_login', admin_login)
_validate_not_none('admin_password', admin_password)
_validate_not_none('location', location)
response = self.perform_post(
self._get_servers_path(),
_SqlManagementXmlSerializer.create_server_to_xml(
admin_login,
admin_password,
location
)
)
return _SqlManagementXmlSerializer.xml_to_create_server_response(
response.body)
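# Example (sketch): creating a server, then opening its firewall for a
# single address. This assumes the create response exposes the generated
# server name; credentials and region are illustrative.
#
#     result = sql.create_server('myadmin', 'S3cret!pass', 'West US')
#     server_name = result.server_name
#     sql.create_firewall_rule(server_name, 'office', '1.2.3.4', '1.2.3.4')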
def set_server_admin_password(self, server_name, admin_password):
'''
Reset the administrator password for a server.
server_name: Name of the server to change the password.
admin_password: The new administrator password for the server.
'''
_validate_not_none('server_name', server_name)
_validate_not_none('admin_password', admin_password)
return self._perform_post(
self._get_servers_path(server_name) + '?op=ResetPassword',
_SqlManagementXmlSerializer.set_server_admin_password_to_xml(
admin_password
)
)
def delete_server(self, server_name):
'''
Deletes an Azure SQL Database server (including all its databases).
server_name: Name of the server you want to delete.
'''
_validate_not_none('server_name', server_name)
return self._perform_delete(
self._get_servers_path(server_name))
def list_servers(self):
'''
List the SQL servers defined on the account.
'''
return self._perform_get(self._get_servers_path(),
Servers)
def list_quotas(self, server_name):
'''
Gets quotas for an Azure SQL Database Server.
server_name: Name of the server.
'''
_validate_not_none('server_name', server_name)
response = self._perform_get(self._get_quotas_path(server_name),
None)
return _parse_service_resources_response(response, ServerQuota)
def get_server_event_logs(self, server_name, start_date,
interval_size_in_minutes, event_types=''):
'''
Gets the event logs for an Azure SQL Database Server.
server_name: Name of the server to retrieve the event logs from.
start_date:
The starting date and time of the events to retrieve in UTC format,
for example '2011-09-28 16:05:00'.
interval_size_in_minutes:
Size of the event logs to retrieve (in minutes).
Valid values are: 5, 60, or 1440.
event_types:
The event type of the log entries you want to retrieve.
Valid values are:
- connection_successful
- connection_failed
- connection_terminated
- deadlock
- throttling
- throttling_long_transaction
To return all event types pass in an empty string.
'''
_validate_not_none('server_name', server_name)
_validate_not_none('start_date', start_date)
_validate_not_none('interval_size_in_minutes', interval_size_in_minutes)
_validate_not_none('event_types', event_types)
path = self._get_server_event_logs_path(server_name) + \
'?startDate={0}&intervalSizeInMinutes={1}&eventTypes={2}'.format(
start_date, interval_size_in_minutes, event_types)
response = self._perform_get(path, None)
return _parse_service_resources_response(response, EventLog)
#--Operations for firewall rules ------------------------------------------
def create_firewall_rule(self, server_name, name, start_ip_address,
end_ip_address):
'''
Creates an Azure SQL Database server firewall rule.
server_name: Name of the server to set the firewall rule on.
name: The name of the new firewall rule.
start_ip_address:
The lowest IP address in the range of the server-level firewall
setting. IP addresses equal to or greater than this can attempt to
connect to the server. The lowest possible IP address is 0.0.0.0.
end_ip_address:
The highest IP address in the range of the server-level firewall
setting. IP addresses equal to or less than this can attempt to
connect to the server. The highest possible IP address is
255.255.255.255.
'''
_validate_not_none('server_name', server_name)
_validate_not_none('name', name)
_validate_not_none('start_ip_address', start_ip_address)
_validate_not_none('end_ip_address', end_ip_address)
return self._perform_post(
self._get_firewall_rules_path(server_name),
_SqlManagementXmlSerializer.create_firewall_rule_to_xml(
name, start_ip_address, end_ip_address
)
)
def update_firewall_rule(self, server_name, name, start_ip_address,
end_ip_address):
'''
Update a firewall rule for an Azure SQL Database server.
server_name: Name of the server to set the firewall rule on.
name: The name of the firewall rule to update.
start_ip_address:
The lowest IP address in the range of the server-level firewall
setting. IP addresses equal to or greater than this can attempt to
connect to the server. The lowest possible IP address is 0.0.0.0.
end_ip_address:
The highest IP address in the range of the server-level firewall
setting. IP addresses equal to or less than this can attempt to
connect to the server. The highest possible IP address is
255.255.255.255.
'''
_validate_not_none('server_name', server_name)
_validate_not_none('name', name)
_validate_not_none('start_ip_address', start_ip_address)
_validate_not_none('end_ip_address', end_ip_address)
return self._perform_put(
self._get_firewall_rules_path(server_name, name),
_SqlManagementXmlSerializer.update_firewall_rule_to_xml(
name, start_ip_address, end_ip_address
)
)
def delete_firewall_rule(self, server_name, name):
'''
Deletes an Azure SQL Database server firewall rule.
server_name:
Name of the server with the firewall rule you want to delete.
name:
Name of the firewall rule you want to delete.
'''
_validate_not_none('server_name', server_name)
_validate_not_none('name', name)
return self._perform_delete(
self._get_firewall_rules_path(server_name, name))
def list_firewall_rules(self, server_name):
'''
Retrieves the set of firewall rules for an Azure SQL Database Server.
server_name: Name of the server.
'''
_validate_not_none('server_name', server_name)
response = self._perform_get(self._get_firewall_rules_path(server_name),
None)
return _parse_service_resources_response(response, FirewallRule)
def list_service_level_objectives(self, server_name):
'''
Gets the service level objectives for an Azure SQL Database server.
server_name: Name of the server.
'''
_validate_not_none('server_name', server_name)
response = self._perform_get(
self._get_service_objectives_path(server_name), None)
return _parse_service_resources_response(response, ServiceObjective)
#--Operations for sql databases ----------------------------------------
def create_database(self, server_name, name, service_objective_id,
edition=None, collation_name=None,
max_size_bytes=None):
'''
Creates a new Azure SQL Database.
server_name: Name of the server to contain the new database.
name:
Required. The name for the new database. See Naming Requirements
in Azure SQL Database General Guidelines and Limitations and
Database Identifiers for more information.
service_objective_id:
Required. The GUID corresponding to the performance level for
Edition. See List Service Level Objectives for current values.
edition:
Optional. The Service Tier (Edition) for the new database. If
omitted, the default is Web. Valid values are Web, Business,
Basic, Standard, and Premium. See Azure SQL Database Service Tiers
(Editions) and Web and Business Edition Sunset FAQ for more
information.
collation_name:
Optional. The database collation. This can be any collation
supported by SQL. If omitted, the default collation is used. See
SQL Server Collation Support in Azure SQL Database General
Guidelines and Limitations for more information.
max_size_bytes:
Optional. Sets the maximum size, in bytes, for the database. This
value must be within the range of allowed values for Edition. If
omitted, the default value for the edition is used. See Azure SQL
Database Service Tiers (Editions) for current maximum databases
sizes. Convert MB or GB values to bytes.
1 MB = 1048576 bytes. 1 GB = 1073741824 bytes.
'''
_validate_not_none('server_name', server_name)
_validate_not_none('name', name)
_validate_not_none('service_objective_id', service_objective_id)
return self._perform_post(
self._get_databases_path(server_name),
_SqlManagementXmlSerializer.create_database_to_xml(
name, service_objective_id, edition, collation_name,
max_size_bytes
)
)
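# Example (sketch): creating a 1 GB Standard database, converting GB to
# bytes as the docstring prescribes. Picking the first service objective
# and reading its 'id' attribute is an assumption for illustration.
#
#     objective = sql.list_service_level_objectives(server_name)[0]
#     sql.create_database(server_name, 'mydb', objective.id,
#                         edition='Standard',
#                         max_size_bytes=1 * 1024 * 1024 * 1024)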
def update_database(self, server_name, name, new_database_name=None,
service_objective_id=None, edition=None,
max_size_bytes=None):
'''
Updates existing database details.
server_name: Name of the server where the database is located.
name:
Required. The name of the database to update. See Naming Requirements
in Azure SQL Database General Guidelines and Limitations and
Database Identifiers for more information.
new_database_name:
Optional. The new name for the database.
service_objective_id:
Optional. The new service level to apply to the database. For more
information about service levels, see Azure SQL Database Service
Tiers and Performance Levels. Use List Service Level Objectives to
get the correct ID for the desired service objective.
edition:
Optional. The new edition for the database.
max_size_bytes:
Optional. The new size of the database in bytes. For information on
available sizes for each edition, see Azure SQL Database Service
Tiers (Editions).
'''
_validate_not_none('server_name', server_name)
_validate_not_none('name', name)
return self._perform_put(
self._get_databases_path(server_name, name),
_SqlManagementXmlSerializer.update_database_to_xml(
new_database_name, service_objective_id, edition,
max_size_bytes
)
)
def delete_database(self, server_name, name):
'''
Deletes an Azure SQL Database.
server_name: Name of the server where the database is located.
name: Name of the database to delete.
'''
return self._perform_delete(self._get_databases_path(server_name, name))
def list_databases(self, name):
'''
List the SQL databases defined on the specified server.
name: Name of the server.
'''
response = self._perform_get(self._get_list_databases_path(name),
None)
return _parse_service_resources_response(response, Database)
#--Helper functions --------------------------------------------------
def _get_servers_path(self, server_name=None):
return self._get_path('services/sqlservers/servers', server_name)
def _get_firewall_rules_path(self, server_name, name=None):
path = self._get_servers_path(server_name) + '/firewallrules'
if name:
path = path + '/' + name
return path
def _get_databases_path(self, server_name, name=None):
path = self._get_servers_path(server_name) + '/databases'
if name:
path = path + '/' + name
return path
def _get_server_event_logs_path(self, server_name):
return self._get_servers_path(server_name) + '/events'
def _get_service_objectives_path(self, server_name):
return self._get_servers_path(server_name) + '/serviceobjectives'
def _get_quotas_path(self, server_name, name=None):
path = self._get_servers_path(server_name) + '/serverquotas'
if name:
path = path + '/' + name
return path
def _get_list_databases_path(self, name):
# *contentview=generic is mandatory*
return self._get_path('services/sqlservers/servers/',
name) + '/databases?contentview=generic'

View File

@ -1,256 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import (
MANAGEMENT_HOST,
_str,
)
from azure.servicemanagement import (
WebSpaces,
WebSpace,
Sites,
Site,
MetricResponses,
MetricDefinitions,
PublishData,
_XmlSerializer,
)
from azure.servicemanagement.servicemanagementclient import (
_ServiceManagementClient,
)
class WebsiteManagementService(_ServiceManagementClient):
''' Note that this class is preliminary work on WebSite
management. Since it lacks many features, the final version
may differ slightly from the current one.
'''
def __init__(self, subscription_id=None, cert_file=None,
host=MANAGEMENT_HOST, request_session=None):
'''
Initializes the website management service.
subscription_id: Subscription to manage.
cert_file:
Path to .pem certificate file (httplib), or location of the
certificate in your Personal certificate store (winhttp) in the
CURRENT_USER\my\CertificateName format.
If a request_session is specified, then this is unused.
host: Live ServiceClient URL. Defaults to Azure public cloud.
request_session:
Session object to use for http requests. If this is specified, it
replaces the default use of httplib or winhttp. Also, the cert_file
parameter is unused when a session is passed in.
The session object handles authentication, and as such can support
multiple types of authentication: .pem certificate, oauth.
For example, you can pass in a Session instance from the requests
library. To use .pem certificate authentication with requests
library, set the path to the .pem file on the session.cert
attribute.
'''
super(WebsiteManagementService, self).__init__(
subscription_id, cert_file, host, request_session)
#--Operations for web sites ----------------------------------------
def list_webspaces(self):
'''
List the webspaces defined on the account.
'''
return self._perform_get(self._get_list_webspaces_path(),
WebSpaces)
def get_webspace(self, webspace_name):
'''
Get details of a specific webspace.
webspace_name: The name of the webspace.
'''
return self._perform_get(self._get_webspace_details_path(webspace_name),
WebSpace)
def list_sites(self, webspace_name):
'''
List the web sites defined on this webspace.
webspace_name: The name of the webspace.
'''
return self._perform_get(self._get_sites_path(webspace_name),
Sites)
def get_site(self, webspace_name, website_name):
'''
Get details of a specific web site in this webspace.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_sites_details_path(webspace_name,
website_name),
Site)
def create_site(self, webspace_name, website_name, geo_region, host_names,
plan='VirtualDedicatedPlan', compute_mode='Shared',
server_farm=None, site_mode=None):
'''
Create a website.
webspace_name: The name of the webspace.
website_name: The name of the website.
geo_region:
The geographical region of the webspace that will be created.
host_names:
An array of fully qualified domain names for website. Only one
hostname can be specified in the azurewebsites.net domain.
The hostname should match the name of the website. Custom domains
can only be specified for Shared or Standard websites.
plan:
This value must be 'VirtualDedicatedPlan'.
compute_mode:
This value should be 'Shared' for the Free or Paid Shared
offerings, or 'Dedicated' for the Standard offering. The default
value is 'Shared'. If you set it to 'Dedicated', you must specify
a value for the server_farm parameter.
server_farm:
The name of the Server Farm associated with this website. This is
a required value for Standard mode.
site_mode:
Can be None, 'Limited' or 'Basic'. This value is 'Limited' for the
Free offering, and 'Basic' for the Paid Shared offering. Standard
mode does not use the site_mode parameter; it uses the compute_mode
parameter.
'''
xml = _XmlSerializer.create_website_to_xml(
webspace_name, website_name, geo_region, plan, host_names,
compute_mode, server_farm, site_mode)
return self._perform_post(
self._get_sites_path(webspace_name),
xml,
Site)
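# Example (sketch): creating a shared-mode site with its default
# azurewebsites.net hostname. The webspace and site names are
# illustrative.
#
#     site = wms.create_site(
#         'westuswebspace', 'mysite', 'West US',
#         ['mysite.azurewebsites.net'])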
def delete_site(self, webspace_name, website_name,
delete_empty_server_farm=False, delete_metrics=False):
'''
Delete a website.
webspace_name: The name of the webspace.
website_name: The name of the website.
delete_empty_server_farm:
If the site being deleted is the last web site in a server farm,
you can delete the server farm by setting this to True.
delete_metrics:
To also delete the metrics for the site that you are deleting, you
can set this to True.
'''
path = self._get_sites_details_path(webspace_name, website_name)
query = ''
if delete_empty_server_farm:
query += '&deleteEmptyServerFarm=true'
if delete_metrics:
query += '&deleteMetrics=true'
if query:
path = path + '?' + query.lstrip('&')
return self._perform_delete(path)
def restart_site(self, webspace_name, website_name):
'''
Restart a web site.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_post(
self._get_restart_path(webspace_name, website_name),
'')
def get_historical_usage_metrics(self, webspace_name, website_name,
metrics = None, start_time=None, end_time=None, time_grain=None):
'''
Get historical usage metrics.
webspace_name: The name of the webspace.
website_name: The name of the website.
metrics: Optional. List of metric names. Otherwise, all metrics are returned.
start_time: Optional. An ISO8601 date. Otherwise, the current hour is used.
end_time: Optional. An ISO8601 date. Otherwise, the current time is used.
time_grain: Optional. A rollup name, such as P1D. Otherwise, the default rollup for the metric is used.
More information and metrics name at:
http://msdn.microsoft.com/en-us/library/azure/dn166964.aspx
'''
metrics = ('names='+','.join(metrics)) if metrics else ''
start_time = ('StartTime='+start_time) if start_time else ''
end_time = ('EndTime='+end_time) if end_time else ''
time_grain = ('TimeGrain='+time_grain) if time_grain else ''
parameters = ('&'.join(v for v in (metrics, start_time, end_time, time_grain) if v))
parameters = '?'+parameters if parameters else ''
return self._perform_get(self._get_historical_usage_metrics_path(webspace_name, website_name) + parameters,
MetricResponses)
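# Example (sketch): daily CPU-time rollups for a site, using the query
# parameters assembled above. The metric name 'CpuTime' is illustrative;
# see the MSDN link in the docstring for real names.
#
#     responses = wms.get_historical_usage_metrics(
#         'westuswebspace', 'mysite',
#         metrics=['CpuTime'], time_grain='P1D')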
def get_metric_definitions(self, webspace_name, website_name):
'''
Get metric definitions of metrics available of this web site.
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_metric_definitions_path(webspace_name, website_name),
MetricDefinitions)
def get_publish_profile_xml(self, webspace_name, website_name):
'''
Get a site's publish profile as a string
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_publishxml_path(webspace_name, website_name),
None).body.decode("utf-8")
def get_publish_profile(self, webspace_name, website_name):
'''
Get a site's publish profile as an object
webspace_name: The name of the webspace.
website_name: The name of the website.
'''
return self._perform_get(self._get_publishxml_path(webspace_name, website_name),
PublishData)
#--Helper functions --------------------------------------------------
def _get_list_webspaces_path(self):
return self._get_path('services/webspaces', None)
def _get_webspace_details_path(self, webspace_name):
return self._get_path('services/webspaces/', webspace_name)
def _get_sites_path(self, webspace_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites'
def _get_sites_details_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name)
def _get_restart_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/restart/'
def _get_historical_usage_metrics_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/metrics/'
def _get_metric_definitions_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/metricdefinitions/'
def _get_publishxml_path(self, webspace_name, website_name):
return self._get_path('services/webspaces/',
webspace_name) + '/sites/' + _str(website_name) + '/publishxml/'

View File

@ -1,901 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import sys
import types
from datetime import datetime
from dateutil import parser
from dateutil.tz import tzutc
from xml.dom import minidom
from azure import (WindowsAzureData,
WindowsAzureError,
METADATA_NS,
xml_escape,
_create_entry,
_decode_base64_to_text,
_decode_base64_to_bytes,
_encode_base64,
_fill_data_minidom,
_fill_instance_element,
_get_child_nodes,
_get_child_nodesNS,
_get_children_from_path,
_get_entry_properties,
_general_error_handler,
_list_of,
_parse_response_for_dict,
_sign_string,
_unicode_type,
_ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY,
)
# x-ms-version for storage service.
X_MS_VERSION = '2012-02-12'
class EnumResultsBase(object):
''' base class for EnumResults. '''
def __init__(self):
self.prefix = u''
self.marker = u''
self.max_results = 0
self.next_marker = u''
class ContainerEnumResults(EnumResultsBase):
''' Blob Container list. '''
def __init__(self):
EnumResultsBase.__init__(self)
self.containers = _list_of(Container)
def __iter__(self):
return iter(self.containers)
def __len__(self):
return len(self.containers)
def __getitem__(self, index):
return self.containers[index]
class Container(WindowsAzureData):
''' Blob container class. '''
def __init__(self):
self.name = u''
self.url = u''
self.properties = Properties()
self.metadata = {}
class Properties(WindowsAzureData):
''' Blob container's properties class. '''
def __init__(self):
self.last_modified = u''
self.etag = u''
class RetentionPolicy(WindowsAzureData):
''' RetentionPolicy in service properties. '''
def __init__(self):
self.enabled = False
self.__dict__['days'] = None
def get_days(self):
# convert days to int value
return int(self.__dict__['days'])
def set_days(self, value):
''' set days value; it is converted to int when read. '''
self.__dict__['days'] = value
days = property(fget=get_days, fset=set_days)
class Logging(WindowsAzureData):
''' Logging class in service properties. '''
def __init__(self):
self.version = u'1.0'
self.delete = False
self.read = False
self.write = False
self.retention_policy = RetentionPolicy()
class Metrics(WindowsAzureData):
''' Metrics class in service properties. '''
def __init__(self):
self.version = u'1.0'
self.enabled = False
self.include_apis = None
self.retention_policy = RetentionPolicy()
class StorageServiceProperties(WindowsAzureData):
''' Storage Service Properties class. '''
def __init__(self):
self.logging = Logging()
self.metrics = Metrics()
class AccessPolicy(WindowsAzureData):
''' Access Policy class in service properties. '''
def __init__(self, start=u'', expiry=u'', permission=u''):
self.start = start
self.expiry = expiry
self.permission = permission
class SignedIdentifier(WindowsAzureData):
''' Signed Identifier class for service properties. '''
def __init__(self):
self.id = u''
self.access_policy = AccessPolicy()
class SignedIdentifiers(WindowsAzureData):
''' SignedIdentifier list. '''
def __init__(self):
self.signed_identifiers = _list_of(SignedIdentifier)
def __iter__(self):
return iter(self.signed_identifiers)
def __len__(self):
return len(self.signed_identifiers)
def __getitem__(self, index):
return self.signed_identifiers[index]
class BlobEnumResults(EnumResultsBase):
''' Blob list.'''
def __init__(self):
EnumResultsBase.__init__(self)
self.blobs = _list_of(Blob)
self.prefixes = _list_of(BlobPrefix)
self.delimiter = ''
def __iter__(self):
return iter(self.blobs)
def __len__(self):
return len(self.blobs)
def __getitem__(self, index):
return self.blobs[index]
class BlobResult(bytes):
def __new__(cls, blob, properties):
return bytes.__new__(cls, blob if blob else b'')
def __init__(self, blob, properties):
self.properties = properties
class Blob(WindowsAzureData):
''' Blob class. '''
def __init__(self):
self.name = u''
self.snapshot = u''
self.url = u''
self.properties = BlobProperties()
self.metadata = {}
class BlobProperties(WindowsAzureData):
''' Blob Properties '''
def __init__(self):
self.last_modified = u''
self.etag = u''
self.content_length = 0
self.content_type = u''
self.content_encoding = u''
self.content_language = u''
self.content_md5 = u''
self.xms_blob_sequence_number = 0
self.blob_type = u''
self.lease_status = u''
self.lease_state = u''
self.lease_duration = u''
self.copy_id = u''
self.copy_source = u''
self.copy_status = u''
self.copy_progress = u''
self.copy_completion_time = u''
self.copy_status_description = u''
class BlobPrefix(WindowsAzureData):
''' BlobPrefix in Blob. '''
def __init__(self):
self.name = ''
class BlobBlock(WindowsAzureData):
''' BlobBlock class '''
def __init__(self, id=None, size=None):
self.id = id
self.size = size
class BlobBlockList(WindowsAzureData):
''' BlobBlockList class '''
def __init__(self):
self.committed_blocks = []
self.uncommitted_blocks = []
class PageRange(WindowsAzureData):
''' Page Range for page blob. '''
def __init__(self):
self.start = 0
self.end = 0
class PageList(object):
''' Page list for page blob. '''
def __init__(self):
self.page_ranges = _list_of(PageRange)
def __iter__(self):
return iter(self.page_ranges)
def __len__(self):
return len(self.page_ranges)
def __getitem__(self, index):
return self.page_ranges[index]
class QueueEnumResults(EnumResultsBase):
''' Queue list'''
def __init__(self):
EnumResultsBase.__init__(self)
self.queues = _list_of(Queue)
def __iter__(self):
return iter(self.queues)
def __len__(self):
return len(self.queues)
def __getitem__(self, index):
return self.queues[index]
class Queue(WindowsAzureData):
''' Queue class '''
def __init__(self):
self.name = u''
self.url = u''
self.metadata = {}
class QueueMessagesList(WindowsAzureData):
''' Queue message list. '''
def __init__(self):
self.queue_messages = _list_of(QueueMessage)
def __iter__(self):
return iter(self.queue_messages)
def __len__(self):
return len(self.queue_messages)
def __getitem__(self, index):
return self.queue_messages[index]
class QueueMessage(WindowsAzureData):
''' Queue message class. '''
def __init__(self):
self.message_id = u''
self.insertion_time = u''
self.expiration_time = u''
self.pop_receipt = u''
self.time_next_visible = u''
self.dequeue_count = u''
self.message_text = u''
class Entity(WindowsAzureData):
''' Entity class. The attributes of entity will be created dynamically. '''
pass
class EntityProperty(WindowsAzureData):
''' Entity property. contains type and value. '''
def __init__(self, type=None, value=None):
self.type = type
self.value = value
class Table(WindowsAzureData):
''' Only for intellisense and telling the user the return type. '''
pass
def _parse_blob_enum_results_list(response):
respbody = response.body
return_obj = BlobEnumResults()
doc = minidom.parseString(respbody)
for enum_results in _get_child_nodes(doc, 'EnumerationResults'):
for child in _get_children_from_path(enum_results, 'Blobs', 'Blob'):
return_obj.blobs.append(_fill_instance_element(child, Blob))
for child in _get_children_from_path(enum_results,
'Blobs',
'BlobPrefix'):
return_obj.prefixes.append(
_fill_instance_element(child, BlobPrefix))
for name, value in vars(return_obj).items():
if name == 'blobs' or name == 'prefixes':
continue
value = _fill_data_minidom(enum_results, name, value)
if value is not None:
setattr(return_obj, name, value)
return return_obj
def _update_storage_header(request):
''' add additional headers for storage request. '''
if request.body:
assert isinstance(request.body, bytes)
# if it is PUT, POST, MERGE, DELETE, need to add content-length to header.
if request.method in ['PUT', 'POST', 'MERGE', 'DELETE']:
request.headers.append(('Content-Length', str(len(request.body))))
# append additional headers based on the service
request.headers.append(('x-ms-version', X_MS_VERSION))
# append x-ms-meta name, values to header
for name, value in request.headers:
if 'x-ms-meta-name-values' in name and value:
for meta_name, meta_value in value.items():
request.headers.append(('x-ms-meta-' + meta_name, meta_value))
request.headers.remove((name, value))
break
return request
def _update_storage_blob_header(request, account_name, account_key):
''' add additional headers for storage blob request. '''
request = _update_storage_header(request)
current_time = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
request.headers.append(('x-ms-date', current_time))
request.headers.append(
('Content-Type', 'application/octet-stream Charset=UTF-8'))
request.headers.append(('Authorization',
_sign_storage_blob_request(request,
account_name,
account_key)))
return request.headers
def _update_storage_queue_header(request, account_name, account_key):
''' add additional headers for storage queue request. '''
return _update_storage_blob_header(request, account_name, account_key)
def _update_storage_table_header(request):
''' add additional headers for storage table request. '''
request = _update_storage_header(request)
for name, _ in request.headers:
if name.lower() == 'content-type':
break
else:
request.headers.append(('Content-Type', 'application/atom+xml'))
request.headers.append(('DataServiceVersion', '2.0;NetFx'))
request.headers.append(('MaxDataServiceVersion', '2.0;NetFx'))
current_time = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
request.headers.append(('x-ms-date', current_time))
request.headers.append(('Date', current_time))
return request.headers
def _sign_storage_blob_request(request, account_name, account_key):
'''
Returns the signed string for blob request which is used to set
Authorization header. This is also used to sign queue request.
'''
uri_path = request.path.split('?')[0]
# method to sign
string_to_sign = request.method + '\n'
# get headers to sign
headers_to_sign = [
'content-encoding', 'content-language', 'content-length',
'content-md5', 'content-type', 'date', 'if-modified-since',
'if-match', 'if-none-match', 'if-unmodified-since', 'range']
request_header_dict = dict((name.lower(), value)
for name, value in request.headers if value)
string_to_sign += '\n'.join(request_header_dict.get(x, '')
for x in headers_to_sign) + '\n'
# get x-ms header to sign
x_ms_headers = []
for name, value in request.headers:
if 'x-ms' in name:
x_ms_headers.append((name.lower(), value))
x_ms_headers.sort()
for name, value in x_ms_headers:
if value:
string_to_sign += ''.join([name, ':', value, '\n'])
# get account_name and uri path to sign
string_to_sign += '/' + account_name + uri_path
# get query string to sign if it is not table service
query_to_sign = request.query
query_to_sign.sort()
current_name = ''
for name, value in query_to_sign:
if value:
if current_name != name:
string_to_sign += '\n' + name + ':' + value
else:
string_to_sign += '\n' + ',' + value
# sign the request
auth_string = 'SharedKey ' + account_name + ':' + \
_sign_string(account_key, string_to_sign)
return auth_string
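# Example (sketch): shape of the canonicalized string assembled above for
# a simple GET, following the code path (verb, standard header values,
# x-ms-* headers, then account and path). Values are illustrative.
#
#     GET\n
#     \n...\n                             (standard headers, most empty)
#     x-ms-date:Fri, 26 Sep 2014 10:00:00 GMT\n
#     x-ms-version:2012-02-12\n
#     /myaccount/mycontainer/myblob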
def _sign_storage_table_request(request, account_name, account_key):
uri_path = request.path.split('?')[0]
string_to_sign = request.method + '\n'
headers_to_sign = ['content-md5', 'content-type', 'date']
request_header_dict = dict((name.lower(), value)
for name, value in request.headers if value)
string_to_sign += '\n'.join(request_header_dict.get(x, '')
for x in headers_to_sign) + '\n'
# get account_name and uri path to sign
string_to_sign += ''.join(['/', account_name, uri_path])
for name, value in request.query:
if name == 'comp' and uri_path == '/':
string_to_sign += '?comp=' + value
break
# sign the request
auth_string = 'SharedKey ' + account_name + ':' + \
_sign_string(account_key, string_to_sign)
return auth_string
def _to_python_bool(value):
if value.lower() == 'true':
return True
return False
def _to_entity_int(data):
int_max = (2 << 30) - 1
if data > int_max or data < -(int_max + 1):
return 'Edm.Int64', str(data)
else:
return 'Edm.Int32', str(data)
def _to_entity_bool(value):
if value:
return 'Edm.Boolean', 'true'
return 'Edm.Boolean', 'false'
def _to_entity_datetime(value):
# Azure expects the date value passed in to be UTC.
# Azure will always return values as UTC.
# If a date is passed in without timezone info, it is assumed to be UTC.
if value.tzinfo:
value = value.astimezone(tzutc())
return 'Edm.DateTime', value.strftime('%Y-%m-%dT%H:%M:%SZ')
def _to_entity_float(value):
return 'Edm.Double', str(value)
def _to_entity_property(value):
if value.type == 'Edm.Binary':
return value.type, _encode_base64(value.value)
return value.type, str(value.value)
def _to_entity_none(value):
return None, None
def _to_entity_str(value):
return 'Edm.String', value
# Tables of conversions to and from entity types. We support specific
# datatypes, and beyond that the user can use an EntityProperty to get
# custom data type support.
def _from_entity_binary(value):
return EntityProperty('Edm.Binary', _decode_base64_to_bytes(value))
def _from_entity_int(value):
return int(value)
def _from_entity_datetime(value):
# Note that Azure always returns UTC datetime, and dateutil parser
# will set the tzinfo on the date it returns
return parser.parse(value)
_ENTITY_TO_PYTHON_CONVERSIONS = {
'Edm.Binary': _from_entity_binary,
'Edm.Int32': _from_entity_int,
'Edm.Int64': _from_entity_int,
'Edm.Double': float,
'Edm.Boolean': _to_python_bool,
'Edm.DateTime': _from_entity_datetime,
}
# Conversion from Python type to a function which returns a tuple of the
# type string and content string.
_PYTHON_TO_ENTITY_CONVERSIONS = {
int: _to_entity_int,
bool: _to_entity_bool,
datetime: _to_entity_datetime,
float: _to_entity_float,
EntityProperty: _to_entity_property,
str: _to_entity_str,
}
if sys.version_info < (3,):
_PYTHON_TO_ENTITY_CONVERSIONS.update({
long: _to_entity_int,
types.NoneType: _to_entity_none,
unicode: _to_entity_str,
})
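# Example (sketch): how the conversion table above maps Python values to
# Edm type/content pairs. The int32 boundary comes from _to_entity_int.
#
#     mtype, text = _PYTHON_TO_ENTITY_CONVERSIONS[int](42)
#     # ('Edm.Int32', '42')
#     mtype, text = _PYTHON_TO_ENTITY_CONVERSIONS[int](2 << 40)
#     # ('Edm.Int64', '2199023255552')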
def _convert_entity_to_xml(source):
''' Converts an entity object to xml to send.
The entity format is:
<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom">
<title />
<updated>2008-09-18T23:46:19.3857256Z</updated>
<author>
<name />
</author>
<id />
<content type="application/xml">
<m:properties>
<d:Address>Mountain View</d:Address>
<d:Age m:type="Edm.Int32">23</d:Age>
<d:AmountDue m:type="Edm.Double">200.23</d:AmountDue>
<d:BinaryData m:type="Edm.Binary" m:null="true" />
<d:CustomerCode m:type="Edm.Guid">c9da6455-213d-42c9-9a79-3e9149a57833</d:CustomerCode>
<d:CustomerSince m:type="Edm.DateTime">2008-07-10T00:00:00</d:CustomerSince>
<d:IsActive m:type="Edm.Boolean">true</d:IsActive>
<d:NumOfOrders m:type="Edm.Int64">255</d:NumOfOrders>
<d:PartitionKey>mypartitionkey</d:PartitionKey>
<d:RowKey>myrowkey1</d:RowKey>
<d:Timestamp m:type="Edm.DateTime">0001-01-01T00:00:00</d:Timestamp>
</m:properties>
</content>
</entry>
'''
# construct the entity body included in <m:properties> and </m:properties>
entity_body = '<m:properties xml:space="preserve">{properties}</m:properties>'
if isinstance(source, WindowsAzureData):
source = vars(source)
properties_str = ''
# set properties type for types we know if value has no type info.
# if value has type info, then set the type to value.type
for name, value in source.items():
mtype = ''
conv = _PYTHON_TO_ENTITY_CONVERSIONS.get(type(value))
if conv is None and sys.version_info >= (3,) and value is None:
conv = _to_entity_none
if conv is None:
raise WindowsAzureError(
_ERROR_CANNOT_SERIALIZE_VALUE_TO_ENTITY.format(
type(value).__name__))
mtype, value = conv(value)
# form the property node
properties_str += ''.join(['<d:', name])
if value is None:
properties_str += ' m:null="true" />'
else:
if mtype:
properties_str += ''.join([' m:type="', mtype, '"'])
properties_str += ''.join(['>',
xml_escape(value), '</d:', name, '>'])
if sys.version_info < (3,):
if isinstance(properties_str, unicode):
properties_str = properties_str.encode('utf-8')
# generate the entity_body
entity_body = entity_body.format(properties=properties_str)
xmlstr = _create_entry(entity_body)
return xmlstr
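# Usage sketch (illustrative; the Atom <entry> envelope comes from
# _create_entry, defined elsewhere in this module): a plain dict serializes
# to typed <d:...> property nodes inside <m:properties>, e.g.
#   _convert_entity_to_xml({'Age': 23, 'IsActive': True})
# yields, among the properties:
#   <d:Age m:type="Edm.Int32">23</d:Age>
#   <d:IsActive m:type="Edm.Boolean">true</d:IsActive>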
def _convert_table_to_xml(table_name):
'''
Creates the xml to send for a given table name. The xml format for a
table is the same as for an entity, except that a table has only one
property, 'TableName', so we simply call _convert_entity_to_xml.
table_name: the name of the table
'''
return _convert_entity_to_xml({'TableName': table_name})
def _convert_block_list_to_xml(block_id_list):
'''
Convert a block list to xml to send.
block_id_list:
a list of str block ids, as used in put_block_list. Each id is
emitted as a <Latest> element, so blocks are committed from their
latest uploaded version.
'''
if block_id_list is None:
return ''
xml = '<?xml version="1.0" encoding="utf-8"?><BlockList>'
for value in block_id_list:
xml += '<Latest>{0}</Latest>'.format(_encode_base64(value))
return xml + '</BlockList>'
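# Usage sketch (assuming _encode_base64 applies standard base64 to the
# UTF-8 bytes of each id):
#   >>> _convert_block_list_to_xml(['block-1'])
#   '<?xml version="1.0" encoding="utf-8"?><BlockList><Latest>YmxvY2stMQ==</Latest></BlockList>'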
def _create_blob_result(response):
blob_properties = _parse_response_for_dict(response)
return BlobResult(response.body, blob_properties)
def _convert_response_to_block_list(response):
'''
Converts xml response to block list class.
'''
blob_block_list = BlobBlockList()
xmldoc = minidom.parseString(response.body)
for xml_block in _get_children_from_path(xmldoc,
'BlockList',
'CommittedBlocks',
'Block'):
xml_block_id = _decode_base64_to_text(
_get_child_nodes(xml_block, 'Name')[0].firstChild.nodeValue)
xml_block_size = int(
_get_child_nodes(xml_block, 'Size')[0].firstChild.nodeValue)
blob_block_list.committed_blocks.append(
BlobBlock(xml_block_id, xml_block_size))
for xml_block in _get_children_from_path(xmldoc,
'BlockList',
'UncommittedBlocks',
'Block'):
xml_block_id = _decode_base64_to_text(
_get_child_nodes(xml_block, 'Name')[0].firstChild.nodeValue)
xml_block_size = int(
_get_child_nodes(xml_block, 'Size')[0].firstChild.nodeValue)
blob_block_list.uncommitted_blocks.append(
BlobBlock(xml_block_id, xml_block_size))
return blob_block_list
def _remove_prefix(name):
colon = name.find(':')
if colon != -1:
return name[colon + 1:]
return name
def _convert_response_to_entity(response):
if response is None:
return response
return _convert_xml_to_entity(response.body)
def _convert_xml_to_entity(xmlstr):
''' Convert xml response to entity.
The format of entity:
<entry xmlns:d="http://schemas.microsoft.com/ado/2007/08/dataservices" xmlns:m="http://schemas.microsoft.com/ado/2007/08/dataservices/metadata" xmlns="http://www.w3.org/2005/Atom">
<title />
<updated>2008-09-18T23:46:19.3857256Z</updated>
<author>
<name />
</author>
<id />
<content type="application/xml">
<m:properties>
<d:Address>Mountain View</d:Address>
<d:Age m:type="Edm.Int32">23</d:Age>
<d:AmountDue m:type="Edm.Double">200.23</d:AmountDue>
<d:BinaryData m:type="Edm.Binary" m:null="true" />
<d:CustomerCode m:type="Edm.Guid">c9da6455-213d-42c9-9a79-3e9149a57833</d:CustomerCode>
<d:CustomerSince m:type="Edm.DateTime">2008-07-10T00:00:00</d:CustomerSince>
<d:IsActive m:type="Edm.Boolean">true</d:IsActive>
<d:NumOfOrders m:type="Edm.Int64">255</d:NumOfOrders>
<d:PartitionKey>mypartitionkey</d:PartitionKey>
<d:RowKey>myrowkey1</d:RowKey>
<d:Timestamp m:type="Edm.DateTime">0001-01-01T00:00:00</d:Timestamp>
</m:properties>
</content>
</entry>
'''
xmldoc = minidom.parseString(xmlstr)
xml_properties = None
for entry in _get_child_nodes(xmldoc, 'entry'):
for content in _get_child_nodes(entry, 'content'):
# TODO: Namespace
xml_properties = _get_child_nodesNS(
content, METADATA_NS, 'properties')
if not xml_properties:
return None
entity = Entity()
# extract each property node and get the type from attribute and node value
for xml_property in xml_properties[0].childNodes:
name = _remove_prefix(xml_property.nodeName)
if xml_property.firstChild:
value = xml_property.firstChild.nodeValue
else:
value = ''
isnull = xml_property.getAttributeNS(METADATA_NS, 'null')
mtype = xml_property.getAttributeNS(METADATA_NS, 'type')
# if not isnull and no type info, then it is a string and we just
# need the str type to hold the property.
if not isnull and not mtype:
_set_entity_attr(entity, name, value)
elif isnull == 'true':
if mtype:
property = EntityProperty(mtype, None)
else:
property = EntityProperty('Edm.String', None)
else: # need an object to hold the property
conv = _ENTITY_TO_PYTHON_CONVERSIONS.get(mtype)
if conv is not None:
property = conv(value)
else:
property = EntityProperty(mtype, value)
_set_entity_attr(entity, name, property)
# extract the feed-entry properties (id, updated, etag) from the entry
# and set the ones we keep (etag) on the entity.
for name, value in _get_entry_properties(xmlstr, True).items():
if name in ['etag']:
_set_entity_attr(entity, name, value)
return entity
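# Round-trip sketch: a typed node such as <d:Age m:type="Edm.Int32">23</d:Age>
# comes back as entity.Age == 23 via _ENTITY_TO_PYTHON_CONVERSIONS; an untyped
# node like <d:Address>Mountain View</d:Address> stays a str; and a node with
# m:null="true" becomes an EntityProperty holding None.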
def _set_entity_attr(entity, name, value):
try:
setattr(entity, name, value)
except UnicodeEncodeError:
# Python 2 doesn't support unicode attribute names, so we'll
# add them and access them directly through the dictionary
entity.__dict__[name] = value
def _convert_xml_to_table(xmlstr):
''' Converts the xml response to a table class.
Simply calls _convert_xml_to_entity, extracts the table name, and adds
the updated and author info.
'''
table = Table()
entity = _convert_xml_to_entity(xmlstr)
setattr(table, 'name', entity.TableName)
for name, value in _get_entry_properties(xmlstr, False).items():
setattr(table, name, value)
return table
def _storage_error_handler(http_error):
''' Simple error handler for storage service. '''
return _general_error_handler(http_error)
# make these available just from storage.
from azure.storage.blobservice import BlobService
from azure.storage.queueservice import QueueService
from azure.storage.tableservice import TableService
from azure.storage.cloudstorageaccount import CloudStorageAccount
from azure.storage.sharedaccesssignature import (
SharedAccessSignature,
SharedAccessPolicy,
Permission,
WebResource,
)

File diff suppressed because it is too large


@@ -1,39 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure.storage.blobservice import BlobService
from azure.storage.tableservice import TableService
from azure.storage.queueservice import QueueService
class CloudStorageAccount(object):
"""
Provides a factory for creating the blob, queue, and table services
with a common account name and account key. Users can either use the
factory or can construct the appropriate service directly.
"""
def __init__(self, account_name=None, account_key=None):
self.account_name = account_name
self.account_key = account_key
def create_blob_service(self):
return BlobService(self.account_name, self.account_key)
def create_table_service(self):
return TableService(self.account_name, self.account_key)
def create_queue_service(self):
return QueueService(self.account_name, self.account_key)
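# Minimal usage sketch (account name and key are placeholders):
#   account = CloudStorageAccount('myaccount', 'mykey')
#   blob_service = account.create_blob_service()
#   queue_service = account.create_queue_service()
#   table_service = account.create_table_service()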


@@ -1,458 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import (
WindowsAzureConflictError,
WindowsAzureError,
DEV_QUEUE_HOST,
QUEUE_SERVICE_HOST_BASE,
xml_escape,
_convert_class_to_xml,
_dont_fail_not_exist,
_dont_fail_on_exist,
_get_request_body,
_int_or_none,
_parse_enum_results_list,
_parse_response,
_parse_response_for_dict_filter,
_parse_response_for_dict_prefix,
_str,
_str_or_none,
_update_request_uri_query_local_storage,
_validate_not_none,
_ERROR_CONFLICT,
)
from azure.http import (
HTTPRequest,
HTTP_RESPONSE_NO_CONTENT,
)
from azure.storage import (
Queue,
QueueEnumResults,
QueueMessagesList,
StorageServiceProperties,
_update_storage_queue_header,
)
from azure.storage.storageclient import _StorageClient
class QueueService(_StorageClient):
'''
This is the main class managing queue resources.
'''
def __init__(self, account_name=None, account_key=None, protocol='https',
host_base=QUEUE_SERVICE_HOST_BASE, dev_host=DEV_QUEUE_HOST):
'''
account_name: your storage account name, required for all operations.
account_key: your storage account key, required for all operations.
protocol: Optional. Protocol. Defaults to https.
host_base:
Optional. Live host base url. Defaults to Azure url. Override this
for on-premise.
dev_host: Optional. Dev host url. Defaults to localhost.
'''
super(QueueService, self).__init__(
account_name, account_key, protocol, host_base, dev_host)
def get_queue_service_properties(self, timeout=None):
'''
Gets the properties of a storage account's Queue Service, including
Windows Azure Storage Analytics.
timeout: Optional. The timeout parameter is expressed in seconds.
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/?restype=service&comp=properties'
request.query = [('timeout', _int_or_none(timeout))]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
response = self._perform_request(request)
return _parse_response(response, StorageServiceProperties)
def list_queues(self, prefix=None, marker=None, maxresults=None,
include=None):
'''
Lists all of the queues in a given storage account.
prefix:
Filters the results to return only queues with names that begin
with the specified prefix.
marker:
A string value that identifies the portion of the list to be
returned with the next list operation. The operation returns a
NextMarker element within the response body if the list returned
was not complete. This value may then be used as a query parameter
in a subsequent call to request the next portion of the list of
queues. The marker value is opaque to the client.
maxresults:
Specifies the maximum number of queues to return. If maxresults is
not specified, the server will return up to 5,000 items.
include:
Optional. Include this parameter to specify that the queues'
metadata be returned as part of the response body.
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/?comp=list'
request.query = [
('prefix', _str_or_none(prefix)),
('marker', _str_or_none(marker)),
('maxresults', _int_or_none(maxresults)),
('include', _str_or_none(include))
]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
response = self._perform_request(request)
return _parse_enum_results_list(
response, QueueEnumResults, "Queues", Queue)
def create_queue(self, queue_name, x_ms_meta_name_values=None,
fail_on_exist=False):
'''
Creates a queue under the given account.
queue_name: name of the queue.
x_ms_meta_name_values:
Optional. A dict containing name-value pairs to associate with the
queue as metadata.
fail_on_exist: Specify whether to throw an exception when the queue already exists.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + ''
request.headers = [('x-ms-meta-name-values', x_ms_meta_name_values)]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
if not fail_on_exist:
try:
response = self._perform_request(request)
if response.status == HTTP_RESPONSE_NO_CONTENT:
return False
return True
except WindowsAzureError as ex:
_dont_fail_on_exist(ex)
return False
else:
response = self._perform_request(request)
if response.status == HTTP_RESPONSE_NO_CONTENT:
raise WindowsAzureConflictError(
_ERROR_CONFLICT.format(response.message))
return True
def delete_queue(self, queue_name, fail_not_exist=False):
'''
Permanently deletes the specified queue.
queue_name: Name of the queue.
fail_not_exist:
Specify whether to throw an exception when the queue doesn't exist.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + ''
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
if not fail_not_exist:
try:
self._perform_request(request)
return True
except WindowsAzureError as ex:
_dont_fail_not_exist(ex)
return False
else:
self._perform_request(request)
return True
def get_queue_metadata(self, queue_name):
'''
Retrieves user-defined metadata and queue properties on the specified
queue. Metadata is associated with the queue as name-value pairs.
queue_name: Name of the queue.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '?comp=metadata'
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
response = self._perform_request(request)
return _parse_response_for_dict_prefix(
response,
prefixes=['x-ms-meta', 'x-ms-approximate-messages-count'])
def set_queue_metadata(self, queue_name, x_ms_meta_name_values=None):
'''
Sets user-defined metadata on the specified queue. Metadata is
associated with the queue as name-value pairs.
queue_name: Name of the queue.
x_ms_meta_name_values:
Optional. A dict containing name-value pairs to associate with the
queue as metadata.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '?comp=metadata'
request.headers = [('x-ms-meta-name-values', x_ms_meta_name_values)]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
self._perform_request(request)
def put_message(self, queue_name, message_text, visibilitytimeout=None,
messagettl=None):
'''
Adds a new message to the back of the message queue. A visibility
timeout can also be specified to make the message invisible until the
visibility timeout expires. A message must be in a format that can be
included in an XML request with UTF-8 encoding. The encoded message can
be up to 64KB in size for versions 2011-08-18 and newer, or 8KB in size
for previous versions.
queue_name: Name of the queue.
message_text: Message content.
visibilitytimeout:
Optional. If not specified, the default value is 0. Specifies the
new visibility timeout value, in seconds, relative to server time.
The new value must be larger than or equal to 0, and cannot be
larger than 7 days. The visibility timeout of a message cannot be
set to a value later than the expiry time. visibilitytimeout
should be set to a value smaller than the time-to-live value.
messagettl:
Optional. Specifies the time-to-live interval for the message, in
seconds. The maximum time-to-live allowed is 7 days. If this
parameter is omitted, the default time-to-live is 7 days.
'''
_validate_not_none('queue_name', queue_name)
_validate_not_none('message_text', message_text)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '/messages'
request.query = [
('visibilitytimeout', _str_or_none(visibilitytimeout)),
('messagettl', _str_or_none(messagettl))
]
request.body = _get_request_body(
'<?xml version="1.0" encoding="utf-8"?> \
<QueueMessage> \
<MessageText>' + xml_escape(_str(message_text)) + '</MessageText> \
</QueueMessage>')
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
self._perform_request(request)
def get_messages(self, queue_name, numofmessages=None,
visibilitytimeout=None):
'''
Retrieves one or more messages from the front of the queue.
queue_name: Name of the queue.
numofmessages:
Optional. A nonzero integer value that specifies the number of
messages to retrieve from the queue, up to a maximum of 32. If
fewer are visible, the visible messages are returned. By default,
a single message is retrieved from the queue with this operation.
visibilitytimeout:
Specifies the new visibility timeout value, in seconds, relative
to server time. The new value must be larger than or equal to 1
second, and cannot be larger than 7 days, or larger than 2 hours
on REST protocol versions prior to version 2011-08-18. The
visibility timeout of a message can be set to a value later than
the expiry time.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '/messages'
request.query = [
('numofmessages', _str_or_none(numofmessages)),
('visibilitytimeout', _str_or_none(visibilitytimeout))
]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
response = self._perform_request(request)
return _parse_response(response, QueueMessagesList)
def peek_messages(self, queue_name, numofmessages=None):
'''
Retrieves one or more messages from the front of the queue, but does
not alter the visibility of the message.
queue_name: Name of the queue.
numofmessages:
Optional. A nonzero integer value that specifies the number of
messages to peek from the queue, up to a maximum of 32. By default,
a single message is peeked from the queue with this operation.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '/messages?peekonly=true'
request.query = [('numofmessages', _str_or_none(numofmessages))]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
response = self._perform_request(request)
return _parse_response(response, QueueMessagesList)
def delete_message(self, queue_name, message_id, popreceipt):
'''
Deletes the specified message.
queue_name: Name of the queue.
message_id: Message to delete.
popreceipt:
Required. A valid pop receipt value returned from an earlier call
to the Get Messages or Update Message operation.
'''
_validate_not_none('queue_name', queue_name)
_validate_not_none('message_id', message_id)
_validate_not_none('popreceipt', popreceipt)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + \
_str(queue_name) + '/messages/' + _str(message_id) + ''
request.query = [('popreceipt', _str_or_none(popreceipt))]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
self._perform_request(request)
def clear_messages(self, queue_name):
'''
Deletes all messages from the specified queue.
queue_name: Name of the queue.
'''
_validate_not_none('queue_name', queue_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + _str(queue_name) + '/messages'
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
self._perform_request(request)
def update_message(self, queue_name, message_id, message_text, popreceipt,
visibilitytimeout):
'''
Updates the visibility timeout of a message. You can also use this
operation to update the contents of a message.
queue_name: Name of the queue.
message_id: Message to update.
message_text: Content of message.
popreceipt:
Required. A valid pop receipt value returned from an earlier call
to the Get Messages or Update Message operation.
visibilitytimeout:
Required. Specifies the new visibility timeout value, in seconds,
relative to server time. The new value must be larger than or equal
to 0, and cannot be larger than 7 days. The visibility timeout of a
message cannot be set to a value later than the expiry time. A
message can be updated until it has been deleted or has expired.
'''
_validate_not_none('queue_name', queue_name)
_validate_not_none('message_id', message_id)
_validate_not_none('message_text', message_text)
_validate_not_none('popreceipt', popreceipt)
_validate_not_none('visibilitytimeout', visibilitytimeout)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + \
_str(queue_name) + '/messages/' + _str(message_id) + ''
request.query = [
('popreceipt', _str_or_none(popreceipt)),
('visibilitytimeout', _str_or_none(visibilitytimeout))
]
request.body = _get_request_body(
'<?xml version="1.0" encoding="utf-8"?> \
<QueueMessage> \
<MessageText>' + xml_escape(_str(message_text)) + '</MessageText> \
</QueueMessage>')
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
response = self._perform_request(request)
return _parse_response_for_dict_filter(
response,
filter=['x-ms-popreceipt', 'x-ms-time-next-visible'])
def set_queue_service_properties(self, storage_service_properties,
timeout=None):
'''
Sets the properties of a storage account's Queue service, including
Windows Azure Storage Analytics.
storage_service_properties: StorageServiceProperties object.
timeout: Optional. The timeout parameter is expressed in seconds.
'''
_validate_not_none('storage_service_properties',
storage_service_properties)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/?restype=service&comp=properties'
request.query = [('timeout', _int_or_none(timeout))]
request.body = _get_request_body(
_convert_class_to_xml(storage_service_properties))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_queue_header(
request, self.account_name, self.account_key)
self._perform_request(request)
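# End-to-end usage sketch (credentials are placeholders; message attribute
# names follow the QueueMessage model defined elsewhere in this SDK):
#   queue_service = QueueService(account_name='myaccount', account_key='mykey')
#   queue_service.create_queue('taskqueue')
#   queue_service.put_message('taskqueue', 'hello world')
#   for message in queue_service.get_messages('taskqueue'):
#       print(message.message_text)
#       queue_service.delete_message('taskqueue', message.message_id,
#                                    message.pop_receipt)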


@@ -1,231 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import _sign_string, url_quote
from azure.storage import X_MS_VERSION
#-------------------------------------------------------------------------
# Constants for the share access signature
SIGNED_VERSION = 'sv'
SIGNED_START = 'st'
SIGNED_EXPIRY = 'se'
SIGNED_RESOURCE = 'sr'
SIGNED_PERMISSION = 'sp'
SIGNED_IDENTIFIER = 'si'
SIGNED_SIGNATURE = 'sig'
SIGNED_VERSION = 'sv'
RESOURCE_BLOB = 'b'
RESOURCE_CONTAINER = 'c'
SIGNED_RESOURCE_TYPE = 'resource'
SHARED_ACCESS_PERMISSION = 'permission'
#--------------------------------------------------------------------------
class WebResource(object):
'''
Represents the web resource for which a shared access signature is
requested.
path: the resource path.
properties: dict of names and values. Contains 2 items: resource type
and permission.
request_url: the url of the web resource, including all query
parameters.
'''
def __init__(self, path=None, request_url=None, properties=None):
self.path = path
self.properties = properties or {}
self.request_url = request_url
class Permission(object):
'''
Permission class. Contains the path and query_string for the path.
path: the resource path
query_string: dict of name, values. Contains SIGNED_START, SIGNED_EXPIRY
SIGNED_RESOURCE, SIGNED_PERMISSION, SIGNED_IDENTIFIER,
SIGNED_SIGNATURE name values.
'''
def __init__(self, path=None, query_string=None):
self.path = path
self.query_string = query_string
class SharedAccessPolicy(object):
''' SharedAccessPolicy class. '''
def __init__(self, access_policy, signed_identifier=None):
self.id = signed_identifier
self.access_policy = access_policy
class SharedAccessSignature(object):
'''
The main class used to sign requests and generate shared access
signatures.
account_name:
the storage account name used to generate the shared access signature
account_key: the access key used to generate the shared access signature
permission_set: the permission cache used to sign the request url.
'''
def __init__(self, account_name, account_key, permission_set=None):
self.account_name = account_name
self.account_key = account_key
self.permission_set = permission_set
def generate_signed_query_string(self, path, resource_type,
shared_access_policy,
version=X_MS_VERSION):
'''
Generates the query string for path, resource type and shared access
policy.
path: the resource path
resource_type: could be blob or container
shared_access_policy: shared access policy
version:
x-ms-version for storage service, or None to get a signed query
string compatible with pre 2012-02-12 clients, where the version
is not included in the query string.
'''
query_string = {}
if shared_access_policy.access_policy.start:
query_string[
SIGNED_START] = shared_access_policy.access_policy.start
if version:
query_string[SIGNED_VERSION] = version
query_string[SIGNED_EXPIRY] = shared_access_policy.access_policy.expiry
query_string[SIGNED_RESOURCE] = resource_type
query_string[
SIGNED_PERMISSION] = shared_access_policy.access_policy.permission
if shared_access_policy.id:
query_string[SIGNED_IDENTIFIER] = shared_access_policy.id
query_string[SIGNED_SIGNATURE] = self._generate_signature(
path, shared_access_policy, version)
return query_string
def sign_request(self, web_resource):
''' Signs the request by appending the shared access signature query
string to the request_url of the given web_resource.'''
if self.permission_set:
for shared_access_signature in self.permission_set:
if self._permission_matches_request(
shared_access_signature, web_resource,
web_resource.properties[
SIGNED_RESOURCE_TYPE],
web_resource.properties[SHARED_ACCESS_PERMISSION]):
if web_resource.request_url.find('?') == -1:
web_resource.request_url += '?'
else:
web_resource.request_url += '&'
web_resource.request_url += self._convert_query_string(
shared_access_signature.query_string)
break
return web_resource
def _convert_query_string(self, query_string):
''' Converts the query string dict to a str. The order of the
name/value pairs is significant and must be preserved.'''
convert_str = ''
if SIGNED_START in query_string:
convert_str += SIGNED_START + '=' + \
url_quote(query_string[SIGNED_START]) + '&'
convert_str += SIGNED_EXPIRY + '=' + \
url_quote(query_string[SIGNED_EXPIRY]) + '&'
convert_str += SIGNED_PERMISSION + '=' + \
query_string[SIGNED_PERMISSION] + '&'
convert_str += SIGNED_RESOURCE + '=' + \
query_string[SIGNED_RESOURCE] + '&'
if SIGNED_IDENTIFIER in query_string:
convert_str += SIGNED_IDENTIFIER + '=' + \
query_string[SIGNED_IDENTIFIER] + '&'
if SIGNED_VERSION in query_string:
convert_str += SIGNED_VERSION + '=' + \
query_string[SIGNED_VERSION] + '&'
convert_str += SIGNED_SIGNATURE + '=' + \
url_quote(query_string[SIGNED_SIGNATURE]) + '&'
return convert_str
def _generate_signature(self, path, shared_access_policy, version):
''' Generates signature for a given path and shared access policy. '''
def get_value_to_append(value, no_new_line=False):
return_value = ''
if value:
return_value = value
if not no_new_line:
return_value += '\n'
return return_value
if path[0] != '/':
path = '/' + path
canonicalized_resource = '/' + self.account_name + path
# Form the string to sign from shared_access_policy and canonicalized
# resource. The order of values is important.
string_to_sign = \
(get_value_to_append(shared_access_policy.access_policy.permission) +
get_value_to_append(shared_access_policy.access_policy.start) +
get_value_to_append(shared_access_policy.access_policy.expiry) +
get_value_to_append(canonicalized_resource))
if version:
string_to_sign += get_value_to_append(shared_access_policy.id)
string_to_sign += get_value_to_append(version, True)
else:
string_to_sign += get_value_to_append(shared_access_policy.id, True)
return self._sign(string_to_sign)
def _permission_matches_request(self, shared_access_signature,
web_resource, resource_type,
required_permission):
''' Check whether requested permission matches given
shared_access_signature, web_resource and resource type. '''
required_resource_type = resource_type
if required_resource_type == RESOURCE_BLOB:
required_resource_type += RESOURCE_CONTAINER
for name, value in shared_access_signature.query_string.items():
if name == SIGNED_RESOURCE and \
required_resource_type.find(value) == -1:
return False
elif name == SIGNED_PERMISSION and \
required_permission.find(value) == -1:
return False
return web_resource.path.find(shared_access_signature.path) != -1
def _sign(self, string_to_sign):
''' Uses HMAC-SHA256 to sign the string and returns the result as a
base64-encoded string. '''
return _sign_string(self.account_key, string_to_sign)
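# Usage sketch (AccessPolicy is defined elsewhere in azure.storage; all
# values are placeholders):
#   sas = SharedAccessSignature('myaccount', 'mykey')
#   policy = SharedAccessPolicy(AccessPolicy(start='2015-01-01',
#                                            expiry='2015-01-02',
#                                            permission='r'))
#   query = sas.generate_signed_query_string('/mycontainer/myblob',
#                                            RESOURCE_BLOB, policy)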


@@ -1,152 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import os
import sys
from azure import (
WindowsAzureError,
DEV_ACCOUNT_NAME,
DEV_ACCOUNT_KEY,
_ERROR_STORAGE_MISSING_INFO,
)
from azure.http import HTTPError
from azure.http.httpclient import _HTTPClient
from azure.storage import _storage_error_handler
#--------------------------------------------------------------------------
# constants for azure app setting environment variables
AZURE_STORAGE_ACCOUNT = 'AZURE_STORAGE_ACCOUNT'
AZURE_STORAGE_ACCESS_KEY = 'AZURE_STORAGE_ACCESS_KEY'
EMULATED = 'EMULATED'
#--------------------------------------------------------------------------
class _StorageClient(object):
'''
This is the base class for BlobService, TableService and QueueService.
'''
def __init__(self, account_name=None, account_key=None, protocol='https',
host_base='', dev_host=''):
'''
account_name: your storage account name, required for all operations.
account_key: your storage account key, required for all operations.
protocol: Optional. Protocol. Defaults to https.
host_base:
Optional. Live host base url. Defaults to Azure url. Override this
for on-premise.
dev_host: Optional. Dev host url. Defaults to localhost.
'''
self.account_name = account_name
self.account_key = account_key
self.requestid = None
self.protocol = protocol
self.host_base = host_base
self.dev_host = dev_host
# Assume the app is not running in the azure emulator; this is
# switched below if the EMULATED environment variable says otherwise.
self.use_local_storage = False
# check whether it is run in emulator.
if EMULATED in os.environ:
self.is_emulated = os.environ[EMULATED].lower() != 'false'
else:
self.is_emulated = False
# Get account_name and account_key. If they were not set at
# construction time, use the default development storage account and
# key when running in the emulator, otherwise read them from
# environment variables.
if not self.account_name or not self.account_key:
if self.is_emulated:
self.account_name = DEV_ACCOUNT_NAME
self.account_key = DEV_ACCOUNT_KEY
self.protocol = 'http'
self.use_local_storage = True
else:
self.account_name = os.environ.get(AZURE_STORAGE_ACCOUNT)
self.account_key = os.environ.get(AZURE_STORAGE_ACCESS_KEY)
if not self.account_name or not self.account_key:
raise WindowsAzureError(_ERROR_STORAGE_MISSING_INFO)
self._httpclient = _HTTPClient(
service_instance=self,
account_key=self.account_key,
account_name=self.account_name,
protocol=self.protocol)
self._batchclient = None
self._filter = self._perform_request_worker
def with_filter(self, filter):
'''
Returns a new service which will process requests with the specified
filter. Filtering operations can include logging, automatic retrying,
etc. The filter is a lambda which receives the HTTPRequest and
another lambda. The filter can perform any pre-processing on the
request, pass it off to the next lambda, and then perform any
post-processing on the response.
'''
res = type(self)(self.account_name, self.account_key, self.protocol)
old_filter = self._filter
def new_filter(request):
return filter(request, old_filter)
res._filter = new_filter
return res
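# Example filter sketch (log_filter is a hypothetical helper, not part of
# this file): pre-process the request, delegate to the next filter, then
# post-process the response.
#   def log_filter(request, next_filter):
#       print(request.method, request.path)
#       return next_filter(request)
#   logged_service = service.with_filter(log_filter)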
def set_proxy(self, host, port, user=None, password=None):
'''
Sets the proxy server host and port for the HTTP CONNECT Tunnelling.
host: Address of the proxy. Ex: '192.168.0.100'
port: Port of the proxy. Ex: 6000
user: User for proxy authorization.
password: Password for proxy authorization.
'''
self._httpclient.set_proxy(host, port, user, password)
def _get_host(self):
if self.use_local_storage:
return self.dev_host
else:
return self.account_name + self.host_base
def _perform_request_worker(self, request):
return self._httpclient.perform_request(request)
def _perform_request(self, request, text_encoding='utf-8'):
'''
Sends the request and returns the response. Catches HTTPError and
hands it to the error handler.
'''
try:
if self._batchclient is not None:
return self._batchclient.insert_request_to_batch(request)
else:
resp = self._filter(request)
if sys.version_info >= (3,) and isinstance(resp, bytes) and \
text_encoding:
resp = resp.decode(text_encoding)
except HTTPError as ex:
_storage_error_handler(ex)
return resp


@@ -1,491 +0,0 @@
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from azure import (
WindowsAzureError,
TABLE_SERVICE_HOST_BASE,
DEV_TABLE_HOST,
_convert_class_to_xml,
_convert_response_to_feeds,
_dont_fail_not_exist,
_dont_fail_on_exist,
_get_request_body,
_int_or_none,
_parse_response,
_parse_response_for_dict,
_parse_response_for_dict_filter,
_str,
_str_or_none,
_update_request_uri_query_local_storage,
_validate_not_none,
)
from azure.http import HTTPRequest
from azure.http.batchclient import _BatchClient
from azure.storage import (
StorageServiceProperties,
_convert_entity_to_xml,
_convert_response_to_entity,
_convert_table_to_xml,
_convert_xml_to_entity,
_convert_xml_to_table,
_sign_storage_table_request,
_update_storage_table_header,
)
from azure.storage.storageclient import _StorageClient
class TableService(_StorageClient):
'''
This is the main class managing Table resources.
'''
def __init__(self, account_name=None, account_key=None, protocol='https',
host_base=TABLE_SERVICE_HOST_BASE, dev_host=DEV_TABLE_HOST):
'''
account_name: your storage account name, required for all operations.
account_key: your storage account key, required for all operations.
protocol: Optional. Protocol. Defaults to https.
host_base:
Optional. Live host base url. Defaults to Azure url. Override this
for on-premise.
dev_host: Optional. Dev host url. Defaults to localhost.
'''
super(TableService, self).__init__(
account_name, account_key, protocol, host_base, dev_host)
def begin_batch(self):
if self._batchclient is None:
self._batchclient = _BatchClient(
service_instance=self,
account_key=self.account_key,
account_name=self.account_name)
return self._batchclient.begin_batch()
def commit_batch(self):
try:
ret = self._batchclient.commit_batch()
finally:
self._batchclient = None
return ret
def cancel_batch(self):
self._batchclient = None
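# Batch usage sketch: operations issued between begin_batch() and
# commit_batch() are collected by the _BatchClient and sent as a single
# request; cancel_batch() simply discards the collector.
#   table_service.begin_batch()
#   table_service.insert_entity('tasktable',
#                               {'PartitionKey': 'p1', 'RowKey': '1'})
#   table_service.commit_batch()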
def get_table_service_properties(self):
'''
Gets the properties of a storage account's Table service, including
Windows Azure Storage Analytics.
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/?restype=service&comp=properties'
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response(response, StorageServiceProperties)
def set_table_service_properties(self, storage_service_properties):
'''
Sets the properties of a storage account's Table Service, including
Windows Azure Storage Analytics.
storage_service_properties: StorageServiceProperties object.
'''
_validate_not_none('storage_service_properties',
storage_service_properties)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/?restype=service&comp=properties'
request.body = _get_request_body(
_convert_class_to_xml(storage_service_properties))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict(response)
def query_tables(self, table_name=None, top=None, next_table_name=None):
'''
Returns a list of tables under the specified account.
table_name: Optional. The specific table to query.
top: Optional. Maximum number of tables to return.
next_table_name:
Optional. When top is used, the next table name is stored in
result.x_ms_continuation['NextTableName']
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
if table_name is not None:
uri_part_table_name = "('" + table_name + "')"
else:
uri_part_table_name = ""
request.path = '/Tables' + uri_part_table_name + ''
request.query = [
('$top', _int_or_none(top)),
('NextTableName', _str_or_none(next_table_name))
]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _convert_response_to_feeds(response, _convert_xml_to_table)
def create_table(self, table, fail_on_exist=False):
'''
Creates a new table in the storage account.
table:
Name of the table to create. Table name may contain only
alphanumeric characters and cannot begin with a numeric character.
It is case-insensitive and must be from 3 to 63 characters long.
fail_on_exist: Specify whether to throw an exception when the table already exists.
'''
_validate_not_none('table', table)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/Tables'
request.body = _get_request_body(_convert_table_to_xml(table))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
if not fail_on_exist:
try:
self._perform_request(request)
return True
except WindowsAzureError as ex:
_dont_fail_on_exist(ex)
return False
else:
self._perform_request(request)
return True
def delete_table(self, table_name, fail_not_exist=False):
'''
Permanently deletes the specified table.
table_name: Name of the table to delete.
fail_not_exist:
Specify whether to throw an exception when the table doesn't exist.
'''
_validate_not_none('table_name', table_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/Tables(\'' + _str(table_name) + '\')'
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
if not fail_not_exist:
try:
self._perform_request(request)
return True
except WindowsAzureError as ex:
_dont_fail_not_exist(ex)
return False
else:
self._perform_request(request)
return True
def get_entity(self, table_name, partition_key, row_key, select=''):
'''
Get an entity in a table; includes the $select options.
table_name: Table the entity resides in.
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
select: Property names to select.
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('select', select)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(table_name) + \
'(PartitionKey=\'' + _str(partition_key) + \
'\',RowKey=\'' + \
_str(row_key) + '\')?$select=' + \
_str(select) + ''
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _convert_response_to_entity(response)
def query_entities(self, table_name, filter=None, select=None, top=None,
next_partition_key=None, next_row_key=None):
'''
Get entities in a table; includes the $filter and $select options.
table_name: Table to query.
filter:
Optional. Filter as described at
http://msdn.microsoft.com/en-us/library/windowsazure/dd894031.aspx
select: Optional. Property names to select from the entities.
top: Optional. Maximum number of entities to return.
next_partition_key:
Optional. When top is used, the next partition key is stored in
result.x_ms_continuation['NextPartitionKey']
next_row_key:
Optional. When top is used, the next row key is stored in
result.x_ms_continuation['NextRowKey']
'''
_validate_not_none('table_name', table_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(table_name) + '()'
request.query = [
('$filter', _str_or_none(filter)),
('$select', _str_or_none(select)),
('$top', _int_or_none(top)),
('NextPartitionKey', _str_or_none(next_partition_key)),
('NextRowKey', _str_or_none(next_row_key))
]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _convert_response_to_feeds(response, _convert_xml_to_entity)
def insert_entity(self, table_name, entity,
content_type='application/atom+xml'):
'''
Inserts a new entity into a table.
table_name: Table name.
entity:
Required. The entity to insert. Can be a dict or an entity object.
content_type: Required. Must be set to application/atom+xml
'''
_validate_not_none('table_name', table_name)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(table_name) + ''
request.headers = [('Content-Type', _str_or_none(content_type))]
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _convert_response_to_entity(response)
def update_entity(self, table_name, partition_key, row_key, entity,
content_type='application/atom+xml', if_match='*'):
'''
Updates an existing entity in a table. The Update Entity operation
replaces the entire entity and can be used to remove properties.
table_name: Table name.
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
entity:
Required. The entity to update. Can be a dict or an entity object.
content_type: Required. Must be set to application/atom+xml
if_match:
Optional. Specifies the condition under which the update should be
performed. To force an unconditional update, set to the wildcard
character (*).
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [
('Content-Type', _str_or_none(content_type)),
('If-Match', _str_or_none(if_match))
]
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict_filter(response, filter=['etag'])
def merge_entity(self, table_name, partition_key, row_key, entity,
content_type='application/atom+xml', if_match='*'):
'''
Updates an existing entity by updating the entity's properties. This
operation does not replace the existing entity as the Update Entity
operation does.
table_name: Table name.
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
entity:
Required. The entity to merge. Can be a dict or an entity object.
content_type: Required. Must be set to application/atom+xml
if_match:
Optional. Specifies the condition under which the merge should be
performed. To force an unconditional merge, set to the wildcard
character (*).
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'MERGE'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [
('Content-Type', _str_or_none(content_type)),
('If-Match', _str_or_none(if_match))
]
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict_filter(response, filter=['etag'])
def delete_entity(self, table_name, partition_key, row_key,
content_type='application/atom+xml', if_match='*'):
'''
Deletes an existing entity in a table.
table_name: Table name.
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
content_type: Required. Must be set to application/atom+xml
if_match:
Optional. Specifies the condition under which the delete should be
performed. To force an unconditional delete, set to the wildcard
character (*).
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('content_type', content_type)
_validate_not_none('if_match', if_match)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [
('Content-Type', _str_or_none(content_type)),
('If-Match', _str_or_none(if_match))
]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
self._perform_request(request)
def insert_or_replace_entity(self, table_name, partition_key, row_key,
entity, content_type='application/atom+xml'):
'''
Replaces an existing entity or inserts a new entity if it does not
exist in the table. Because this operation can insert or update an
entity, it is also known as an "upsert" operation.
table_name: Table name.
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
entity:
Required. The entity to insert or replace. Can be a dict or an
entity object.
content_type: Required. Must be set to application/atom+xml
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [('Content-Type', _str_or_none(content_type))]
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict_filter(response, filter=['etag'])
def insert_or_merge_entity(self, table_name, partition_key, row_key,
entity, content_type='application/atom+xml'):
'''
Merges an existing entity or inserts a new entity if it does not exist
in the table. Because this operation can insert or update an entity,
it is also known as an "upsert" operation.
table_name: Table name.
partition_key: PartitionKey of the entity.
row_key: RowKey of the entity.
entity:
Required. The entity to insert or merge. Can be a dict or an
entity object.
content_type: Required. Must be set to application/atom+xml
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'MERGE'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [('Content-Type', _str_or_none(content_type))]
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict_filter(response, filter=['etag'])
def _perform_request_worker(self, request):
auth = _sign_storage_table_request(request,
self.account_name,
self.account_key)
request.headers.append(('Authorization', auth))
return self._httpclient.perform_request(request)
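# End-to-end usage sketch (credentials are placeholders):
#   table_service = TableService(account_name='myaccount', account_key='mykey')
#   table_service.create_table('tasktable')
#   table_service.insert_entity(
#       'tasktable', {'PartitionKey': 'p1', 'RowKey': '1', 'Age': 23})
#   entity = table_service.get_entity('tasktable', 'p1', '1')
#   entity.Age  # -> 23, decoded via _ENTITY_TO_PYTHON_CONVERSIONS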


@@ -1,24 +0,0 @@
# -*- coding: utf-8 -*-
"""
babel
~~~~~
Integrated collection of utilities that assist in internationalizing and
localizing applications.
This package is basically composed of two major parts:
* tools to build and work with ``gettext`` message catalogs
* a Python interface to the CLDR (Common Locale Data Repository), providing
access to various locale display names, localized number and date
formatting, etc.
:copyright: (c) 2013 by the Babel Team.
:license: BSD, see LICENSE for more details.
"""
from babel.core import UnknownLocaleError, Locale, default_locale, \
negotiate_locale, parse_locale, get_locale_identifier
__version__ = '1.3'


@@ -1,51 +0,0 @@
import sys
PY2 = sys.version_info[0] == 2
_identity = lambda x: x
if not PY2:
text_type = str
string_types = (str,)
integer_types = (int, )
unichr = chr
text_to_native = lambda s, enc: s
iterkeys = lambda d: iter(d.keys())
itervalues = lambda d: iter(d.values())
iteritems = lambda d: iter(d.items())
from io import StringIO, BytesIO
import pickle
izip = zip
imap = map
range_type = range
cmp = lambda a, b: (a > b) - (a < b)
else:
text_type = unicode
string_types = (str, unicode)
integer_types = (int, long)
text_to_native = lambda s, enc: s.encode(enc)
unichr = unichr
iterkeys = lambda d: d.iterkeys()
itervalues = lambda d: d.itervalues()
iteritems = lambda d: d.iteritems()
from cStringIO import StringIO as BytesIO
from StringIO import StringIO
import cPickle as pickle
from itertools import izip, imap
range_type = xrange
cmp = cmp
number_types = integer_types + (float,)
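# Usage sketch: these aliases let the rest of babel stay version-agnostic.
#   >>> isinstance(u'caf\xe9', text_type)
#   True
#   >>> list(iterkeys({'a': 1}))
#   ['a']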


@@ -1,941 +0,0 @@
# -*- coding: utf-8 -*-
"""
babel.core
~~~~~~~~~~
Core locale representation and locale data access.
:copyright: (c) 2013 by the Babel Team.
:license: BSD, see LICENSE for more details.
"""
import os
from babel import localedata
from babel._compat import pickle, string_types
__all__ = ['UnknownLocaleError', 'Locale', 'default_locale', 'negotiate_locale',
'parse_locale']
_global_data = None
def _raise_no_data_error():
raise RuntimeError('The babel data files are not available. '
'This usually happens because you are using '
'a source checkout from Babel and you did '
'not build the data files. Just make sure '
'to run "python setup.py import_cldr" before '
'installing the library.')
def get_global(key):
"""Return the dictionary for the given key in the global data.
The global data is stored in the ``babel/global.dat`` file and contains
information independent of individual locales.
>>> get_global('zone_aliases')['UTC']
u'Etc/GMT'
>>> get_global('zone_territories')['Europe/Berlin']
u'DE'
.. versionadded:: 0.9
:param key: the data key
"""
global _global_data
if _global_data is None:
dirname = os.path.join(os.path.dirname(__file__))
filename = os.path.join(dirname, 'global.dat')
if not os.path.isfile(filename):
_raise_no_data_error()
fileobj = open(filename, 'rb')
try:
_global_data = pickle.load(fileobj)
finally:
fileobj.close()
return _global_data.get(key, {})
LOCALE_ALIASES = {
'ar': 'ar_SY', 'bg': 'bg_BG', 'bs': 'bs_BA', 'ca': 'ca_ES', 'cs': 'cs_CZ',
'da': 'da_DK', 'de': 'de_DE', 'el': 'el_GR', 'en': 'en_US', 'es': 'es_ES',
'et': 'et_EE', 'fa': 'fa_IR', 'fi': 'fi_FI', 'fr': 'fr_FR', 'gl': 'gl_ES',
'he': 'he_IL', 'hu': 'hu_HU', 'id': 'id_ID', 'is': 'is_IS', 'it': 'it_IT',
'ja': 'ja_JP', 'km': 'km_KH', 'ko': 'ko_KR', 'lt': 'lt_LT', 'lv': 'lv_LV',
'mk': 'mk_MK', 'nl': 'nl_NL', 'nn': 'nn_NO', 'no': 'nb_NO', 'pl': 'pl_PL',
'pt': 'pt_PT', 'ro': 'ro_RO', 'ru': 'ru_RU', 'sk': 'sk_SK', 'sl': 'sl_SI',
'sv': 'sv_SE', 'th': 'th_TH', 'tr': 'tr_TR', 'uk': 'uk_UA'
}
class UnknownLocaleError(Exception):
    """Exception thrown when a locale is requested for which no locale data
    is available.
    """

    def __init__(self, identifier):
        """Create the exception.

        :param identifier: the identifier string of the unsupported locale
        """
        Exception.__init__(self, 'unknown locale %r' % identifier)

        #: The identifier of the locale that could not be found.
        self.identifier = identifier


class Locale(object):
    """Representation of a specific locale.

    >>> locale = Locale('en', 'US')
    >>> repr(locale)
    "Locale('en', territory='US')"
    >>> locale.display_name
    u'English (United States)'

    A `Locale` object can also be instantiated from a raw locale string:

    >>> locale = Locale.parse('en-US', sep='-')
    >>> repr(locale)
    "Locale('en', territory='US')"

    `Locale` objects provide access to a collection of locale data, such as
    territory and language names, number and date format patterns, and more:

    >>> locale.number_symbols['decimal']
    u'.'

    If a locale is requested for which no locale data is available, an
    `UnknownLocaleError` is raised:

    >>> Locale.parse('en_DE')
    Traceback (most recent call last):
        ...
    UnknownLocaleError: unknown locale 'en_DE'

    For more information see :rfc:`3066`.
    """

    def __init__(self, language, territory=None, script=None, variant=None):
        """Initialize the locale object from the given identifier components.

        >>> locale = Locale('en', 'US')
        >>> locale.language
        'en'
        >>> locale.territory
        'US'

        :param language: the language code
        :param territory: the territory (country or region) code
        :param script: the script code
        :param variant: the variant code
        :raise `UnknownLocaleError`: if no locale data is available for the
                                     requested locale
        """
        #: the language code
        self.language = language
        #: the territory (country or region) code
        self.territory = territory
        #: the script code
        self.script = script
        #: the variant code
        self.variant = variant
        self.__data = None

        identifier = str(self)
        if not localedata.exists(identifier):
            raise UnknownLocaleError(identifier)

    @classmethod
    def default(cls, category=None, aliases=LOCALE_ALIASES):
        """Return the system default locale for the specified category.

        >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LC_MESSAGES']:
        ...     os.environ[name] = ''
        >>> os.environ['LANG'] = 'fr_FR.UTF-8'
        >>> Locale.default('LC_MESSAGES')
        Locale('fr', territory='FR')

        The following fallback variables are always considered:

        - ``LANGUAGE``
        - ``LC_ALL``
        - ``LC_CTYPE``
        - ``LANG``

        :param category: one of the ``LC_XXX`` environment variable names
        :param aliases: a dictionary of aliases for locale identifiers
        """
        # XXX: use likely subtag expansion here instead of the
        # aliases dictionary.
        locale_string = default_locale(category, aliases=aliases)
        return cls.parse(locale_string)

    @classmethod
    def negotiate(cls, preferred, available, sep='_', aliases=LOCALE_ALIASES):
        """Find the best match between available and requested locale strings.

        >>> Locale.negotiate(['de_DE', 'en_US'], ['de_DE', 'de_AT'])
        Locale('de', territory='DE')
        >>> Locale.negotiate(['de_DE', 'en_US'], ['en', 'de'])
        Locale('de')
        >>> Locale.negotiate(['de_DE', 'de'], ['en_US'])

        You can specify the character used in the locale identifiers to
        separate the different components. This separator is applied to both
        lists. Also, case is ignored in the comparison:

        >>> Locale.negotiate(['de-DE', 'de'], ['en-us', 'de-de'], sep='-')
        Locale('de', territory='DE')

        :param preferred: the list of locale identifiers preferred by the user
        :param available: the list of locale identifiers available
        :param aliases: a dictionary of aliases for locale identifiers
        """
        identifier = negotiate_locale(preferred, available, sep=sep,
                                      aliases=aliases)
        if identifier:
            return Locale.parse(identifier, sep=sep)
    @classmethod
    def parse(cls, identifier, sep='_', resolve_likely_subtags=True):
        """Create a `Locale` instance for the given locale identifier.

        >>> l = Locale.parse('de-DE', sep='-')
        >>> l.display_name
        u'Deutsch (Deutschland)'

        If the `identifier` parameter is not a string, but actually a `Locale`
        object, that object is returned:

        >>> Locale.parse(l)
        Locale('de', territory='DE')

        This can also resolve likely subtags, which it does by default. This
        is for instance useful to figure out the most likely locale for a
        territory; you can use ``'und'`` as the language tag:

        >>> Locale.parse('und_AT')
        Locale('de', territory='AT')

        :param identifier: the locale identifier string
        :param sep: optional component separator
        :param resolve_likely_subtags: if this is specified then a locale will
                                       have its likely subtag resolved if the
                                       locale otherwise does not exist. For
                                       instance ``zh_TW`` by itself is not a
                                       locale that exists but Babel can
                                       automatically expand it to the full
                                       form of ``zh_hant_TW``. Note that this
                                       expansion only takes place if no
                                       locale exists otherwise. For instance
                                       there is a locale ``en`` that can exist
                                       by itself.
        :raise `ValueError`: if the string does not appear to be a valid locale
                             identifier
        :raise `UnknownLocaleError`: if no locale data is available for the
                                     requested locale
        """
        if identifier is None:
            return None
        elif isinstance(identifier, Locale):
            return identifier
        elif not isinstance(identifier, string_types):
            raise TypeError('Unexpected value for identifier: %r' % (identifier,))

        parts = parse_locale(identifier, sep=sep)
        input_id = get_locale_identifier(parts)

        def _try_load(parts):
            try:
                return cls(*parts)
            except UnknownLocaleError:
                return None

        def _try_load_reducing(parts):
            # Success on first hit, return it.
            locale = _try_load(parts)
            if locale is not None:
                return locale

            # Now try without script and variant
            locale = _try_load(parts[:2])
            if locale is not None:
                return locale

        locale = _try_load(parts)
        if locale is not None:
            return locale
        if not resolve_likely_subtags:
            raise UnknownLocaleError(input_id)

        # From here onwards is some very bad likely subtag resolving. This
        # whole logic is not entirely correct but good enough (tm) for the
        # time being. This has been added so that zh_TW does not cause
        # errors for people when they upgrade. Later we should properly
        # implement ICU like fuzzy locale objects and provide a way to
        # maximize and minimize locale tags.

        language, territory, script, variant = parts
        language = get_global('language_aliases').get(language, language)
        territory = get_global('territory_aliases').get(territory, territory)
        script = get_global('script_aliases').get(script, script)
        variant = get_global('variant_aliases').get(variant, variant)

        if territory == 'ZZ':
            territory = None
        if script == 'Zzzz':
            script = None

        parts = language, territory, script, variant

        # First match: try the whole identifier
        new_id = get_locale_identifier(parts)
        likely_subtag = get_global('likely_subtags').get(new_id)
        if likely_subtag is not None:
            locale = _try_load_reducing(parse_locale(likely_subtag))
            if locale is not None:
                return locale

        # If we did not find anything so far, try again with a
        # simplified identifier that is just the language
        likely_subtag = get_global('likely_subtags').get(language)
        if likely_subtag is not None:
            language2, _, script2, variant2 = parse_locale(likely_subtag)
            locale = _try_load_reducing((language2, territory, script2, variant2))
            if locale is not None:
                return locale

        raise UnknownLocaleError(input_id)
    def __eq__(self, other):
        for key in ('language', 'territory', 'script', 'variant'):
            if not hasattr(other, key):
                return False
        return (self.language == other.language) and \
               (self.territory == other.territory) and \
               (self.script == other.script) and \
               (self.variant == other.variant)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        parameters = ['']
        for key in ('territory', 'script', 'variant'):
            value = getattr(self, key)
            if value is not None:
                parameters.append('%s=%r' % (key, value))
        parameter_string = '%r' % self.language + ', '.join(parameters)
        return 'Locale(%s)' % parameter_string

    def __str__(self):
        return get_locale_identifier((self.language, self.territory,
                                      self.script, self.variant))

    @property
    def _data(self):
        if self.__data is None:
            self.__data = localedata.LocaleDataDict(localedata.load(str(self)))
        return self.__data

    def get_display_name(self, locale=None):
        """Return the display name of the locale using the given locale.

        The display name will include the language, territory, script, and
        variant, if those are specified.

        >>> Locale('zh', 'CN', script='Hans').get_display_name('en')
        u'Chinese (Simplified, China)'

        :param locale: the locale to use
        """
        if locale is None:
            locale = self
        locale = Locale.parse(locale)
        retval = locale.languages.get(self.language)
        if self.territory or self.script or self.variant:
            details = []
            if self.script:
                details.append(locale.scripts.get(self.script))
            if self.territory:
                details.append(locale.territories.get(self.territory))
            if self.variant:
                details.append(locale.variants.get(self.variant))
            details = filter(None, details)
            if details:
                retval += ' (%s)' % u', '.join(details)
        return retval

    display_name = property(get_display_name, doc="""\
        The localized display name of the locale.

        >>> Locale('en').display_name
        u'English'
        >>> Locale('en', 'US').display_name
        u'English (United States)'
        >>> Locale('sv').display_name
        u'svenska'

        :type: `unicode`
        """)

    def get_language_name(self, locale=None):
        """Return the language of this locale in the given locale.

        >>> Locale('zh', 'CN', script='Hans').get_language_name('de')
        u'Chinesisch'

        .. versionadded:: 1.0

        :param locale: the locale to use
        """
        if locale is None:
            locale = self
        locale = Locale.parse(locale)
        return locale.languages.get(self.language)

    language_name = property(get_language_name, doc="""\
        The localized language name of the locale.

        >>> Locale('en', 'US').language_name
        u'English'
        """)

    def get_territory_name(self, locale=None):
        """Return the territory name in the given locale."""
        if locale is None:
            locale = self
        locale = Locale.parse(locale)
        return locale.territories.get(self.territory)

    territory_name = property(get_territory_name, doc="""\
        The localized territory name of the locale if available.

        >>> Locale('de', 'DE').territory_name
        u'Deutschland'
        """)

    def get_script_name(self, locale=None):
        """Return the script name in the given locale."""
        if locale is None:
            locale = self
        locale = Locale.parse(locale)
        return locale.scripts.get(self.script)

    script_name = property(get_script_name, doc="""\
        The localized script name of the locale if available.

        >>> Locale('ms', 'SG', script='Latn').script_name
        u'Latin'
        """)

    @property
    def english_name(self):
        """The English display name of the locale.

        >>> Locale('de').english_name
        u'German'
        >>> Locale('de', 'DE').english_name
        u'German (Germany)'

        :type: `unicode`"""
        return self.get_display_name(Locale('en'))
    #{ General Locale Display Names

    @property
    def languages(self):
        """Mapping of language codes to translated language names.

        >>> Locale('de', 'DE').languages['ja']
        u'Japanisch'

        See `ISO 639 <http://www.loc.gov/standards/iso639-2/>`_ for
        more information.
        """
        return self._data['languages']

    @property
    def scripts(self):
        """Mapping of script codes to translated script names.

        >>> Locale('en', 'US').scripts['Hira']
        u'Hiragana'

        See `ISO 15924 <http://www.evertype.com/standards/iso15924/>`_
        for more information.
        """
        return self._data['scripts']

    @property
    def territories(self):
        """Mapping of territory codes to translated territory names.

        >>> Locale('es', 'CO').territories['DE']
        u'Alemania'

        See `ISO 3166 <http://www.iso.org/iso/en/prods-services/iso3166ma/>`_
        for more information.
        """
        return self._data['territories']

    @property
    def variants(self):
        """Mapping of variant codes to translated variant names.

        >>> Locale('de', 'DE').variants['1901']
        u'Alte deutsche Rechtschreibung'
        """
        return self._data['variants']

    #{ Number Formatting

    @property
    def currencies(self):
        """Mapping of currency codes to translated currency names. This
        only returns the generic form of the currency name, not the count
        specific one. If an actual number is requested use the
        :func:`babel.numbers.get_currency_name` function.

        >>> Locale('en').currencies['COP']
        u'Colombian Peso'
        >>> Locale('de', 'DE').currencies['COP']
        u'Kolumbianischer Peso'
        """
        return self._data['currency_names']

    @property
    def currency_symbols(self):
        """Mapping of currency codes to symbols.

        >>> Locale('en', 'US').currency_symbols['USD']
        u'$'
        >>> Locale('es', 'CO').currency_symbols['USD']
        u'US$'
        """
        return self._data['currency_symbols']

    @property
    def number_symbols(self):
        """Symbols used in number formatting.

        >>> Locale('fr', 'FR').number_symbols['decimal']
        u','
        """
        return self._data['number_symbols']

    @property
    def decimal_formats(self):
        """Locale patterns for decimal number formatting.

        >>> Locale('en', 'US').decimal_formats[None]
        <NumberPattern u'#,##0.###'>
        """
        return self._data['decimal_formats']

    @property
    def currency_formats(self):
        """Locale patterns for currency number formatting.

        >>> print Locale('en', 'US').currency_formats[None]
        <NumberPattern u'\\xa4#,##0.00'>
        """
        return self._data['currency_formats']

    @property
    def percent_formats(self):
        """Locale patterns for percent number formatting.

        >>> Locale('en', 'US').percent_formats[None]
        <NumberPattern u'#,##0%'>
        """
        return self._data['percent_formats']

    @property
    def scientific_formats(self):
        """Locale patterns for scientific number formatting.

        >>> Locale('en', 'US').scientific_formats[None]
        <NumberPattern u'#E0'>
        """
        return self._data['scientific_formats']

    #{ Calendar Information and Date Formatting

    @property
    def periods(self):
        """Locale display names for day periods (AM/PM).

        >>> Locale('en', 'US').periods['am']
        u'AM'
        """
        return self._data['periods']

    @property
    def days(self):
        """Locale display names for weekdays.

        >>> Locale('de', 'DE').days['format']['wide'][3]
        u'Donnerstag'
        """
        return self._data['days']

    @property
    def months(self):
        """Locale display names for months.

        >>> Locale('de', 'DE').months['format']['wide'][10]
        u'Oktober'
        """
        return self._data['months']

    @property
    def quarters(self):
        """Locale display names for quarters.

        >>> Locale('de', 'DE').quarters['format']['wide'][1]
        u'1. Quartal'
        """
        return self._data['quarters']

    @property
    def eras(self):
        """Locale display names for eras.

        >>> Locale('en', 'US').eras['wide'][1]
        u'Anno Domini'
        >>> Locale('en', 'US').eras['abbreviated'][0]
        u'BC'
        """
        return self._data['eras']

    @property
    def time_zones(self):
        """Locale display names for time zones.

        >>> Locale('en', 'US').time_zones['Europe/London']['long']['daylight']
        u'British Summer Time'
        >>> Locale('en', 'US').time_zones['America/St_Johns']['city']
        u'St. John\u2019s'
        """
        return self._data['time_zones']

    @property
    def meta_zones(self):
        """Locale display names for meta time zones.

        Meta time zones are basically groups of different Olson time zones that
        have the same GMT offset and daylight savings time.

        >>> Locale('en', 'US').meta_zones['Europe_Central']['long']['daylight']
        u'Central European Summer Time'

        .. versionadded:: 0.9
        """
        return self._data['meta_zones']

    @property
    def zone_formats(self):
        """Patterns related to the formatting of time zones.

        >>> Locale('en', 'US').zone_formats['fallback']
        u'%(1)s (%(0)s)'
        >>> Locale('pt', 'BR').zone_formats['region']
        u'Hor\\xe1rio %s'

        .. versionadded:: 0.9
        """
        return self._data['zone_formats']

    @property
    def first_week_day(self):
        """The first day of a week, with 0 being Monday.

        >>> Locale('de', 'DE').first_week_day
        0
        >>> Locale('en', 'US').first_week_day
        6
        """
        return self._data['week_data']['first_day']

    @property
    def weekend_start(self):
        """The day the weekend starts, with 0 being Monday.

        >>> Locale('de', 'DE').weekend_start
        5
        """
        return self._data['week_data']['weekend_start']

    @property
    def weekend_end(self):
        """The day the weekend ends, with 0 being Monday.

        >>> Locale('de', 'DE').weekend_end
        6
        """
        return self._data['week_data']['weekend_end']

    @property
    def min_week_days(self):
        """The minimum number of days in a week so that the week is counted as
        the first week of a year or month.

        >>> Locale('de', 'DE').min_week_days
        4
        """
        return self._data['week_data']['min_days']

    @property
    def date_formats(self):
        """Locale patterns for date formatting.

        >>> Locale('en', 'US').date_formats['short']
        <DateTimePattern u'M/d/yy'>
        >>> Locale('fr', 'FR').date_formats['long']
        <DateTimePattern u'd MMMM y'>
        """
        return self._data['date_formats']

    @property
    def time_formats(self):
        """Locale patterns for time formatting.

        >>> Locale('en', 'US').time_formats['short']
        <DateTimePattern u'h:mm a'>
        >>> Locale('fr', 'FR').time_formats['long']
        <DateTimePattern u'HH:mm:ss z'>
        """
        return self._data['time_formats']

    @property
    def datetime_formats(self):
        """Locale patterns for datetime formatting.

        >>> Locale('en').datetime_formats['full']
        u"{1} 'at' {0}"
        >>> Locale('th').datetime_formats['medium']
        u'{1}, {0}'
        """
        return self._data['datetime_formats']

    @property
    def plural_form(self):
        """Plural rules for the locale.

        >>> Locale('en').plural_form(1)
        'one'
        >>> Locale('en').plural_form(0)
        'other'
        >>> Locale('fr').plural_form(0)
        'one'
        >>> Locale('ru').plural_form(100)
        'many'
        """
        return self._data['plural_form']
def default_locale(category=None, aliases=LOCALE_ALIASES):
    """Returns the system default locale for a given category, based on
    environment variables.

    >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE']:
    ...     os.environ[name] = ''
    >>> os.environ['LANG'] = 'fr_FR.UTF-8'
    >>> default_locale('LC_MESSAGES')
    'fr_FR'

    The "C" or "POSIX" pseudo-locales are treated as aliases for the
    "en_US_POSIX" locale:

    >>> os.environ['LC_MESSAGES'] = 'POSIX'
    >>> default_locale('LC_MESSAGES')
    'en_US_POSIX'

    The following fallback variables are always considered:

    - ``LANGUAGE``
    - ``LC_ALL``
    - ``LC_CTYPE``
    - ``LANG``

    :param category: one of the ``LC_XXX`` environment variable names
    :param aliases: a dictionary of aliases for locale identifiers
    """
    varnames = (category, 'LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG')
    for name in filter(None, varnames):
        locale = os.getenv(name)
        if locale:
            if name == 'LANGUAGE' and ':' in locale:
                # the LANGUAGE variable may contain a colon-separated list of
                # language codes; we just pick the language on the list
                locale = locale.split(':')[0]
            if locale in ('C', 'POSIX'):
                locale = 'en_US_POSIX'
            elif aliases and locale in aliases:
                locale = aliases[locale]
            try:
                return get_locale_identifier(parse_locale(locale))
            except ValueError:
                pass


def negotiate_locale(preferred, available, sep='_', aliases=LOCALE_ALIASES):
    """Find the best match between available and requested locale strings.

    >>> negotiate_locale(['de_DE', 'en_US'], ['de_DE', 'de_AT'])
    'de_DE'
    >>> negotiate_locale(['de_DE', 'en_US'], ['en', 'de'])
    'de'

    Case is ignored by the algorithm; the result uses the case of the preferred
    locale identifier:

    >>> negotiate_locale(['de_DE', 'en_US'], ['de_de', 'de_at'])
    'de_DE'

    Some web browsers unfortunately do not include the territory in the locale
    identifier for many locales, and some don't even allow the user to easily
    add the territory. So while you may prefer using qualified locale
    identifiers in your web-application, they would not normally match the
    language-only locale sent by such browsers. To work around that, this
    function uses a default mapping of commonly used language-only locale
    identifiers to identifiers including the territory:

    >>> negotiate_locale(['ja', 'en_US'], ['ja_JP', 'en_US'])
    'ja_JP'

    Some browsers even use an incorrect or outdated language code, such as "no"
    for Norwegian, where the correct locale identifier would actually be "nb_NO"
    (Bokmål) or "nn_NO" (Nynorsk). The aliases are intended to take care of
    such cases, too:

    >>> negotiate_locale(['no', 'sv'], ['nb_NO', 'sv_SE'])
    'nb_NO'

    You can override this default mapping by passing a different `aliases`
    dictionary to this function, or you can bypass the behavior altogether by
    setting the `aliases` parameter to `None`.

    :param preferred: the list of locale strings preferred by the user
    :param available: the list of locale strings available
    :param sep: character that separates the different parts of the locale
                strings
    :param aliases: a dictionary of aliases for locale identifiers
    """
    available = [a.lower() for a in available if a]
    for locale in preferred:
        ll = locale.lower()
        if ll in available:
            return locale
        if aliases:
            alias = aliases.get(ll)
            if alias:
                alias = alias.replace('_', sep)
                if alias.lower() in available:
                    return alias
        parts = locale.split(sep)
        if len(parts) > 1 and parts[0].lower() in available:
            return parts[0]
    return None
def parse_locale(identifier, sep='_'):
    """Parse a locale identifier into a tuple of the form ``(language,
    territory, script, variant)``.

    >>> parse_locale('zh_CN')
    ('zh', 'CN', None, None)
    >>> parse_locale('zh_Hans_CN')
    ('zh', 'CN', 'Hans', None)

    The default component separator is "_", but a different separator can be
    specified using the `sep` parameter:

    >>> parse_locale('zh-CN', sep='-')
    ('zh', 'CN', None, None)

    If the identifier cannot be parsed into a locale, a `ValueError` exception
    is raised:

    >>> parse_locale('not_a_LOCALE_String')
    Traceback (most recent call last):
      ...
    ValueError: 'not_a_LOCALE_String' is not a valid locale identifier

    Encoding information and locale modifiers are removed from the identifier:

    >>> parse_locale('it_IT@euro')
    ('it', 'IT', None, None)
    >>> parse_locale('en_US.UTF-8')
    ('en', 'US', None, None)
    >>> parse_locale('de_DE.iso885915@euro')
    ('de', 'DE', None, None)

    See :rfc:`4646` for more information.

    :param identifier: the locale identifier string
    :param sep: character that separates the different components of the locale
                identifier
    :raise `ValueError`: if the string does not appear to be a valid locale
                         identifier
    """
    if '.' in identifier:
        # this is probably the charset/encoding, which we don't care about
        identifier = identifier.split('.', 1)[0]
    if '@' in identifier:
        # this is a locale modifier such as @euro, which we don't care about
        # either
        identifier = identifier.split('@', 1)[0]

    parts = identifier.split(sep)
    lang = parts.pop(0).lower()
    if not lang.isalpha():
        raise ValueError('expected only letters, got %r' % lang)

    script = territory = variant = None
    if parts:
        if len(parts[0]) == 4 and parts[0].isalpha():
            script = parts.pop(0).title()

    if parts:
        if len(parts[0]) == 2 and parts[0].isalpha():
            territory = parts.pop(0).upper()
        elif len(parts[0]) == 3 and parts[0].isdigit():
            territory = parts.pop(0)

    if parts:
        if len(parts[0]) == 4 and parts[0][0].isdigit() or \
                len(parts[0]) >= 5 and parts[0][0].isalpha():
            variant = parts.pop()

    if parts:
        raise ValueError('%r is not a valid locale identifier' % identifier)

    return lang, territory, script, variant


def get_locale_identifier(tup, sep='_'):
    """The reverse of :func:`parse_locale`. It creates a locale identifier out
    of a ``(language, territory, script, variant)`` tuple. Items can be set to
    ``None`` and trailing ``None``\\s can also be left out of the tuple.

    >>> get_locale_identifier(('de', 'DE', None, '1999'))
    'de_DE_1999'

    .. versionadded:: 1.0

    :param tup: the tuple as returned by :func:`parse_locale`.
    :param sep: the separator for the identifier.
    """
    tup = tuple(tup[:4])
    lang, territory, script, variant = tup + (None,) * (4 - len(tup))
    return sep.join(filter(None, (lang, script, territory, variant)))
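
Taken together, `Locale` and the module-level helpers above form Babel's entire locale-identification layer. A short usage sketch, not part of the diff; every expected value is lifted from the doctests in the file above.

# Sketch only; expected results are the ones documented above.
from babel.core import Locale, negotiate_locale, parse_locale

locale = Locale.parse('de-DE', sep='-')        # Locale('de', territory='DE')
print(locale.display_name)                     # u'Deutsch (Deutschland)'
print(negotiate_locale(['no', 'sv'], ['nb_NO', 'sv_SE']))  # 'nb_NO'
print(parse_locale('de_DE.iso885915@euro'))    # ('de', 'DE', None, None)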

File diff suppressed because it is too large

View File

@ -1,209 +0,0 @@
# -*- coding: utf-8 -*-
"""
    babel.localedata
    ~~~~~~~~~~~~~~~~

    Low-level locale data access.

    :note: The `Locale` class, which uses this module under the hood, provides
           a more convenient interface for accessing the locale data.

    :copyright: (c) 2013 by the Babel Team.
    :license: BSD, see LICENSE for more details.
"""

import os
import threading
from collections import MutableMapping

from babel._compat import pickle

_cache = {}
_cache_lock = threading.RLock()
_dirname = os.path.join(os.path.dirname(__file__), 'localedata')


def exists(name):
    """Check whether locale data is available for the given locale. The
    return value is `True` if it exists, `False` otherwise.

    :param name: the locale identifier string
    """
    if name in _cache:
        return True
    return os.path.exists(os.path.join(_dirname, '%s.dat' % name))


def locale_identifiers():
    """Return a list of all locale identifiers for which locale data is
    available.

    .. versionadded:: 0.8.1

    :return: a list of locale identifiers (strings)
    """
    return [stem for stem, extension in [
        os.path.splitext(filename) for filename in os.listdir(_dirname)
    ] if extension == '.dat' and stem != 'root']


def load(name, merge_inherited=True):
    """Load the locale data for the given locale.

    The locale data is a dictionary that contains much of the data defined by
    the Common Locale Data Repository (CLDR). This data is stored as a
    collection of pickle files inside the ``babel`` package.

    >>> d = load('en_US')
    >>> d['languages']['sv']
    u'Swedish'

    Note that the results are cached, and subsequent requests for the same
    locale return the same dictionary:

    >>> d1 = load('en_US')
    >>> d2 = load('en_US')
    >>> d1 is d2
    True

    :param name: the locale identifier string (or "root")
    :param merge_inherited: whether the inherited data should be merged into
                            the data of the requested locale
    :raise `IOError`: if no locale data file is found for the given locale
                      identifier, or one of the locales it inherits from
    """
    _cache_lock.acquire()
    try:
        data = _cache.get(name)
        if not data:
            # Load inherited data
            if name == 'root' or not merge_inherited:
                data = {}
            else:
                parts = name.split('_')
                if len(parts) == 1:
                    parent = 'root'
                else:
                    parent = '_'.join(parts[:-1])
                data = load(parent).copy()
            filename = os.path.join(_dirname, '%s.dat' % name)
            fileobj = open(filename, 'rb')
            try:
                if name != 'root' and merge_inherited:
                    merge(data, pickle.load(fileobj))
                else:
                    data = pickle.load(fileobj)
                _cache[name] = data
            finally:
                fileobj.close()
        return data
    finally:
        _cache_lock.release()


def merge(dict1, dict2):
    """Merge the data from `dict2` into the `dict1` dictionary, making copies
    of nested dictionaries.

    >>> d = {1: 'foo', 3: 'baz'}
    >>> merge(d, {1: 'Foo', 2: 'Bar'})
    >>> items = d.items(); items.sort(); items
    [(1, 'Foo'), (2, 'Bar'), (3, 'baz')]

    :param dict1: the dictionary to merge into
    :param dict2: the dictionary containing the data that should be merged
    """
    for key, val2 in dict2.items():
        if val2 is not None:
            val1 = dict1.get(key)
            if isinstance(val2, dict):
                if val1 is None:
                    val1 = {}
                if isinstance(val1, Alias):
                    val1 = (val1, val2)
                elif isinstance(val1, tuple):
                    alias, others = val1
                    others = others.copy()
                    merge(others, val2)
                    val1 = (alias, others)
                else:
                    val1 = val1.copy()
                    merge(val1, val2)
            else:
                val1 = val2
            dict1[key] = val1


class Alias(object):
    """Representation of an alias in the locale data.

    An alias is a value that refers to some other part of the locale data,
    as specified by the `keys`.
    """

    def __init__(self, keys):
        self.keys = tuple(keys)

    def __repr__(self):
        return '<%s %r>' % (type(self).__name__, self.keys)

    def resolve(self, data):
        """Resolve the alias based on the given data.

        This is done recursively, so if one alias resolves to a second alias,
        that second alias will also be resolved.

        :param data: the locale data
        :type data: `dict`
        """
        base = data
        for key in self.keys:
            data = data[key]
        if isinstance(data, Alias):
            data = data.resolve(base)
        elif isinstance(data, tuple):
            alias, others = data
            data = alias.resolve(base)
        return data


class LocaleDataDict(MutableMapping):
    """Dictionary wrapper that automatically resolves aliases to the actual
    values.
    """

    def __init__(self, data, base=None):
        self._data = data
        if base is None:
            base = data
        self.base = base

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def __getitem__(self, key):
        orig = val = self._data[key]
        if isinstance(val, Alias):  # resolve an alias
            val = val.resolve(self.base)
        if isinstance(val, tuple):  # Merge a partial dict with an alias
            alias, others = val
            val = alias.resolve(self.base).copy()
            merge(val, others)
        if type(val) is dict:  # Return a nested alias-resolving dict
            val = LocaleDataDict(val, base=self.base)
        if val is not orig:
            self._data[key] = val
        return val

    def __setitem__(self, key, value):
        self._data[key] = value

    def __delitem__(self, key):
        del self._data[key]

    def copy(self):
        return LocaleDataDict(self._data.copy(), base=self.base)
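
A short sketch, not part of the diff, of how this loader is driven; the expected values come from the doctests above, and the explicit `merge` call mirrors the inheritance step `load` performs internally for non-root locales.

# Sketch only; values are the ones documented in the file above.
from babel import localedata

assert localedata.exists('en_US')
data = localedata.load('en_US')           # loads root -> en -> en_US, merged
print(data['languages']['sv'])            # u'Swedish'
assert localedata.load('en_US') is data   # results are cached

d = {1: 'foo', 3: 'baz'}
localedata.merge(d, {1: 'Foo', 2: 'Bar'})
assert d == {1: 'Foo', 2: 'Bar', 3: 'baz'}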

View File

@ -1,4 -0,0 @
(Deleted binary file: a pickled CLDR locale-data ``.dat`` stub. Roughly a dozen further ``.dat`` files with the same pickled structure, covering display names, currency, number and date format patterns, week data, and time zone names, were deleted in this range; their raw pickle bytes do not render as text and are omitted here.)
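
A hypothetical reconstruction of what each deleted ``.dat`` file pickles, inferred only from the key names legible in the raw dumps; the real files are generated from CLDR (see the "python setup.py import_cldr" hint in babel/core.py above) and are merged with their parent locale by localedata.load().

# Sketch only; every value below is illustrative, not real CLDR data.
import pickle

locale_data_stub = {
    '_version': 2101,  # made-up version number
    'languages': {}, 'scripts': {}, 'territories': {}, 'variants': {},
    'week_data': {'min_days': 1, 'first_day': 0,
                  'weekend_start': 5, 'weekend_end': 6},
    'number_symbols': {}, 'decimal_formats': {}, 'currency_formats': {},
    'percent_formats': {}, 'scientific_formats': {},
    'currency_names': {}, 'currency_names_plural': {},
    'currency_symbols': {}, 'unit_patterns': {},
    'time_zones': {}, 'meta_zones': {}, 'zone_formats': {},
}

payload = pickle.dumps(locale_data_stub)  # the byte layout a .dat file holds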

Some files were not shown because too many files have changed in this diff