From 227b1d8d6209ba8ec35ef7a3e15487a48c815f22 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 12 Mar 2013 00:19:25 +0000 Subject: [PATCH 001/182] Needed for Identity --- tools/pip-options | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tools/pip-options b/tools/pip-options index b2089f9..29a631f 100644 --- a/tools/pip-options +++ b/tools/pip-options @@ -2,3 +2,7 @@ alembic SQLAlchemy>=0.7.8,<=0.7.9 kombu + +# Identity +python-memcached +passlib From 2fb1356be983e43166ae7dcffd36d4e6286c2696 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 12 Mar 2013 00:52:17 +0000 Subject: [PATCH 002/182] Test login --- billingstack/tests/identity/test_api.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/billingstack/tests/identity/test_api.py b/billingstack/tests/identity/test_api.py index 36f7181..e42a931 100644 --- a/billingstack/tests/identity/test_api.py +++ b/billingstack/tests/identity/test_api.py @@ -35,14 +35,14 @@ def setUp(self): storage_driver='sqlalchemy', group='service:identity_api' ) - + self.config( database_connection='sqlite://', group='identity:sqlalchemy') self.plugin = IdentityPlugin.get_plugin(invoke_on_load=True) self.plugin.setup_schema() - + self.app = self.make_app() def tearDown(self): @@ -258,4 +258,12 @@ def test_revoke_grant(self): self.put(url, {}) - self.delete(url) \ No newline at end of file + self.delete(url) + + def test_login(self): + user_data = self.get_fixture('user') + user = self.post('users', user_data).json + + resp = self.post('tokens', user_data) + + assert 'token' in resp.json From f474fe3eadb7287d12b5f78c45383fe1c94ac724 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 12 Mar 2013 14:28:47 +0000 Subject: [PATCH 003/182] Tokens and a Memcache plugin --- billingstack/identity/cms.py | 174 ++++++++++++++++++++++++ billingstack/identity/token_base.py | 84 ++++++++++++ billingstack/identity/token_memcache.py | 126 +++++++++++++++++ billingstack/identity/utils.py | 6 +- 
billingstack/utils.py | 13 +- setup.py | 3 + 6 files changed, 403 insertions(+), 3 deletions(-) create mode 100644 billingstack/identity/cms.py create mode 100644 billingstack/identity/token_base.py create mode 100644 billingstack/identity/token_memcache.py diff --git a/billingstack/identity/cms.py b/billingstack/identity/cms.py new file mode 100644 index 0000000..071a902 --- /dev/null +++ b/billingstack/identity/cms.py @@ -0,0 +1,174 @@ +import hashlib + +from billingstack.openstack.common import log + + +subprocess = None +LOG = log.getLogger(__name__) +PKI_ANS1_PREFIX = 'MII' + + +def _ensure_subprocess(): + # NOTE(vish): late loading subprocess so we can + # use the green version if we are in + # eventlet. + global subprocess + if not subprocess: + try: + from eventlet import patcher + if patcher.already_patched.get('os'): + from eventlet.green import subprocess + else: + import subprocess + except ImportError: + import subprocess + + +def cms_verify(formatted, signing_cert_file_name, ca_file_name): + """ + verifies the signature of the contents IAW CMS syntax + """ + _ensure_subprocess() + process = subprocess.Popen(["openssl", "cms", "-verify", + "-certfile", signing_cert_file_name, + "-CAfile", ca_file_name, + "-inform", "PEM", + "-nosmimecap", "-nodetach", + "-nocerts", "-noattr"], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + output, err = process.communicate(formatted) + retcode = process.poll() + if retcode: + LOG.error(_('Verify error: %s') % err) + raise subprocess.CalledProcessError(retcode, "openssl", output=err) + return output + + +def token_to_cms(signed_text): + copy_of_text = signed_text.replace('-', '/') + + formatted = "-----BEGIN CMS-----\n" + line_length = 64 + while len(copy_of_text) > 0: + if (len(copy_of_text) > line_length): + formatted += copy_of_text[:line_length] + copy_of_text = copy_of_text[line_length:] + else: + formatted += copy_of_text + copy_of_text = "" + formatted += "\n" + + formatted += 
"-----END CMS-----\n" + + return formatted + + +def verify_token(token, signing_cert_file_name, ca_file_name): + return cms_verify(token_to_cms(token), + signing_cert_file_name, + ca_file_name) + + +def is_ans1_token(token): + ''' + thx to ayoung for sorting this out. + + base64 decoded hex representation of MII is 3082 + In [3]: binascii.hexlify(base64.b64decode('MII=')) + Out[3]: '3082' + + re: http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf + + pg4: For tags from 0 to 30 the first octet is the identfier + pg10: Hex 30 means sequence, followed by the length of that sequence. + pg5: Second octet is the length octet + first bit indicates short or long form, next 7 bits encode the number + of subsequent octets that make up the content length octets as an + unsigned binary int + + 82 = 10000010 (first bit indicates long form) + 0000010 = 2 octets of content length + so read the next 2 octets to get the length of the content. + + In the case of a very large content length there could be a requirement to + have more than 2 octets to designate the content length, therefore + requiring us to check for MIM, MIQ, etc. 
+ In [4]: base64.b64encode(binascii.a2b_hex('3083')) + Out[4]: 'MIM=' + In [5]: base64.b64encode(binascii.a2b_hex('3084')) + Out[5]: 'MIQ=' + Checking for MI would become invalid at 16 octets of content length + 10010000 = 90 + In [6]: base64.b64encode(binascii.a2b_hex('3090')) + Out[6]: 'MJA=' + Checking for just M is insufficient + + But we will only check for MII: + Max length of the content using 2 octets is 7FFF or 32767 + It's not practical to support a token of this length or greater in http + therefore, we will check for MII only and ignore the case of larger tokens + ''' + return token[:3] == PKI_ANS1_PREFIX + + +def cms_sign_text(text, signing_cert_file_name, signing_key_file_name): + """ Uses OpenSSL to sign a document + Produces a Base64 encoding of a DER formatted CMS Document + http://en.wikipedia.org/wiki/Cryptographic_Message_Syntax + """ + _ensure_subprocess() + process = subprocess.Popen(["openssl", "cms", "-sign", + "-signer", signing_cert_file_name, + "-inkey", signing_key_file_name, + "-outform", "PEM", + "-nosmimecap", "-nodetach", + "-nocerts", "-noattr"], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + output, err = process.communicate(text) + retcode = process.poll() + if retcode or "Error" in err: + if retcode == 3: + LOG.error(_("Signing error: Unable to load certificate - " + "ensure you've configured PKI with " + "'keystone-manage pki_setup'")) + else: + LOG.error(_('Signing error: %s') % err) + raise subprocess.CalledProcessError(retcode, "openssl") + return output + + +def cms_sign_token(text, signing_cert_file_name, signing_key_file_name): + output = cms_sign_text(text, signing_cert_file_name, signing_key_file_name) + return cms_to_token(output) + + +def cms_to_token(cms_text): + + start_delim = "-----BEGIN CMS-----" + end_delim = "-----END CMS-----" + signed_text = cms_text + signed_text = signed_text.replace('/', '-') + signed_text = signed_text.replace(start_delim, '') + signed_text = 
signed_text.replace(end_delim, '') + signed_text = signed_text.replace('\n', '') + + return signed_text + + +def cms_hash_token(token_id): + """ + return: for ans1_token, returns the hash of the passed in token + otherwise, returns what it was passed in. + """ + if token_id is None: + return None + if is_ans1_token(token_id): + hasher = hashlib.md5() + hasher.update(token_id) + return hasher.hexdigest() + else: + return token_id diff --git a/billingstack/identity/token_base.py b/billingstack/identity/token_base.py new file mode 100644 index 0000000..207a4d0 --- /dev/null +++ b/billingstack/identity/token_base.py @@ -0,0 +1,84 @@ +import copy +import datetime + +from oslo.config import cfg + +from billingstack import utils +from billingstack.identity import cms +from billingstack.openstack.common import timeutils +from billingstack.plugin import Plugin + + +cfg.CONF.register_group( + cfg.OptGroup(name='identity:token', title="Token configuration")) + + +cfg.CONF.register_opts([ + cfg.IntOpt('expiration', default=86400)], + group='identity:token') + + +def unique_id(token_id): + """Return a unique ID for a token. + + The returned value is useful as the primary key of a database table, + memcache store, or other lookup table. + + :returns: Given a PKI token, returns it's hashed value. Otherwise, returns + the passed-in value (such as a UUID token ID or an existing + hash). + """ + return cms.cms_hash_token(token_id) + + +def default_expire_time(): + """Determine when a fresh token should expire. + + Expiration time varies based on configuration (see ``[token] expiration``). + + :returns: a naive UTC datetime.datetime object + + """ + expiration = cfg.CONF['identity:token'].expiration + expire_delta = datetime.timedelta(seconds=expiration) + return timeutils.utcnow() + expire_delta + + +class TokenPlugin(Plugin): + __plugin_ns__ = 'billingstack.token' + __plugin_type__ = 'token' + + """ + Base for Token providers like Memcache, SQL, Redis..... 
+ + Note: This is NOT responsable for user / password authentication. It's a + layer that manages tokens.... + """ + def get_token(self, token_id): + """ + Get a Token + + :param token_id: Token ID to get... + """ + raise NotImplementedError + + def delete_token(self, token_id): + """ + Delete a Token + + :param token_id: Token ID to delete. + """ + raise NotImplementedError + + def list_tokens(self): + """ + List tokens + """ + + def list_revoked(self): + """ + List out revoked Tokens. + """ + raise NotImplementedError + + diff --git a/billingstack/identity/token_memcache.py b/billingstack/identity/token_memcache.py new file mode 100644 index 0000000..76a1b97 --- /dev/null +++ b/billingstack/identity/token_memcache.py @@ -0,0 +1,126 @@ +import copy +import memcache + +from oslo.config import cfg + +from billingstack.identity.token_base import TokenPlugin +from billingstack.identity.token_base import default_expire_time, unique_id +from billingstack.openstack.common import jsonutils +from billingstack import utils + + +cfg.CONF.register_group( + cfg.OptGroup(name='token:memcache', title="Memcache")) + + +cfg.CONF.register_opts([ + cfg.StrOpt('memcache_servers', default='127.0.0.1:11211')], + group='token:memcache') + + +class MemcachePlugin(TokenPlugin): + __plugin_name__ = 'memcache' + + def __init__(self, client=None): + super(MemcachePlugin, self).__init__() + self._memcache_client = client + + @property + def client(self): + return self._memcache_client or self._get_memcache_client() + + def _get_memcache_client(self): + servers = cfg.CONF[self.name].memcache_servers.split(';') + self._memcache_client = memcache.Client(servers, debug=0) + return self._memcache_client + + def _prefix_token_id(self, token_id): + return 'token-%s' % token_id.encode('utf-8') + + def _prefix_user_id(self, user_id): + return 'usertokens-%s' % user_id.encode('utf-8') + + def get_token(self, token_id): + if token_id is None: + #FIXME(ekarlso): Better error here? 
+ raise exceptions.NotFound + + ptk = self._prefix_token_id(token_id) + token = self.client.get(ptk) + + if token is None: + #FIXME(ekarlso): Better error here? + raise exceptions.NotFound + + return token + + def create_token(self, token_id, data): + data_copy = copy.deepcopy(data) + ptk = self._prefix_token_id(unique_id(token_id)) + + if not data_copy.get('expires'): + data_copy['expires'] = default_expire_time() + + kwargs = {} + + if data_copy['expires'] is not None: + expires_ts = utils.unixtime(data_copy['expires']) + kwargs['time'] = expires_ts + + self.client.set(ptk, data_copy, **kwargs) + + if 'id' in data['user']: + token_data = jsonutils.dumps(token_id) + user_id = data['user']['id'] + user_key = self._prefix_user_id(user_id) + + if not self.client.append(user_key, ',%s' % token_data): + if not self.client.add(user_key, token_data): + if not self.client.append(user_key, ',%s' % token_data): + msg = _('Unable to add token user list.') + raise exceptions.UnexpectedError(msg) + return copy.deepcopy(data_copy) + + def _add_to_revocation_list(self, data): + data_json = jsonutils.dumps(data) + if not self.client.append(self.revocation_key, ',%s' % data_json): + if not self.client.add(self.revocation_key, data_json): + if not self.client.append(self.revocation_key, + ',%s' % data_json): + msg = _('Unable to add token to revocation list.') + raise exceptions.UnexpectedError(msg) + + def delete_token(self, token_id): + # Test for existence + data = self.get_token(unique_id(token_id)) + ptk = self._prefix_token_id(unique_id(token_id)) + result = self.client.delete(ptk) + self._add_to_revocation_list(data) + return result + + def list_tokens(self, user_id, account_id=None, trust_id=None): + tokens = [] + user_key = self._prefix_user_id(user_id) + user_record = self.client.get(user_key) or "" + token_list = jsonutils.loads('[%s]' % user_record) + + for token_id in token_list: + ptk = self._prefix_token_id(token_id) + token_ref = self.client.get(ptk) + + if 
token_ref: + if account_id is not None: + account = token_ref.get('account') + if not account: + continue + if account.get('id') != account_id: + continue + + tokens.append(token_id) + return tokens + + def list_revoked_tokens(self): + list_json = self.client.get(self.revocation_key) + if list_json: + return jsonutils.loads('[%s]' % list_json) + return [] \ No newline at end of file diff --git a/billingstack/identity/utils.py b/billingstack/identity/utils.py index 3d9b1d1..fd7edb8 100644 --- a/billingstack/identity/utils.py +++ b/billingstack/identity/utils.py @@ -5,8 +5,10 @@ from billingstack import exceptions -cfg.CONF.register_opt( - cfg.IntOpt('crypt_strength', default=40000)) + +cfg.CONF.register_opts([ + cfg.IntOpt('crypt_strength', default=40000)], + group='service:identity_api') MAX_PASSWORD_LENGTH = 4096 diff --git a/billingstack/utils.py b/billingstack/utils.py index c11b80b..78f40a7 100644 --- a/billingstack/utils.py +++ b/billingstack/utils.py @@ -1,9 +1,11 @@ import os import pycountry import re +import time -from billingstack import exceptions from oslo.config import cfg + +from billingstack import exceptions from billingstack.openstack.common import log @@ -121,3 +123,12 @@ def _seen(col): map(lambda item: map(_seen, item.keys()), data) return list(columns) + +def unixtime(dt_obj): + """Format datetime object as unix timestamp + + :param dt_obj: datetime.datetime object + :returns: float + + """ + return time.mktime(dt_obj.utctimetuple()) \ No newline at end of file diff --git a/setup.py b/setup.py index 9b6eaed..885181b 100644 --- a/setup.py +++ b/setup.py @@ -67,6 +67,9 @@ [billingstack.identity_plugin] sqlalchemy = billingstack.identity.impl_sqlalchemy:SQLAlchemyPlugin + + [billingstack.token_plugin] + memcache = billingstack.identity.token_memcache:MemcachePlugin """), classifiers=[ 'Development Status :: 3 - Alpha', From 9d56d22814568154f936817529e454e5157cde45 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 14 Mar 2013 21:05:08 +0000 
Subject: [PATCH 004/182] Not used --- bin/billingstack-db-manage | 28 ---------------------------- 1 file changed, 28 deletions(-) delete mode 100644 bin/billingstack-db-manage diff --git a/bin/billingstack-db-manage b/bin/billingstack-db-manage deleted file mode 100644 index c80febf..0000000 --- a/bin/billingstack-db-manage +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 New Dream Network, LLC (DreamHost) -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# Copied: Quantum -import os -import sys -sys.path.insert(0, os.getcwd()) - -from oslo.config import cfg - -from billingstack.storage.impl_sqlalchemy.migration.cli import main - - -main() From 11f2318645970ab32902c0c1402367fe8a963302 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 14 Mar 2013 21:13:31 +0000 Subject: [PATCH 005/182] PEP / Flakes fixes --- billingstack/api/__init__.py | 3 +- billingstack/api/base.py | 1 - billingstack/api/hooks.py | 4 +-- billingstack/api/root.py | 6 ++-- billingstack/api/v1/__init__.py | 2 +- billingstack/api/v1/models.py | 6 ++-- billingstack/central/rpcapi.py | 32 ++++++++++------- billingstack/central/service.py | 5 --- billingstack/exceptions.py | 1 - billingstack/identity/api/__init__.py | 5 +-- billingstack/identity/api/app.py | 2 +- billingstack/identity/api/v1.py | 8 ++--- billingstack/identity/base.py | 3 +- billingstack/identity/token_base.py | 4 --- billingstack/identity/token_memcache.py | 6 ++-- billingstack/manage/database.py | 13 ++++--- billingstack/manage/provider.py | 6 ++-- billingstack/sqlalchemy/api.py | 4 +-- billingstack/sqlalchemy/model_base.py | 2 +- .../storage/impl_sqlalchemy/__init__.py | 13 ++++--- .../migration/alembic_migrations/env.py | 4 +-- .../storage/impl_sqlalchemy/migration/cli.py | 6 ++-- .../storage/impl_sqlalchemy/models.py | 34 ++++++++++--------- billingstack/utils.py | 4 +-- 24 files changed, 91 insertions(+), 83 deletions(-) diff --git a/billingstack/api/__init__.py b/billingstack/api/__init__.py index 663af26..58bc8a7 100644 --- a/billingstack/api/__init__.py +++ b/billingstack/api/__init__.py @@ -22,7 +22,8 @@ cfg.IntOpt('api_port', default=9091, help='The port for the billing API server'), cfg.IntOpt('api_listen', default='0.0.0.0', help='Bind to address'), - cfg.StrOpt('storage_driver', default='sqlalchemy', help='Storage driver to use'), + cfg.StrOpt('storage_driver', default='sqlalchemy', + help='Storage driver to use'), ] cfg.CONF.register_opts(API_SERVICE_OPTS, 
'service:api') diff --git a/billingstack/api/base.py b/billingstack/api/base.py index 52e90b6..3b0b38a 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -92,4 +92,3 @@ def from_db(cls, values): Return a class of this object from values in the from_db """ return cls(**values) - diff --git a/billingstack/api/hooks.py b/billingstack/api/hooks.py index 123eedc..f93c3e4 100644 --- a/billingstack/api/hooks.py +++ b/billingstack/api/hooks.py @@ -3,11 +3,9 @@ from billingstack import storage from billingstack.central.rpcapi import CentralAPI -from billingstack.openstack.common import log from billingstack.openstack.common.context import RequestContext - class NoAuthHook(hooks.PecanHook): """ Simple auth - all requests will be is_admin=True @@ -39,4 +37,4 @@ def before(self, state): class RPCHook(hooks.PecanHook): def before(self, state): - state.request.central_api = CentralAPI() \ No newline at end of file + state.request.central_api = CentralAPI() diff --git a/billingstack/api/root.py b/billingstack/api/root.py index 3379301..3a6aab3 100644 --- a/billingstack/api/root.py +++ b/billingstack/api/root.py @@ -16,13 +16,11 @@ # License for the specific language governing permissions and limitations # under the License. -from pecan import expose, redirect -from webob.exc import status_map - +from pecan import expose from . 
import v1 -class RootController(object): +class RootController(object): v1 = v1.V1Controller() @expose(generic=True, template='index.html') diff --git a/billingstack/api/v1/__init__.py b/billingstack/api/v1/__init__.py index b508599..0e3c17b 100644 --- a/billingstack/api/v1/__init__.py +++ b/billingstack/api/v1/__init__.py @@ -1 +1 @@ -from .controllers import V1Controller \ No newline at end of file +from .controllers import V1Controller diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index c2fb504..269f75d 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -1,4 +1,4 @@ -from wsme.types import text, Unset, DictType +from wsme.types import text, DictType from billingstack.api.base import ModelBase, property_type @@ -36,6 +36,7 @@ class Currency(Base): name = text title = text + class Language(Base): id = text name = text @@ -115,7 +116,8 @@ class Merchant(Account): class Customer(Account): def __init__(self, **kw): - kw['contact_info'] = [ContactInfo(**i) for i in kw.get('contact_info', {})] + infos = kw.get('contact_info', {}) + kw['contact_info'] = [ContactInfo(**i) for i in infos] super(Customer, self).__init__(**kw) merchant_id = text diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 3273f76..8e6656e 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -3,9 +3,8 @@ from billingstack.openstack.common.rpc import proxy rpcapi_opts = [ - cfg.StrOpt('central_topic', - default='central', - help='the topic central nodes listen on') + cfg.StrOpt('central_topic', default='central', + help='the topic central nodes listen on') ] cfg.CONF.register_opts(rpcapi_opts) @@ -24,7 +23,8 @@ def currency_add(self, ctxt, values): return self.call(ctxt, self.make_msg('currency_add', values=values)) def currency_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('currency_list', criterion=criterion)) + return self.call(ctxt, 
self.make_msg('currency_list', + criterion=criterion)) def currency_get(self, ctxt, id_): return self.call(ctxt, self.make_msg('currency_get', @@ -43,14 +43,15 @@ def language_add(self, ctxt, values): return self.call(ctxt, self.make_msg('language_add', values=values)) def language_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('language_list', criterion=criterion)) + return self.call(ctxt, self.make_msg('language_list', + criterion=criterion)) def language_get(self, ctxt, id_): return self.call(ctxt, self.make_msg('language_get', id_=id_)) def language_update(self, ctxt, id_, values): return self.call(ctxt, self.make_msg('language_update', - language_id, values)) + id_, values)) def language_delete(self, ctxt, id_): return self.call(ctxt, self.make_msg('language_delete', id_=id_)) @@ -87,13 +88,14 @@ def pg_method_get(self, ctxt, id_): return self.call(ctxt, self.make_msg('pg_method_list', id_=id_)) # PGC - def pg_config_add(self, ctxt, merchant_id, provider_id, values): + def pg_config_add(self, ctxt, merchant_id, provider_id, values): return self.call(ctxt, self.make_msg('pg_config_add', merchant_id=merchant_id, provider_id=provider_id, values=values)) def pg_config_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('pg_config_list', criterion=criterion)) + return self.call(ctxt, self.make_msg('pg_config_list', + criterion=criterion)) def pg_config_get(self, ctxt, id_): return self.call(ctxt, self.make_msg('pg_config_get', id_=id_)) @@ -130,7 +132,8 @@ def merchant_add(self, ctxt, values): return self.call(ctxt, self.make_msg('merchant_add', values=values)) def merchant_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('merchant_list', criterion=criterion)) + return self.call(ctxt, self.make_msg('merchant_list', + criterion=criterion)) def merchant_get(self, ctxt, id_): return self.call(ctxt, self.make_msg('merchant_get', id_=id_)) @@ -149,7 +152,8 @@ def customer_add(self, ctxt, merchant_id, 
values): merchant_id=merchant_id, values=values)) def customer_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('customer_list', criterion=criterion)) + return self.call(ctxt, self.make_msg('customer_list', + criterion=criterion)) def customer_get(self, ctxt, id_): return self.call(ctxt, self.make_msg('customer_get', id_=id_)) @@ -185,7 +189,8 @@ def product_add(self, ctxt, merchant_id, values): merchant_id=merchant_id, values=values)) def product_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('product_list', criterion=criterion)) + return self.call(ctxt, self.make_msg('product_list', + criterion=criterion)) def product_get(self, ctxt, id_): return self.call(ctxt, self.make_msg('product_get', id_=id_)) @@ -202,7 +207,8 @@ def plan_item_add(self, ctxt, values): return self.call(ctxt, self.make_msg('plan_item_add', values=values)) def plan_item_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('plan_item_list', criterion=criterion)) + return self.call(ctxt, self.make_msg('plan_item_list', + criterion=criterion)) def plan_item_get(self, ctxt, id_): return self.call(ctxt, self.make_msg('plan_item_get', id_=id_)) @@ -230,4 +236,4 @@ def plan_update(self, ctxt, id_, values): values=values)) def plan_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('plan_delete', id_=id_)) \ No newline at end of file + return self.call(ctxt, self.make_msg('plan_delete', id_=id_)) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 12d277b..fbfb1fd 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -1,13 +1,8 @@ import functools -import re from oslo.config import cfg from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc from billingstack.openstack.common.rpc import service as rpc_service -from stevedore.named import NamedExtensionManager -from billingstack import exceptions from 
billingstack import storage -from billingstack import utils cfg.CONF.import_opt('host', 'billingstack.netconf') diff --git a/billingstack/exceptions.py b/billingstack/exceptions.py index 4b4e2f8..35f8cab 100644 --- a/billingstack/exceptions.py +++ b/billingstack/exceptions.py @@ -51,4 +51,3 @@ class Duplicate(Base): class NotFound(Base): pass - diff --git a/billingstack/identity/api/__init__.py b/billingstack/identity/api/__init__.py index 0d0ea3d..8a54949 100644 --- a/billingstack/identity/api/__init__.py +++ b/billingstack/identity/api/__init__.py @@ -21,7 +21,8 @@ cfg.IntOpt('api_port', default=9092, help='The port for the BS Identity API server'), cfg.IntOpt('api_listen', default='0.0.0.0', help='Bind to address'), - cfg.StrOpt('storage_driver', default='sqlalchemy', help='Storage driver to use'), + cfg.StrOpt('storage_driver', default='sqlalchemy', + help='Storage driver to use'), ] -cfg.CONF.register_opts(API_SERVICE_OPTS, 'service:identity_api') \ No newline at end of file +cfg.CONF.register_opts(API_SERVICE_OPTS, 'service:identity_api') diff --git a/billingstack/identity/api/app.py b/billingstack/identity/api/app.py index 381b9a7..837197f 100644 --- a/billingstack/identity/api/app.py +++ b/billingstack/identity/api/app.py @@ -58,4 +58,4 @@ def setup_app(pecan_config=None, extra_hooks=None): True), ) - return app \ No newline at end of file + return app diff --git a/billingstack/identity/api/v1.py b/billingstack/identity/api/v1.py index f911162..2749bf9 100644 --- a/billingstack/identity/api/v1.py +++ b/billingstack/identity/api/v1.py @@ -1,5 +1,4 @@ -from oslo.config import cfg -from pecan import hooks, request, expose, rest +from pecan import request, expose, rest import wsmeext.pecan as wsme_pecan from wsme.types import text, wsattr @@ -115,7 +114,8 @@ class AccountController(RestBase): @expose() def _lookup(self, *remainder): if remainder[0] == 'users' and remainder[2] == 'roles': - return AccountRolesController(self.id_, remainder[1], remainder[3]), () 
+ return AccountRolesController(self.id_, remainder[1], + remainder[3]), () return super(AccountController, self)._lookup(remainder) @wsme_pecan.wsexpose(Account) @@ -213,4 +213,4 @@ class V1Controller(RestBase): class RootController(RestBase): - v1 = V1Controller() + v1 = V1Controller() diff --git a/billingstack/identity/base.py b/billingstack/identity/base.py index afa4817..ddfa906 100644 --- a/billingstack/identity/base.py +++ b/billingstack/identity/base.py @@ -18,7 +18,8 @@ def get_plugin(self, name=cfg.CONF['service:identity_api'].storage_driver, **kw): return super(IdentityPlugin, self).get_plugin(name, **kw) - def authenticate(self, context, user_id=None, password=None, account_id=None): + def authenticate(self, context, user_id=None, password=None, + account_id=None): """ Authenticate a User diff --git a/billingstack/identity/token_base.py b/billingstack/identity/token_base.py index 207a4d0..c2c0ff1 100644 --- a/billingstack/identity/token_base.py +++ b/billingstack/identity/token_base.py @@ -1,9 +1,7 @@ -import copy import datetime from oslo.config import cfg -from billingstack import utils from billingstack.identity import cms from billingstack.openstack.common import timeutils from billingstack.plugin import Plugin @@ -80,5 +78,3 @@ def list_revoked(self): List out revoked Tokens. 
""" raise NotImplementedError - - diff --git a/billingstack/identity/token_memcache.py b/billingstack/identity/token_memcache.py index 76a1b97..e246b96 100644 --- a/billingstack/identity/token_memcache.py +++ b/billingstack/identity/token_memcache.py @@ -3,8 +3,10 @@ from oslo.config import cfg +from billingstack import exceptions +from billingstack.openstack.common.gettextutils import _ from billingstack.identity.token_base import TokenPlugin -from billingstack.identity.token_base import default_expire_time, unique_id +from billingstack.identity.token_base import default_expire_time, unique_id from billingstack.openstack.common import jsonutils from billingstack import utils @@ -123,4 +125,4 @@ def list_revoked_tokens(self): list_json = self.client.get(self.revocation_key) if list_json: return jsonutils.loads('[%s]' % list_json) - return [] \ No newline at end of file + return [] diff --git a/billingstack/manage/database.py b/billingstack/manage/database.py index a4f064e..85905e1 100644 --- a/billingstack/manage/database.py +++ b/billingstack/manage/database.py @@ -22,10 +22,15 @@ LOG = log.getLogger(__name__) -cfg.CONF.import_opt('storage_driver', 'billingstack.central', - group='service:central') -cfg.CONF.import_opt('database_connection', 'billingstack.storage.impl_sqlalchemy', - group='storage:sqlalchemy') +cfg.CONF.import_opt( + 'storage_driver', + 'billingstack.central', + group='service:central') + +cfg.CONF.import_opt( + 'database_connection', + 'billingstack.storage.impl_sqlalchemy', + group='storage:sqlalchemy') class DatabaseCommand(Command): diff --git a/billingstack/manage/provider.py b/billingstack/manage/provider.py index 1ec0b0b..2ac1524 100644 --- a/billingstack/manage/provider.py +++ b/billingstack/manage/provider.py @@ -19,7 +19,7 @@ def execute(self, parsed_args): data = self.conn.pg_provider_list(context) for p in data: - p['methods'] = ", ".join( - [":".join([m[k] for k in ['type', 'name']])\ - for m in p['methods']]) + keys = ['type', 
'name'] + methods = [":".join([m[k] for k in keys]) for m in p['methods']] + p['methods'] = ", ".join(methods) return data diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index 5743857..ce371d9 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -155,7 +155,8 @@ def _dict(self, row, extra=[]): def _kv_rows(self, rows, key='name', func=lambda i: i): """ - Return a Key, Value dict where the "key" will be the key and the row as value + Return a Key, Value dict where the "key" will be the key and the row + as value """ data = {} for row in rows: @@ -165,4 +166,3 @@ def _kv_rows(self, rows, key='name', func=lambda i: i): data_key = row[key] data[data_key] = func(row) return data - diff --git a/billingstack/sqlalchemy/model_base.py b/billingstack/sqlalchemy/model_base.py index b860981..4629048 100644 --- a/billingstack/sqlalchemy/model_base.py +++ b/billingstack/sqlalchemy/model_base.py @@ -137,4 +137,4 @@ def value(self): def value(self, value): data_type = type(value).__name__ self.data_type = data_type - self._value = value \ No newline at end of file + self._value = value diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 9830ce8..76abd01 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -48,7 +48,8 @@ def __init__(self, config_group): def base(self): return models.BASE - def set_properties(self, obj, properties, cls=None, rel_attr='properties', purge=False): + def set_properties(self, obj, properties, cls=None, rel_attr='properties', + purge=False): """ Set's a dict with key values on a relation on the row @@ -242,8 +243,8 @@ def _set_method(self, provider, method, existing, all_methods): try: all_methods[method_key].providers.append(provider) except KeyError: - msg = 'Provider %s tried to associate to non-existing method %s' \ - % (provider.name, method_key) + msg = 
'Provider %s tried to associate to non-existing'\ + 'method %s' % (provider.name, method_key) LOG.error(msg) raise exceptions.ConfigurationError(msg) @@ -387,7 +388,8 @@ def customer_delete(self, ctxt, id_): def _product(self, row): product = dict(row) - product['properties'] = self._kv_rows(row.properties, func=lambda i: i['value']) + product['properties'] = self._kv_rows(row.properties, + func=lambda i: i['value']) return product def product_add(self, ctxt, merchant_id, values): @@ -479,7 +481,8 @@ def plan_item_delete(self, ctxt, id_): def _plan(self, row): plan = dict(row) - plan['properties'] = self._kv_rows(row.properties, func=lambda i: i['value']) + plan['properties'] = self._kv_rows(row.properties, + func=lambda i: i['value']) plan['plan_items'] = map(dict, row.plan_items) if row.plan_items\ else [] return plan diff --git a/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/env.py b/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/env.py index 31068d9..419eff8 100644 --- a/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/env.py +++ b/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/env.py @@ -23,7 +23,6 @@ from sqlalchemy import create_engine, pool from billingstack.storage.impl_sqlalchemy.models import ModelBase -from billingstack.openstack.common import importutils # this is the Alembic Config object, which provides @@ -51,7 +50,8 @@ def run_migrations_offline(): script output. 
""" - context.configure(url=billingstack_config['storage:sqlalchemy'].database_connection) + context.configure(url=billingstack_config['storage:sqlalchemy'] + .database_connection) with context.begin_transaction(): context.run_migrations(options=build_options()) diff --git a/billingstack/storage/impl_sqlalchemy/migration/cli.py b/billingstack/storage/impl_sqlalchemy/migration/cli.py index bbd59cf..51d4240 100644 --- a/billingstack/storage/impl_sqlalchemy/migration/cli.py +++ b/billingstack/storage/impl_sqlalchemy/migration/cli.py @@ -17,7 +17,6 @@ # @author: Mark McClain, DreamHost # Copied: Quantum import os -import sys from alembic import command as alembic_command from alembic import config as alembic_config @@ -115,8 +114,9 @@ def main(): config = alembic_config.Config( os.path.join(os.path.dirname(__file__), 'alembic.ini') ) - config.set_main_option('script_location', - 'billingstack.storage.impl_sqlalchemy.migration:alembic_migrations') + config.set_main_option( + 'script_location', + 'billingstack.storage.impl_sqlalchemy.migration:alembic_migrations') # attach the Quantum conf to the Alembic conf config.billingstack_config = CONF diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index b26b144..b9d68c1 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -11,20 +11,17 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
-import re - from sqlalchemy import Column, Table, ForeignKey, UniqueConstraint -from sqlalchemy import Integer, Float, Enum, Boolean -from sqlalchemy import DateTime, Unicode, UnicodeText +from sqlalchemy import Integer, Float +from sqlalchemy import DateTime, Unicode from sqlalchemy.orm import relationship, backref -from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.ext.declarative import declarative_base, declared_attr from billingstack import utils from billingstack.openstack.common import log as logging from billingstack.sqlalchemy.types import JSON, UUID -from billingstack.sqlalchemy.model_base import (ModelBase, BaseMixin, - PropertyMixin) +from billingstack.sqlalchemy.model_base import ( + ModelBase, BaseMixin, PropertyMixin) LOG = logging.getLogger(__name__) @@ -48,7 +45,8 @@ class Language(BASE): title = Column(Unicode(100), nullable=False) -pg_provider_methods = Table('pg_provider_methods', BASE.metadata, +pg_provider_methods = Table( + 'pg_provider_methods', BASE.metadata, Column('provider_id', UUID, ForeignKey('pg_provider.id')), Column('method_id', UUID, ForeignKey('pg_method.id'))) @@ -172,10 +170,12 @@ class Merchant(BASE, BaseMixin): products = relationship('Product', backref='merchant') currency = relationship('Currency', uselist=False, backref='merchants') - currency_name = Column(Unicode(10), ForeignKey('currency.name'), nullable=False) + currency_name = Column(Unicode(10), ForeignKey('currency.name'), + nullable=False) language = relationship('Language', uselist=False, backref='merchants') - language_name = Column(Unicode(10), ForeignKey('language.name'), nullable=False) + language_name = Column(Unicode(10), ForeignKey('language.name'), + nullable=False) class PGAccountConfig(BASE, BaseMixin): @@ -278,10 +278,12 @@ class Invoice(BASE, BaseMixin): line_items = relationship('InvoiceLine', backref='invoice_lines') state = relationship('InvoiceState', backref='invoices') - state_id = Column(Unicode(60), 
ForeignKey('invoice_state.name'), nullable=False) + state_id = Column(Unicode(60), ForeignKey('invoice_state.name'), + nullable=False) currency = relationship('Currency', backref='invoices') - currency_name = Column(Unicode(10), ForeignKey('currency.name'), nullable=False) + currency_name = Column(Unicode(10), ForeignKey('currency.name'), + nullable=False) merchant = relationship('Merchant', backref='invoices') merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), @@ -313,7 +315,7 @@ class Pricing(BASE, BaseMixin): plan_item_id = Column(UUID, ForeignKey('plan_item.id', ondelete='CASCADE', onupdate='CASCADE')) product_id = Column(UUID, ForeignKey('product.id', ondelete='CASCADE', - onupdate='CASCADE')) + onupdate='CASCADE')) class Plan(BASE, BaseMixin): @@ -419,9 +421,9 @@ class Subscription(BASE, BaseMixin): payment_method = relationship('PaymentMethod', backref='subscriptions') payment_method_id = Column(UUID, ForeignKey('payment_method.id', - ondelete='CASCADE', - onupdate='CASCADE'), - nullable=False) + ondelete='CASCADE', + onupdate='CASCADE'), + nullable=False) class Usage(BASE, BaseMixin): diff --git a/billingstack/utils.py b/billingstack/utils.py index 78f40a7..dd6bb32 100644 --- a/billingstack/utils.py +++ b/billingstack/utils.py @@ -61,7 +61,7 @@ def get_country(country_obj, **kw): obj = country_obj.get(**kw) except KeyError: raise exceptions.InvalidObject(errors=kw) - return dict([(k, v) for k, v in obj.__dict__.items() \ + return dict([(k, v) for k, v in obj.__dict__.items() if not k.startswith('_')]) @@ -131,4 +131,4 @@ def unixtime(dt_obj): :returns: float """ - return time.mktime(dt_obj.utctimetuple()) \ No newline at end of file + return time.mktime(dt_obj.utctimetuple()) From 04156d9a3222e3fa2d1e3f81ebac5486ea0b9063 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 14 Mar 2013 21:24:01 +0000 Subject: [PATCH 006/182] Don't add users atm --- tools/dev_samples.py | 40 +++++++++++++++++++--------------------- 1 file changed, 19 
insertions(+), 21 deletions(-) diff --git a/tools/dev_samples.py b/tools/dev_samples.py index 70d14ed..e4e514b 100644 --- a/tools/dev_samples.py +++ b/tools/dev_samples.py @@ -4,20 +4,19 @@ from oslo.config import cfg -from billingstack.openstack.common import log as logging - from billingstack import service from billingstack.samples import get_samples from billingstack.storage import get_connection from billingstack.openstack.common.context import get_admin_context -from billingstack.storage.impl_sqlalchemy import models cfg.CONF.import_opt('storage_driver', 'billingstack.central', group='service:central') -cfg.CONF.import_opt('database_connection', 'billingstack.storage.impl_sqlalchemy', - group='storage:sqlalchemy') +cfg.CONF.import_opt( + 'database_connection', + 'billingstack.storage.impl_sqlalchemy', + group='storage:sqlalchemy') SAMPLES = get_samples() @@ -58,24 +57,24 @@ def get_fixture(name, fixture=0, values={}): customer = conn.customer_add( ctxt, merchant['id'], get_fixture('customer', values=country_data)) - contact_info = get_fixture('contact_info') + #contact_info = get_fixture('contact_info') - merchant_user = get_fixture('user') - merchant_user['username'] = 'demo_merchant' - merchant_user['contact_info'] = contact_info + #merchant_user = get_fixture('user') + #merchant_user['username'] = 'demo_merchant' + #merchant_user['contact_info'] = contact_info - merchant_user = conn.user_add( - ctxt, merchant['id'], merchant_user) + #merchant_user = conn.user_add( + #ctxt, merchant['id'], merchant_user) - customer_user = get_fixture('user') - customer_user['username'] = 'demo_customer' - customer_user['contact_info'] = contact_info - customer_user['customer_id'] = customer['id'] + #customer_user = get_fixture('user') + #customer_user['username'] = 'demo_customer' + #customer_user['contact_info'] = contact_info + #customer_user['customer_id'] = customer['id'] - customer_user = conn.user_add( - ctxt, - merchant['id'], - customer_user) + #customer_user = 
conn.user_add( + # ctxt, + # merchant['id'], + # customer_user) products = {} for p in samples['product']: @@ -87,8 +86,7 @@ def get_fixture(name, fixture=0, values={}): {'product_id': products['vcpus']}, {'product_id': products['root_disk_size']}, {'product_id': products['network.incoming.bytes']}, - {'product_id': products['network.outgoing.bytes']} - ]} + {'product_id': products['network.outgoing.bytes']}]} plan = get_fixture('plan', values=values) From 3d342c5124f776841a4a7a67a951d7e17b68bf97 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 14 Mar 2013 21:49:33 +0000 Subject: [PATCH 007/182] PEP --- bin/billingstack-api | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/billingstack-api b/bin/billingstack-api index 23fe8aa..ce82f48 100644 --- a/bin/billingstack-api +++ b/bin/billingstack-api @@ -26,7 +26,6 @@ from oslo.config import cfg from billingstack.api import app from billingstack import service -from billingstack.openstack.common import log as logging if __name__ == '__main__': @@ -38,7 +37,8 @@ if __name__ == '__main__': root = app.setup_app() # Create the WSGI server and start it - host, port = cfg.CONF['service:api'].api_listen, int(cfg.CONF['service:api'].api_port) + host = cfg.CONF['service:api'].api_listen + port = int(cfg.CONF['service:api'].api_port) srv = simple_server.make_server(host, port, root) print 'Starting server in PID %s' % os.getpid() From 1849616f62fd76f068250a5f5eadf350f41b93ba Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 14 Mar 2013 22:20:54 +0000 Subject: [PATCH 008/182] Make method names more persistant --- billingstack/api/v1/controllers.py | 58 ++-- billingstack/central/rpcapi.py | 212 ++++++------ billingstack/manage/provider.py | 2 +- billingstack/payment_gateway/base.py | 16 +- billingstack/payment_gateway/service.py | 4 +- .../storage/impl_sqlalchemy/__init__.py | 124 +++---- billingstack/tests/api/v1/test_customer.py | 20 +- billingstack/tests/api/v1/test_merchant.py | 12 +- 
billingstack/tests/api/v1/test_plan.py | 20 +- billingstack/tests/api/v1/test_product.py | 20 +- billingstack/tests/base.py | 46 +-- billingstack/tests/payment_gateway/base.py | 40 +-- billingstack/tests/storage/__init__.py | 310 +++++++++--------- 13 files changed, 442 insertions(+), 442 deletions(-) diff --git a/billingstack/api/v1/controllers.py b/billingstack/api/v1/controllers.py index 6848225..b7d1c0e 100644 --- a/billingstack/api/v1/controllers.py +++ b/billingstack/api/v1/controllers.py @@ -32,7 +32,7 @@ class CurrenciesController(RestBase): @wsme_pecan.wsexpose([models.Currency]) def get_all(self): - rows = request.central_api.currency_list(request.ctxt) + rows = request.central_api.list_currency(request.ctxt) return [models.Currency.from_db(r) for r in rows] @@ -42,7 +42,7 @@ class LanguagesController(RestBase): @wsme_pecan.wsexpose([models.Language]) def get_all(self): - rows = request.central_api.language_list(request.ctxt) + rows = request.central_api.list_language(request.ctxt) return [models.Language.from_db(r) for r in rows] @@ -53,7 +53,7 @@ class PGProvidersController(RestBase): """ @wsme_pecan.wsexpose([models.PGProvider]) def get_all(self): - rows = request.central_api.pg_provider_list(request.ctxt) + rows = request.central_api.list_pg_provider(request.ctxt) return [models.PGProvider.from_db(r) for r in rows] @@ -64,7 +64,7 @@ class PGMethodsController(RestBase): """ @wsme_pecan.wsexpose([models.PGMethod]) def get_all(self): - rows = request.central_api.pg_method_list(request.ctxt) + rows = request.central_api.list_pg_method(request.ctxt) return [models.PGMethod.from_db(r) for r in rows] @@ -75,13 +75,13 @@ class PlanController(RestBase): @wsme_pecan.wsexpose(models.Plan) def get_all(self): - row = request.central_api.plan_get(request.ctxt, self.id_) + row = request.central_api.get_plan(request.ctxt, self.id_) return models.Plan.from_db(row) @wsme_pecan.wsexpose(models.Plan, body=models.Plan) def put(self, body): - row = 
request.central_api.plan_update( + row = request.central_api.update_plan( request.ctxt, self.id_, body.to_db()) @@ -90,7 +90,7 @@ def put(self, body): @wsme_pecan.wsexpose() def delete(self): - request.central_api.plan_delete(request.ctxt, self.id_) + request.central_api.delete_plan(request.ctxt, self.id_) class PlansController(RestBase): @@ -98,13 +98,13 @@ class PlansController(RestBase): @wsme_pecan.wsexpose([models.Plan]) def get_all(self): - rows = request.central_api.plan_list(request.ctxt) + rows = request.central_api.list_plan(request.ctxt) return [models.Plan.from_db(r) for r in rows] @wsme_pecan.wsexpose(models.Plan, body=models.Plan) def post(self, body): - row = request.central_api.plan_add( + row = request.central_api.create_plan( request.ctxt, request.context['merchant_id'], body.to_db()) @@ -118,13 +118,13 @@ class PaymentMethodController(RestBase): @wsme_pecan.wsexpose(models.PaymentMethod, unicode) def get_all(self): - row = request.central_api.payment_method_get(request.ctxt, self.id_) + row = request.central_api.get_payment_method(request.ctxt, self.id_) return models.PaymentMethod.from_db(row) @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) def put(self, body): - row = request.central_api.payment_method_update( + row = request.central_api.update_payment_method( request.ctxt, self.id_, body.to_db()) @@ -133,7 +133,7 @@ def put(self, body): @wsme_pecan.wsexpose() def delete(self): - request.central_api.payment_method_delete(request.ctxt, self.id_) + request.central_api.delete_payment_method(request.ctxt, self.id_) class PaymentMethodsController(RestBase): @@ -146,7 +146,7 @@ def get_all(self): 'customer_id': request.context['customer_id'] } - rows = request.central_api.payment_method_list( + rows = request.central_api.list_payment_method( request.ctxt, criterion=criterion) @@ -154,7 +154,7 @@ def get_all(self): @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) def post(self, body): - row = 
request.central_api.payment_method_add( + row = request.central_api.create_payment_method( request.ctxt, request.context['customer_id'], body.to_db()) @@ -168,13 +168,13 @@ class ProductController(RestBase): @wsme_pecan.wsexpose(models.Product) def get_all(self): - row = request.central_api.product_get(request.ctxt, self.id_) + row = request.central_api.get_product(request.ctxt, self.id_) return models.Product.from_db(row) @wsme_pecan.wsexpose(models.Product, body=models.Product) def put(self, body): - row = request.central_api.product_update( + row = request.central_api.update_product( request.ctxt, self.id_, body.to_db()) @@ -183,7 +183,7 @@ def put(self, body): @wsme_pecan.wsexpose() def delete(self): - request.central_api.product_delete(request.ctxt, self.id_) + request.central_api.delete_product(request.ctxt, self.id_) class ProductsController(RestBase): @@ -191,13 +191,13 @@ class ProductsController(RestBase): @wsme_pecan.wsexpose([models.Product]) def get_all(self): - rows = request.central_api.product_list(request.ctxt) + rows = request.central_api.list_product(request.ctxt) return [models.Product.from_db(r) for r in rows] @wsme_pecan.wsexpose(models.Product, body=models.Product) def post(self, body): - row = request.central_api.product_add( + row = request.central_api.create_product( request.ctxt, request.context['merchant_id'], body.to_db()) @@ -215,13 +215,13 @@ class CustomerController(RestBase): @wsme_pecan.wsexpose(models.Customer, unicode) def get_all(self): - row = request.central_api.customer_get(request.ctxt, self.id_) + row = request.central_api.get_customer(request.ctxt, self.id_) return models.Customer.from_db(row) @wsme_pecan.wsexpose(models.Customer, body=models.Customer) def put(self, body): - row = request.central_api.customer_update( + row = request.central_api.update_customer( request.ctxt, self.id_, body.to_db()) @@ -230,7 +230,7 @@ def put(self, body): @wsme_pecan.wsexpose() def delete(self): - 
request.central_api.customer_delete(request.ctxt, self.id_) + request.central_api.delete_customer(request.ctxt, self.id_) class CustomersController(RestBase): @@ -239,14 +239,14 @@ class CustomersController(RestBase): @wsme_pecan.wsexpose([models.Customer]) def get_all(self): - rows = request.central_api.customer_list( + rows = request.central_api.list_customer( request.ctxt, criterion={"merchant_id": self.parent.id_}) return [models.Customer.from_db(r) for r in rows] @wsme_pecan.wsexpose(models.Customer, body=models.Customer) def post(self, body): - row = request.central_api.customer_add( + row = request.central_api.create_customer( request.ctxt, request.context['merchant_id'], body.to_db()) @@ -264,13 +264,13 @@ class MerchantController(RestBase): @wsme_pecan.wsexpose(models.Merchant) def get_all(self): - row = request.central_api.merchant_get(request.ctxt, self.id_) + row = request.central_api.get_merchant(request.ctxt, self.id_) return models.Merchant.from_db(row) @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant) def put(self, body): - row = request.central_api.merchant_update( + row = request.central_api.update_merchant( request.ctxt, self.id_, body.to_db()) @@ -279,7 +279,7 @@ def put(self, body): @wsme_pecan.wsexpose() def delete(self): - request.central_api.merchant_delete(request.ctxt, self.id_) + request.central_api.delete_merchant(request.ctxt, self.id_) class MerchantsController(RestBase): @@ -288,13 +288,13 @@ class MerchantsController(RestBase): @wsme_pecan.wsexpose([models.Merchant]) def get_all(self): - rows = request.central_api.merchant_list(request.ctxt) + rows = request.central_api.list_merchant(request.ctxt) return [models.Merchant.from_db(i) for i in rows] @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant) def post(self, body): - row = request.central_api.merchant_add( + row = request.central_api.create_merchant( request.ctxt, body.to_db()) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 
8e6656e..9c3787f 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -19,151 +19,151 @@ def __init__(self): default_version=self.BASE_RPC_VERSION) # Currency - def currency_add(self, ctxt, values): - return self.call(ctxt, self.make_msg('currency_add', values=values)) + def create_currency(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_currency', values=values)) - def currency_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('currency_list', + def list_currency(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_currency', criterion=criterion)) - def currency_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('currency_get', + def get_currency(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_currency', id_=id_)) - def currency_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('currency_update', + def update_currency(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_currency', id_=id_, values=values)) - def currency_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('currency_delete', + def delete_currency(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_currency', id_=id_)) # Language - def language_add(self, ctxt, values): - return self.call(ctxt, self.make_msg('language_add', values=values)) + def create_language(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_language', values=values)) - def language_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('language_list', + def list_language(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_language', criterion=criterion)) - def language_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('language_get', id_=id_)) + def get_language(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_language', id_=id_)) - def language_update(self, ctxt, 
id_, values): - return self.call(ctxt, self.make_msg('language_update', + def update_language(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_language', id_, values)) - def language_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('language_delete', id_=id_)) + def delete_language(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_language', id_=id_)) # Contact Info - def contact_info_add(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('contact_info_add', id_=id_, + def create_contact_info(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('create_contact_info', id_=id_, values=values)) - def contact_info_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('contact_info_get', id_)) + def get_contact_info(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_contact_info', id_)) - def contact_info_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('contact_info_update', id_=id_, + def update_contact_info(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_contact_info', id_=id_, values=values)) - def contact_info_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('contact_info_delete', id_=id_)) + def delete_contact_info(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_contact_info', id_=id_)) # PGP - def pg_provider_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('pg_provider_list', + def list_pg_provider(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_pg_provider', criterion=criterion)) - def pg_provider_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('pg_provider_get', id_=id_)) + def get_pg_provider(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_pg_provider', id_=id_)) # PGM - def pg_method_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('pg_method_list', + def list_pg_method(self, ctxt, 
criterion=None): + return self.call(ctxt, self.make_msg('list_pg_method', criterion=criterion)) - def pg_method_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('pg_method_list', id_=id_)) + def get_pg_method(self, ctxt, id_): + return self.call(ctxt, self.make_msg('list_pg_method', id_=id_)) # PGC - def pg_config_add(self, ctxt, merchant_id, provider_id, values): - return self.call(ctxt, self.make_msg('pg_config_add', + def create_pg_config(self, ctxt, merchant_id, provider_id, values): + return self.call(ctxt, self.make_msg('create_pg_config', merchant_id=merchant_id, provider_id=provider_id, values=values)) - def pg_config_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('pg_config_list', + def list_pg_config(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_pg_config', criterion=criterion)) - def pg_config_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('pg_config_get', id_=id_)) + def get_pg_config(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_pg_config', id_=id_)) - def pg_config_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('pg_config_update', id_=id_, + def update_pg_config(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_pg_config', id_=id_, values=values)) - def pg_config_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('pg_config_delete', id_=id_)) + def delete_pg_config(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_pg_config', id_=id_)) # PaymentMethod - def payment_method_add(self, ctxt, customer_id, pg_method_id, values): - return self.call(ctxt, self.make_msg('payment_method_add', + def create_payment_method(self, ctxt, customer_id, pg_method_id, values): + return self.call(ctxt, self.make_msg('create_payment_method', customer_id=customer_id, pg_method_id=pg_method_id, values=values)) - def payment_method_list(self, ctxt, criterion=None): - return self.call(ctxt, 
self.make_msg('payment_method_list', + def list_payment_method(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_payment_method', criterion=criterion)) - def payment_method_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('payment_method_get', id_=id_)) + def get_payment_method(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_payment_method', id_=id_)) - def payment_method_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('payment_method_update', id_=id_, + def update_payment_method(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_payment_method', id_=id_, values=values)) - def payment_method_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('payment_method_delete', id_=id_)) + def delete_payment_method(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_payment_method', id_=id_)) # Merchant - def merchant_add(self, ctxt, values): - return self.call(ctxt, self.make_msg('merchant_add', values=values)) + def create_merchant(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_merchant', values=values)) - def merchant_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('merchant_list', + def list_merchant(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_merchant', criterion=criterion)) - def merchant_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('merchant_get', id_=id_)) + def get_merchant(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_merchant', id_=id_)) - def merchant_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('merchant_update', + def update_merchant(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_merchant', id_=id_, values=values)) - def merchant_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('merchant_delete', + def delete_merchant(self, ctxt, id_): + return self.call(ctxt, 
self.make_msg('delete_merchant', id_=id_)) # Customer - def customer_add(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('customer_add', + def create_customer(self, ctxt, merchant_id, values): + return self.call(ctxt, self.make_msg('create_customer', merchant_id=merchant_id, values=values)) - def customer_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('customer_list', + def list_customer(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_customer', criterion=criterion)) - def customer_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('customer_get', id_=id_)) + def get_customer(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_customer', id_=id_)) - def customer_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('customer_update', + def update_customer(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_customer', id_=id_, values=values)) - def customer_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('customer_delete', id_=id_)) + def delete_customer(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_customer', id_=id_)) # User def user_add(self, ctxt, merchant_id, values): @@ -184,56 +184,56 @@ def user_delete(self, ctxt, id_): return self.call(ctxt, self.make_msg('user_delete', id_=id_)) # Products - def product_add(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('product_add', + def create_product(self, ctxt, merchant_id, values): + return self.call(ctxt, self.make_msg('create_product', merchant_id=merchant_id, values=values)) - def product_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('product_list', + def list_product(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_product', criterion=criterion)) - def product_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('product_get', id_=id_)) + def get_product(self, 
ctxt, id_): + return self.call(ctxt, self.make_msg('get_product', id_=id_)) - def product_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('product_update', id_=id_, + def update_product(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_product', id_=id_, values=values)) - def product_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('product_delete', id_=id_)) + def delete_product(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_product', id_=id_)) # PlanItems - def plan_item_add(self, ctxt, values): - return self.call(ctxt, self.make_msg('plan_item_add', values=values)) + def create_plan_item(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_plan_item', values=values)) - def plan_item_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('plan_item_list', + def list_plan_item(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_plan_item', criterion=criterion)) - def plan_item_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('plan_item_get', id_=id_)) + def get_plan_item(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_plan_item', id_=id_)) - def plan_item_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('plan_item_update', id_=id_, + def update_plan_item(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_plan_item', id_=id_, values=values)) - def plan_item_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('plan_item_delete', id_=id_)) + def delete_plan_item(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_plan_item', id_=id_)) # Plans - def plan_add(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('plan_add', + def create_plan(self, ctxt, merchant_id, values): + return self.call(ctxt, self.make_msg('create_plan', merchant_id=merchant_id, values=values)) - def plan_list(self, ctxt, criterion=None): - return 
self.call(ctxt, self.make_msg('plan_list', criterion=criterion)) + def list_plan(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_plan', criterion=criterion)) - def plan_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('plan_get', id_=id_)) + def get_plan(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_plan', id_=id_)) - def plan_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('plan_update', id_=id_, + def update_plan(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_plan', id_=id_, values=values)) - def plan_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('plan_delete', id_=id_)) + def delete_plan(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_plan', id_=id_)) diff --git a/billingstack/manage/provider.py b/billingstack/manage/provider.py index 2ac1524..a3ae244 100644 --- a/billingstack/manage/provider.py +++ b/billingstack/manage/provider.py @@ -16,7 +16,7 @@ def execute(self, parsed_args): class ProvidersList(DatabaseCommand, ListCommand): def execute(self, parsed_args): context = get_admin_context() - data = self.conn.pg_provider_list(context) + data = self.conn.list_pg_provider(context) for p in data: keys = ['type', 'name'] diff --git a/billingstack/payment_gateway/base.py b/billingstack/payment_gateway/base.py index 085b94b..4c0c973 100644 --- a/billingstack/payment_gateway/base.py +++ b/billingstack/payment_gateway/base.py @@ -49,7 +49,7 @@ def get_connection(self): return get_connection() @classmethod - def account_add(self, values): + def create_account(self, values): """ Create a new Account @@ -57,7 +57,7 @@ def account_add(self, values): """ raise NotImplementedError - def account_get(self, id_): + def get_account(self, id_): """ List Accounts @@ -65,13 +65,13 @@ def account_get(self, id_): """ raise NotImplementedError - def account_list(self): + def list_account(self): """ List Accounts """ raise NotImplementedError - def 
account_delete(self, id_): + def delete_account(self, id_): """ Delete Account @@ -79,7 +79,7 @@ def account_delete(self, id_): """ raise NotImplementedError - def payment_method_add(self, account_id, values): + def create_payment_method(self, account_id, values): """ Create a new Credit Card or similar @@ -88,7 +88,7 @@ def payment_method_add(self, account_id, values): """ raise NotImplementedError - def payment_method_get(self, id_): + def get_payment_method(self, id_): """ Get a PaymentMethod @@ -96,7 +96,7 @@ def payment_method_get(self, id_): """ raise NotImplementedError - def payment_method_list(self, account_id): + def list_payment_method(self, account_id): """ List PaymentMethods @@ -104,7 +104,7 @@ def payment_method_list(self, account_id): """ raise NotImplementedError - def payment_method_delete(self, id_): + def delete_payment_method(self, id_): """ Delete a PaymentMethod """ diff --git a/billingstack/payment_gateway/service.py b/billingstack/payment_gateway/service.py index 2ca72ff..cde2dd0 100644 --- a/billingstack/payment_gateway/service.py +++ b/billingstack/payment_gateway/service.py @@ -32,7 +32,7 @@ def __init__(self, *args, **kwargs): # Get a storage connection self.central_api = CentralAPI() - def pg_provider_get(self, ctxt, pg_info): + def get_pg_provider(self, ctxt, pg_info): """ Work out a PGC config either from pg_info or via ctxt fetching it from central. Return the appropriate PGP for this info. @@ -41,7 +41,7 @@ def pg_provider_get(self, ctxt, pg_info): :param pg_info: Payment Gateway Config... 
""" - def account_add(self, ctxt, values, pg_config=None): + def create_account(self, ctxt, values, pg_config=None): """ Create an Account on the underlying provider diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 76abd01..171e4d7 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -78,7 +78,7 @@ def set_properties(self, obj, properties, cls=None, rel_attr='properties', row[rel_attr].remove(existing[key]) # Currency - def currency_add(self, ctxt, values): + def create_currency(self, ctxt, values): """ Add a supported currency to the database """ @@ -87,23 +87,23 @@ def currency_add(self, ctxt, values): self._save(row) return dict(row) - def currency_list(self, ctxt, **kw): + def list_currency(self, ctxt, **kw): rows = self._list(models.Currency, **kw) return map(dict, rows) - def currency_get(self, ctxt, id_): + def get_currency(self, ctxt, id_): row = self._get(models.Currency, id_) return dict(row) - def currency_update(self, ctxt, id_, values): + def update_currency(self, ctxt, id_, values): row = self._update(models.Currency, id_, values) return dict(row) - def currency_delete(self, ctxt, id_): + def delete_currency(self, ctxt, id_): self._delete(models.Currency, id_) # Language - def language_add(self, ctxt, values): + def create_language(self, ctxt, values): """ Add a supported language to the database """ @@ -112,23 +112,23 @@ def language_add(self, ctxt, values): self._save(row) return dict(row) - def language_list(self, ctxt, **kw): + def list_language(self, ctxt, **kw): rows = self._list(models.Language, **kw) return map(dict, rows) - def language_get(self, ctxt, id_): + def get_language(self, ctxt, id_): row = self._get(models.Language, id_) return dict(row) - def language_update(self, ctxt, id_, values): + def update_language(self, ctxt, id_, values): row = self._update(models.Language, id_, values) return dict(row) 
- def language_delete(self, ctxt, id_): + def delete_language(self, ctxt, id_): self._delete(models.Language, id_) # ContactInfo - def contact_info_add(self, ctxt, obj, values, cls=None, + def create_contact_info(self, ctxt, obj, values, cls=None, rel_attr='contact_info'): """ :param entity: The object to add the contact_info to @@ -155,13 +155,13 @@ def contact_info_add(self, ctxt, obj, values, cls=None, else: return rel_row - def contact_info_get(self, ctxt, id_): + def get_contact_info(self, ctxt, id_): self._get(models.ContactInfo, id_) - def contact_info_update(self, ctxt, id_, values): + def update_contact_info(self, ctxt, id_, values): return self._update(models.ContactInfo, id_, values) - def contact_info_delete(self, ctxt, id_): + def delete_contact_info(self, ctxt, id_): self._delete(models.ContactInfo, id_) # Payment Gateway Providers @@ -184,14 +184,14 @@ def pg_provider_register(self, ctxt, values, methods=[]): self._save(provider) return self._dict(provider, extra=['methods']) - def pg_provider_list(self, ctxt, **kw): + def list_pg_provider(self, ctxt, **kw): """ List available PG Providers """ rows = self._list(models.PGProvider, **kw) return [self._dict(r, extra=['methods']) for r in rows] - def pg_provider_get(self, ctxt, pgp_id): + def get_pg_provider(self, ctxt, pgp_id): row = self._get(models.PGProvider, pgp_id) return self._dict(row, extra=['methods']) @@ -213,7 +213,7 @@ def _set_provider_methods(self, ctxt, provider, config_methods): """ Helper method for setting the Methods for a Provider """ - rows = self.pg_method_list(ctxt, criterion={"owner_id": None}) + rows = self.list_pg_method(ctxt, criterion={"owner_id": None}) system_methods = self._kv_rows(rows, key=models.PGMethod.make_key) existing = self._get_provider_methods(provider) @@ -249,26 +249,26 @@ def _set_method(self, provider, method, existing, all_methods): raise exceptions.ConfigurationError(msg) # PGMethods - def pg_method_add(self, ctxt, values): + def create_pg_method(self, 
ctxt, values): row = models.PGMethod(**values) self._save(row) return dict(row) - def pg_method_list(self, ctxt, **kw): + def list_pg_method(self, ctxt, **kw): return self._list(models.PGMethod, **kw) - def pg_method_get(self, ctxt, id_): + def get_pg_method(self, ctxt, id_): return self._get(models.PGMethod, id_) - def pg_method_update(self, ctxt, id_, values): + def update_pg_method(self, ctxt, id_, values): row = self._update(models.PGMethod, id_, values) return dict(row) - def pg_method_delete(self, ctxt, id_): + def delete_pg_method(self, ctxt, id_): return self._delete(models.PGMethod, id_) # Payment Gateway Configuration - def pg_config_add(self, ctxt, merchant_id, provider_id, values): + def create_pg_config(self, ctxt, merchant_id, provider_id, values): merchant = self._get_id_or_name(models.Merchant, merchant_id) provider = self._get_id_or_name(models.PGProvider, provider_id) @@ -279,23 +279,23 @@ def pg_config_add(self, ctxt, merchant_id, provider_id, values): self._save(row) return dict(row) - def pg_config_list(self, ctxt, **kw): + def list_pg_config(self, ctxt, **kw): rows = self._list(models.PGAccountConfig, **kw) return map(dict, rows) - def pg_config_get(self, ctxt, id_): + def get_pg_config(self, ctxt, id_): row = self._get(models.PGAccountConfig, id_) return dict(row) - def pg_config_update(self, ctxt, id_, values): + def update_pg_config(self, ctxt, id_, values): row = self._update(models.PGAccountConfig, id_, values) return dict(row) - def pg_config_delete(self, ctxt, id_): + def delete_pg_config(self, ctxt, id_): self._delete(models.PGAccountConfig, id_) # PaymentMethod - def payment_method_add(self, ctxt, customer_id, pg_method_id, values): + def create_payment_method(self, ctxt, customer_id, pg_method_id, values): """ Configure a PaymentMethod like a CreditCard """ @@ -309,41 +309,41 @@ def payment_method_add(self, ctxt, customer_id, pg_method_id, values): self._save(row) return self._dict(row, extra=['provider_method']) - def 
payment_method_list(self, ctxt, **kw): + def list_payment_method(self, ctxt, **kw): rows = self._list(models.PaymentMethod, **kw) return [self._dict(row, extra=['provider_method']) for row in rows] - def payment_method_get(self, ctxt, id_, **kw): + def get_payment_method(self, ctxt, id_, **kw): row = self._get_id_or_name(models.PaymentMethod, id_) return self._dict(row, extra=['provider_method']) - def payment_method_update(self, ctxt, id_, values): + def update_payment_method(self, ctxt, id_, values): row = self._update(models.PaymentMethod, id_, values) return self._dict(row, extra=['provider_method']) - def payment_method_delete(self, ctxt, id_): + def delete_payment_method(self, ctxt, id_): self._delete(models.PaymentMethod, id_) # Merchant - def merchant_add(self, ctxt, values): + def create_merchant(self, ctxt, values): row = models.Merchant(**values) self._save(row) return dict(row) - def merchant_list(self, ctxt, **kw): + def list_merchant(self, ctxt, **kw): rows = self._list(models.Merchant, **kw) return map(dict, rows) - def merchant_get(self, ctxt, id_): + def get_merchant(self, ctxt, id_): row = self._get(models.Merchant, id_) return dict(row) - def merchant_update(self, ctxt, id_, values): + def update_merchant(self, ctxt, id_, values): row = self._update(models.Merchant, id_, values) return dict(row) - def merchant_delete(self, ctxt, id_): + def delete_merchant(self, ctxt, id_): self._delete(models.Merchant, id_) # Customer @@ -355,7 +355,7 @@ def _customer(self, row): else {} return data - def customer_add(self, ctxt, merchant_id, values): + def create_customer(self, ctxt, merchant_id, values): merchant = self._get(models.Merchant, merchant_id) contact_info = values.pop('contact_info', None) @@ -363,25 +363,25 @@ def customer_add(self, ctxt, merchant_id, values): merchant.customers.append(customer) if contact_info: - info_row = self.contact_info_add(ctxt, customer, contact_info) + info_row = self.create_contact_info(ctxt, customer, contact_info) 
customer.default_info = info_row self._save(customer) return self._customer(customer) - def customer_list(self, ctxt, **kw): + def list_customer(self, ctxt, **kw): rows = self._list(models.Customer, **kw) return map(dict, rows) - def customer_get(self, ctxt, id_): + def get_customer(self, ctxt, id_): row = self._get(models.Customer, id_) return self._customer(row) - def customer_update(self, ctxt, id_, values): + def update_customer(self, ctxt, id_, values): row = self._update(models.Customer, id_, values) return self._customer(row) - def customer_delete(self, ctxt, id_): + def delete_customer(self, ctxt, id_): return self._delete(models.Customer, id_) # Products @@ -392,7 +392,7 @@ def _product(self, row): func=lambda i: i['value']) return product - def product_add(self, ctxt, merchant_id, values): + def create_product(self, ctxt, merchant_id, values): """ Add a new Product @@ -411,7 +411,7 @@ def product_add(self, ctxt, merchant_id, values): self._save(product) return self._product(product) - def product_list(self, ctxt, **kw): + def list_product(self, ctxt, **kw): """ List Products @@ -420,7 +420,7 @@ def product_list(self, ctxt, **kw): rows = self._list(models.Product, **kw) return map(self._product, rows) - def product_get(self, ctxt, id_): + def get_product(self, ctxt, id_): """ Get a Product @@ -429,7 +429,7 @@ def product_get(self, ctxt, id_): row = self._get(models.Product, id_) return self._product(row) - def product_update(self, ctxt, id_, values): + def update_product(self, ctxt, id_, values): """ Update a Product @@ -446,7 +446,7 @@ def product_update(self, ctxt, id_, values): self._save(row) return self._product(row) - def product_delete(self, ctxt, id_): + def delete_product(self, ctxt, id_): """ Delete a Product @@ -455,26 +455,26 @@ def product_delete(self, ctxt, id_): self._delete(models.Product, id_) # PlanItem - def plan_item_add(self, ctxt, values, save=True): + def create_plan_item(self, ctxt, values, save=True): ref = models.PlanItem() - 
return self._plan_item_update(ref, values, save=save) + return self._update_plan_item(ref, values, save=save) - def plan_item_update(self, ctxt, item, values, save=True): - return self._plan_item_update(item, values, save=save) + def update_plan_item(self, ctxt, item, values, save=True): + return self._update_plan_item(item, values, save=save) - def _plan_item_update(self, item, values, save=True): + def _update_plan_item(self, item, values, save=True): row = self._get_row(item, models.PlanItem) row.update(values) return self._save(row, save=save) - def plan_item_list(self, ctxt, **kw): + def list_plan_item(self, ctxt, **kw): return self._list(models.PlanItem, **kw) - def plan_item_get(self, ctxt, id_): + def get_plan_item(self, ctxt, id_): row = self._get(models.PlanItem, id_) return dict(row) - def plan_item_delete(self, ctxt, id_): + def delete_plan_item(self, ctxt, id_): self._delete(models.PlanItem, id_) # Plan @@ -487,7 +487,7 @@ def _plan(self, row): else [] return plan - def plan_add(self, ctxt, merchant_id, values): + def create_plan(self, ctxt, merchant_id, values): """ Add a new Plan @@ -505,13 +505,13 @@ def plan_add(self, ctxt, merchant_id, values): self.set_properties(plan, properties) for i in items: - item_row = self.plan_item_add(ctxt, i, save=False) + item_row = self.create_plan_item(ctxt, i, save=False) plan.plan_items.append(item_row) self._save(plan) return self._plan(plan) - def plan_list(self, ctxt, **kw): + def list_plan(self, ctxt, **kw): """ List Plan @@ -520,7 +520,7 @@ def plan_list(self, ctxt, **kw): rows = self._list(models.Plan, **kw) return map(self._plan, rows) - def plan_get(self, ctxt, id_): + def get_plan(self, ctxt, id_): """ Get a Plan @@ -529,7 +529,7 @@ def plan_get(self, ctxt, id_): row = self._get(models.Plan, id_) return self._plan(row) - def plan_update(self, ctxt, id_, values): + def update_plan(self, ctxt, id_, values): """ Update a Plan @@ -546,7 +546,7 @@ def plan_update(self, ctxt, id_, values): self._save(row) 
return self._plan(row) - def plan_delete(self, ctxt, id_): + def delete_plan(self, ctxt, id_): """ Delete a Plan diff --git a/billingstack/tests/api/v1/test_customer.py b/billingstack/tests/api/v1/test_customer.py index 21acb1a..4b83ce2 100644 --- a/billingstack/tests/api/v1/test_customer.py +++ b/billingstack/tests/api/v1/test_customer.py @@ -31,7 +31,7 @@ def fixture(self): expected = Customer.from_db(fixture).as_dict() return expected - def test_customer_add(self): + def test_create_customer(self): expected = self.fixture() url = self.path % self.merchant['id'] @@ -40,19 +40,19 @@ def test_customer_add(self): self.assertData(expected, resp.json) - def test_customer_list(self): + def test_list_customer(self): url = self.path % self.merchant['id'] resp = self.get(url) self.assertLen(0, resp.json) - self.customer_add(self.merchant['id']) + self.create_customer(self.merchant['id']) resp = self.get(url) self.assertLen(1, resp.json) - def test_customer_get(self): - _, customer = self.customer_add(self.merchant['id']) + def test_get_customer(self): + _, customer = self.create_customer(self.merchant['id']) expected = Customer.from_db(customer).as_dict() @@ -61,8 +61,8 @@ def test_customer_get(self): self.assertData(expected, resp.json) - def test_customer_update(self): - _, customer = self.customer_add(self.merchant['id']) + def test_update_customer(self): + _, customer = self.create_customer(self.merchant['id']) expected = Customer.from_db(customer).as_dict() @@ -73,10 +73,10 @@ def test_customer_update(self): self.assertData(resp.json, customer) - def test_customer_delete(self): - _, customer = self.customer_add(self.merchant['id']) + def test_delete_customer(self): + _, customer = self.create_customer(self.merchant['id']) url = self.item_path(self.merchant['id'], customer['id']) self.delete(url) - self.assertLen(0, self.central_service.customer_list(self.admin_ctxt)) + self.assertLen(0, self.central_service.list_customer(self.admin_ctxt)) diff --git 
a/billingstack/tests/api/v1/test_merchant.py b/billingstack/tests/api/v1/test_merchant.py index 977b741..27c27d8 100644 --- a/billingstack/tests/api/v1/test_merchant.py +++ b/billingstack/tests/api/v1/test_merchant.py @@ -30,31 +30,31 @@ def fixture(self): expected = Merchant.from_db(fixture).as_dict() return expected - def test_merchant_add(self): + def test_create_merchant(self): expected = self.fixture() resp = self.post('merchants', expected) self.assertData(expected, resp.json) - def test_merchant_list(self): + def test_list_merchant(self): resp = self.get('merchants') self.assertLen(1, resp.json) - def test_merchant_get(self): + def test_get_merchant(self): expected = Merchant.from_db(self.merchant).as_dict() resp = self.get('merchants/' + self.merchant['id']) self.assertData(expected, resp.json) - def test_merchant_update(self): + def test_update_merchant(self): expected = Merchant.from_db(self.merchant).as_dict() resp = self.put('merchants/' + self.merchant['id'], expected) self.assertData(expected, resp.json) - def test_merchant_delete(self): + def test_delete_merchant(self): self.delete('merchants/' + self.merchant['id']) - self.assertLen(0, self.central_service.merchant_list(self.admin_ctxt)) + self.assertLen(0, self.central_service.list_merchant(self.admin_ctxt)) diff --git a/billingstack/tests/api/v1/test_plan.py b/billingstack/tests/api/v1/test_plan.py index d1fcdf1..b155415 100644 --- a/billingstack/tests/api/v1/test_plan.py +++ b/billingstack/tests/api/v1/test_plan.py @@ -24,7 +24,7 @@ class TestPlan(FunctionalTest): __test__ = True path = "merchants/%s/plans" - def test_plan_add(self): + def test_create_plan(self): fixture = self.get_fixture('plan') url = self.path % self.merchant['id'] @@ -33,24 +33,24 @@ def test_plan_add(self): self.assertData(fixture, resp.json) - def test_plan_list(self): - self.plan_add(self.merchant['id']) + def test_list_plan(self): + self.create_plan(self.merchant['id']) url = self.path % self.merchant['id'] resp = 
self.get(url) self.assertLen(1, resp.json) - def test_plan_get(self): - _, plan = self.plan_add(self.merchant['id']) + def test_get_plan(self): + _, plan = self.create_plan(self.merchant['id']) url = self.item_path(self.merchant['id'], plan['id']) resp = self.get(url) self.assertData(resp.json, plan) - def test_plan_update(self): - _, plan = self.plan_add(self.merchant['id']) + def test_update_plan(self): + _, plan = self.create_plan(self.merchant['id']) plan['name'] = 'test' url = self.item_path(self.merchant['id'], plan['id']) @@ -58,10 +58,10 @@ def test_plan_update(self): self.assertData(resp.json, plan) - def test_plan_delete(self): - _, plan = self.plan_add(self.merchant['id']) + def test_delete_plan(self): + _, plan = self.create_plan(self.merchant['id']) url = self.item_path(self.merchant['id'], plan['id']) self.delete(url) - self.assertLen(0, self.central_service.plan_list(self.admin_ctxt)) + self.assertLen(0, self.central_service.list_plan(self.admin_ctxt)) diff --git a/billingstack/tests/api/v1/test_product.py b/billingstack/tests/api/v1/test_product.py index ad8c92e..0ec334b 100644 --- a/billingstack/tests/api/v1/test_product.py +++ b/billingstack/tests/api/v1/test_product.py @@ -28,7 +28,7 @@ class TestProduct(FunctionalTest): __test__ = True path = "merchants/%s/products" - def test_product_add(self): + def test_create_product(self): fixture = self.get_fixture('product') url = self.path % self.merchant['id'] @@ -36,24 +36,24 @@ def test_product_add(self): self.assertData(fixture, resp.json) - def test_product_list(self): - self.product_add(self.merchant['id']) + def test_list_product(self): + self.create_product(self.merchant['id']) url = self.path % self.merchant['id'] resp = self.get(url) self.assertLen(1, resp.json) - def test_product_get(self): - _, product = self.product_add(self.merchant['id']) + def test_get_product(self): + _, product = self.create_product(self.merchant['id']) url = self.item_path(self.merchant['id'], product['id']) resp = 
self.get(url) self.assertData(resp.json, product) - def test_product_update(self): - _, product = self.product_add(self.merchant['id']) + def test_update_product(self): + _, product = self.create_product(self.merchant['id']) product['name'] = 'test' url = self.item_path(self.merchant['id'], product['id']) @@ -61,10 +61,10 @@ def test_product_update(self): self.assertData(resp.json, product) - def test_product_delete(self): - _, product = self.product_add(self.merchant['id']) + def test_delete_product(self): + _, product = self.create_product(self.merchant['id']) url = self.item_path(self.merchant['id'], product['id']) self.delete(url) - self.assertLen(0, self.central_service.product_list(self.admin_ctxt)) + self.assertLen(0, self.central_service.list_product(self.admin_ctxt)) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index cb715fe..76a2567 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -120,7 +120,7 @@ def tearDown(self): #policy.reset() storage.teardown_schema() super(TestCase, self).tearDown() - + def get_storage_driver(self): connection = storage.get_connection() @@ -133,20 +133,20 @@ def get_api_service(self): return api_service.Service() def setSamples(self): - _, self.pg_method = self.pg_method_add() - _, self.currency = self.currency_add() - _, self.language = self.language_add() - _, self.merchant = self.merchant_add() + _, self.pg_method = self.create_pg_method() + _, self.currency = self.create_currency() + _, self.language = self.create_language() + _, self.merchant = self.create_merchant() - def language_add(self, fixture=0, values={}, **kw): + def create_language(self, fixture=0, values={}, **kw): fixture = self.get_fixture('language', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.language_add(ctxt, fixture, **kw) + return fixture, self.central_service.create_language(ctxt, fixture, **kw) - def currency_add(self, fixture=0, values={}, **kw): + 
def create_currency(self, fixture=0, values={}, **kw): fixture = self.get_fixture('currency', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.currency_add(ctxt, fixture, **kw) + return fixture, self.central_service.create_currency(ctxt, fixture, **kw) def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): methods = [self.get_fixture('pg_method')] or methods @@ -158,10 +158,10 @@ def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): fixture['methods'] = methods return fixture, data - def pg_method_add(self, fixture=0, values={}, **kw): + def create_pg_method(self, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_method') ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.pg_method_add(ctxt, fixture) + return fixture, self.central_service.create_pg_method(ctxt, fixture) def _account_defaults(self, values): # NOTE: Do defaults @@ -171,29 +171,29 @@ def _account_defaults(self, values): if not 'language_name' in values: values['language_name'] = self.language['name'] - def merchant_add(self, fixture=0, values={}, **kw): + def create_merchant(self, fixture=0, values={}, **kw): fixture = self.get_fixture('merchant', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) self._account_defaults(fixture) - return fixture, self.central_service.merchant_add(ctxt, fixture, **kw) + return fixture, self.central_service.create_merchant(ctxt, fixture, **kw) - def pg_config_add(self, provider_id, fixture=0, values={}, **kw): + def create_pg_config(self, provider_id, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.pg_config_add(ctxt, self.merchant['id'], provider_id, fixture, **kw) + return fixture, self.central_service.create_pg_config(ctxt, self.merchant['id'], provider_id, fixture, **kw) - def customer_add(self, merchant_id, fixture=0, 
values={}, **kw): + def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) self._account_defaults(fixture) - return fixture, self.central_service.customer_add(ctxt, merchant_id, fixture, **kw) + return fixture, self.central_service.create_customer(ctxt, merchant_id, fixture, **kw) - def payment_method_add(self, customer_id, provider_method_id, fixture=0, values={}, **kw): + def create_payment_method(self, customer_id, provider_method_id, fixture=0, values={}, **kw): fixture = self.get_fixture('payment_method', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.payment_method_add( + return fixture, self.central_service.create_payment_method( ctxt, customer_id, provider_method_id, fixture, **kw) def user_add(self, merchant_id, fixture=0, values={}, **kw): @@ -201,12 +201,12 @@ def user_add(self, merchant_id, fixture=0, values={}, **kw): ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.central_service.user_add(ctxt, merchant_id, fixture, **kw) - def product_add(self, merchant_id, fixture=0, values={}, **kw): + def create_product(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('product', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.product_add(ctxt, merchant_id, fixture, **kw) + return fixture, self.central_service.create_product(ctxt, merchant_id, fixture, **kw) - def plan_add(self, merchant_id, fixture=0, values={}, **kw): + def create_plan(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('plan', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.plan_add(ctxt, merchant_id, fixture, **kw) + return fixture, self.central_service.create_plan(ctxt, merchant_id, fixture, **kw) diff --git a/billingstack/tests/payment_gateway/base.py 
b/billingstack/tests/payment_gateway/base.py index 3983445..f0b16fa 100644 --- a/billingstack/tests/payment_gateway/base.py +++ b/billingstack/tests/payment_gateway/base.py @@ -15,45 +15,45 @@ def setUp(self): super(ProviderTestCase, self).setUp() info = self.get_fixture('contact_info') - _, self.customer = self.customer_add( + _, self.customer = self.create_customer( self.merchant['id'], contact_info=info) _, self.provider = self.pg_provider_register() - def test_account_add(self): - expected = self.pgp.account_add(self.customer) + def test_create_account(self): + expected = self.pgp.create_account(self.customer) - def test_account_list(self): - expected = self.pgp.account_add(self.customer) - actual = self.pgp.account_list() + def test_list_account(self): + expected = self.pgp.create_account(self.customer) + actual = self.pgp.list_account() - def test_account_get(self): - expected = self.pgp.account_add(self.customer) - actual = self.pgp.account_get(self.customer['id']) + def test_get_account(self): + expected = self.pgp.create_account(self.customer) + actual = self.pgp.get_account(self.customer['id']) - def test_account_delete(self): - data = self.pgp.account_add(self.customer) - self.pgp.account_delete(data['id']) + def test_delete_account(self): + data = self.pgp.create_account(self.customer) + self.pgp.delete_account(data['id']) def pm_create(self): """ Create all the necassary things to make a card """ - fixture, data = self.payment_method_add( + fixture, data = self.create_payment_method( self.customer['id'], self.provider['methods'][0]['id']) - self.pgp.account_add(self.customer) - return fixture, self.pgp.payment_method_add(data) + self.pgp.create_account(self.customer) + return fixture, self.pgp.create_payment_method(data) - def test_payment_method_add(self): + def test_create_payment_method(self): fixture, pm = self.pm_create() - def test_payment_method_list(self): + def test_list_payment_method(self): fixture, pm = self.pm_create() - assert 
len(self.pgp.payment_method_list(self.customer['id'])) == 1 + assert len(self.pgp.list_payment_method(self.customer['id'])) == 1 - def test_payment_method_get(self): + def test_get_payment_method(self): fixture, pm = self.pm_create() - assert pm == self.pgp.payment_method_get(pm['id']) + assert pm == self.pgp.get_payment_method(pm['id']) diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index 9c40bc9..41e1f80 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -34,15 +34,15 @@ def setUp(self): self.storage_conn = self.get_storage_driver() self.setSamples() - def language_add(self, fixture=0, values={}, **kw): + def create_language(self, fixture=0, values={}, **kw): fixture = self.get_fixture('language', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.language_add(ctxt, fixture, **kw) + return fixture, self.storage_conn.create_language(ctxt, fixture, **kw) - def currency_add(self, fixture=0, values={}, **kw): + def create_currency(self, fixture=0, values={}, **kw): fixture = self.get_fixture('currency', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.currency_add(ctxt, fixture, **kw) + return fixture, self.storage_conn.create_currency(ctxt, fixture, **kw) def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): methods = [self.get_fixture('pg_method')] or methods @@ -54,56 +54,56 @@ def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): fixture['methods'] = methods return fixture, data - def pg_method_add(self, fixture=0, values={}, **kw): + def create_pg_method(self, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_method') ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.pg_method_add(ctxt, fixture) + return fixture, self.storage_conn.create_pg_method(ctxt, fixture) - def merchant_add(self, fixture=0, values={}, 
**kw): + def create_merchant(self, fixture=0, values={}, **kw): fixture = self.get_fixture('merchant', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) self._account_defaults(fixture) - return fixture, self.storage_conn.merchant_add(ctxt, fixture, **kw) + return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) - def pg_config_add(self, provider_id, fixture=0, values={}, **kw): + def create_pg_config(self, provider_id, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.pg_config_add(ctxt, self.merchant['id'], provider_id, fixture, **kw) + return fixture, self.storage_conn.create_pg_config(ctxt, self.merchant['id'], provider_id, fixture, **kw) - def customer_add(self, merchant_id, fixture=0, values={}, **kw): + def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) self._account_defaults(fixture) - return fixture, self.storage_conn.customer_add(ctxt, merchant_id, fixture, **kw) + return fixture, self.storage_conn.create_customer(ctxt, merchant_id, fixture, **kw) - def payment_method_add(self, customer_id, provider_method_id, fixture=0, values={}, **kw): + def create_payment_method(self, customer_id, provider_method_id, fixture=0, values={}, **kw): fixture = self.get_fixture('payment_method', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.payment_method_add( + return fixture, self.storage_conn.create_payment_method( ctxt, customer_id, provider_method_id, fixture, **kw) - def product_add(self, merchant_id, fixture=0, values={}, **kw): + def create_product(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('product', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.product_add(ctxt, merchant_id, fixture, **kw) 
+ return fixture, self.storage_conn.create_product(ctxt, merchant_id, fixture, **kw) - def plan_add(self, merchant_id, fixture=0, values={}, **kw): + def create_plan(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('plan', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.plan_add(ctxt, merchant_id, fixture, **kw) + return fixture, self.storage_conn.create_plan(ctxt, merchant_id, fixture, **kw) # Currencies - def test_currency_add(self): - self.assertDuplicate(self.currency_add) + def test_create_currency(self): + self.assertDuplicate(self.create_currency) # Languages - def test_language_add(self): - self.assertDuplicate(self.language_add) + def test_create_language(self): + self.assertDuplicate(self.create_language) def test_set_properties(self): - fixture, data = self.product_add(self.merchant['id']) + fixture, data = self.create_product(self.merchant['id']) metadata = {"random": True} self.storage_conn.set_properties(data['id'], metadata, cls=models.Product) @@ -111,7 +111,7 @@ def test_set_properties(self): metadata.update({'foo': 1, 'bar': 2}) self.storage_conn.set_properties(data['id'], metadata, cls=models.Product) - actual = self.storage_conn.product_get(self.admin_ctxt, data['id']) + actual = self.storage_conn.get_product(self.admin_ctxt, data['id']) self.assertLen(4, actual['properties']) # Payment Gateways @@ -125,10 +125,10 @@ def test_pg_provider_register(self): def test_pg_provider_register_different_methods(self): # Add a Global method method1 = {'type': 'creditcard', 'name': 'mastercard'} - self.storage_conn.pg_method_add(self.admin_ctxt, method1) + self.storage_conn.create_pg_method(self.admin_ctxt, method1) method2 = {'type': 'creditcard', 'name': 'amex'} - self.storage_conn.pg_method_add(self.admin_ctxt, method2) + self.storage_conn.create_pg_method(self.admin_ctxt, method2) method3 = {'type': 'creditcard', 'name': 'visa', 'owned': 1} @@ -147,7 +147,7 @@ def 
test_pg_provider_register_method_switch_methods(self): 'type': 'creditcard', 'name': 'mastercard', 'title': "random"} - self.storage_conn.pg_method_add(self.admin_ctxt, system_method) + self.storage_conn.create_pg_method(self.admin_ctxt, system_method) provider = self.storage_conn.pg_provider_register( self.admin_ctxt, @@ -176,13 +176,13 @@ def test_pg_provider_register_method_switch_methods(self): self.assertLen(1, provider['methods']) self.assertData(system_method, provider['methods'][0]) - def test_pg_provider_get(self): + def test_get_pg_provider(self): _, expected = self.pg_provider_register() - actual = self.storage_conn.pg_provider_get(self.admin_ctxt, expected['id']) + actual = self.storage_conn.get_pg_provider(self.admin_ctxt, expected['id']) self.assertData(expected, actual) - def test_pg_provider_get_missing(self): - self.assertMissing(self.storage_conn.pg_provider_get, self.admin_ctxt, UUID) + def test_get_pg_provider_missing(self): + self.assertMissing(self.storage_conn.get_pg_provider, self.admin_ctxt, UUID) def test_pg_provider_deregister(self): _, data = self.pg_provider_register() @@ -193,237 +193,237 @@ def test_pg_provider_deregister_missing(self): self.assertMissing(self.storage_conn.pg_provider_deregister, self.admin_ctxt, UUID) # Payment Gateway Configuration - def test_pg_config_add(self): + def test_create_pg_config(self): _, provider = self.pg_provider_register() - fixture, data = self.pg_config_add(provider['id']) + fixture, data = self.create_pg_config(provider['id']) self.assertData(fixture, data) - def test_pg_config_get(self): + def test_get_pg_config(self): _, provider = self.pg_provider_register() - fixture, data = self.pg_config_add(provider['id']) + fixture, data = self.create_pg_config(provider['id']) - def test_pg_config_get_missing(self): - self.assertMissing(self.storage_conn.pg_config_get, self.admin_ctxt, UUID) + def test_get_pg_config_missing(self): + self.assertMissing(self.storage_conn.get_pg_config, self.admin_ctxt, UUID) 
- def test_pg_config_update(self): + def test_update_pg_config(self): _, provider = self.pg_provider_register() - fixture, data = self.pg_config_add(provider['id']) + fixture, data = self.create_pg_config(provider['id']) fixture['configuration'] = {"api": 1} - updated = self.storage_conn.pg_config_update(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_pg_config(self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) - def test_pg_config_update_missing(self): + def test_update_pg_config_missing(self): _, provider = self.pg_provider_register() - fixture, data = self.pg_config_add(provider['id']) + fixture, data = self.create_pg_config(provider['id']) - self.assertMissing(self.storage_conn.pg_config_update, self.admin_ctxt, UUID, {}) + self.assertMissing(self.storage_conn.update_pg_config, self.admin_ctxt, UUID, {}) - def test_pg_config_delete(self): + def test_delete_pg_config(self): _, provider = self.pg_provider_register() - fixture, data = self.pg_config_add(provider['id']) + fixture, data = self.create_pg_config(provider['id']) - self.storage_conn.pg_config_delete(self.admin_ctxt,data['id']) - self.assertMissing(self.storage_conn.pg_config_get, self.admin_ctxt, data['id']) + self.storage_conn.delete_pg_config(self.admin_ctxt,data['id']) + self.assertMissing(self.storage_conn.get_pg_config, self.admin_ctxt, data['id']) - def test_pg_config_delete_missing(self): - self.assertMissing(self.storage_conn.pg_config_delete, self.admin_ctxt, UUID) + def test_delete_pg_config_missing(self): + self.assertMissing(self.storage_conn.delete_pg_config, self.admin_ctxt, UUID) # PaymentMethod - def test_payment_method_add(self): + def test_create_payment_method(self): _, provider = self.pg_provider_register() m_id = provider['methods'][0]['id'] - _, customer = self.customer_add(self.merchant['id']) + _, customer = self.create_customer(self.merchant['id']) - fixture, data = self.payment_method_add(customer['id'], m_id) + fixture, data = 
self.create_payment_method(customer['id'], m_id) self.assertData(fixture, data) - def test_payment_method_get(self): + def test_get_payment_method(self): _, provider = self.pg_provider_register() m_id = provider['methods'][0]['id'] - _, customer = self.customer_add(self.merchant['id']) + _, customer = self.create_customer(self.merchant['id']) - _, expected = self.payment_method_add(customer['id'], m_id) - actual = self.storage_conn.payment_method_get(self.admin_ctxt, expected['id']) + _, expected = self.create_payment_method(customer['id'], m_id) + actual = self.storage_conn.get_payment_method(self.admin_ctxt, expected['id']) self.assertData(expected, actual) # TODO(ekarlso): Make this test more extensive? - def test_payment_method_list(self): + def test_list_payment_method(self): # Setup a PGP with it's sample methods _, provider = self.pg_provider_register() m_id = provider['methods'][0]['id'] # Add two Customers with some methods - _, customer1 = self.customer_add(self.merchant['id']) - self.payment_method_add(customer1['id'], m_id) - rows = self.storage_conn.payment_method_list( + _, customer1 = self.create_customer(self.merchant['id']) + self.create_payment_method(customer1['id'], m_id) + rows = self.storage_conn.list_payment_method( self.admin_ctxt, criterion={'customer_id': customer1['id']}) self.assertLen(1, rows) - _, customer2 = self.customer_add(self.merchant['id']) - self.payment_method_add(customer2['id'], m_id) - self.payment_method_add(customer2['id'], m_id) + _, customer2 = self.create_customer(self.merchant['id']) + self.create_payment_method(customer2['id'], m_id) + self.create_payment_method(customer2['id'], m_id) - rows = self.storage_conn.payment_method_list( + rows = self.storage_conn.list_payment_method( self.admin_ctxt, criterion={'customer_id': customer2['id']}) self.assertLen(2, rows) - def test_payment_method_get_missing(self): - self.assertMissing(self.storage_conn.payment_method_get, self.admin_ctxt, UUID) + def 
test_get_payment_method_missing(self): + self.assertMissing(self.storage_conn.get_payment_method, self.admin_ctxt, UUID) - def test_payment_method_update(self): + def test_update_payment_method(self): _, provider = self.pg_provider_register() m_id = provider['methods'][0]['id'] - _, customer = self.customer_add(self.merchant['id']) + _, customer = self.create_customer(self.merchant['id']) - fixture, data = self.payment_method_add(customer['id'], m_id) + fixture, data = self.create_payment_method(customer['id'], m_id) fixture['identifier'] = 1 - updated = self.storage_conn.payment_method_update(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_payment_method(self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) - def test_payment_method_update_missing(self): - self.assertMissing(self.storage_conn.payment_method_update, self.admin_ctxt, UUID, {}) + def test_update_payment_method_missing(self): + self.assertMissing(self.storage_conn.update_payment_method, self.admin_ctxt, UUID, {}) - def test_payment_method_delete(self): + def test_delete_payment_method(self): _, provider = self.pg_provider_register() - fixture, data = self.pg_config_add(provider['id']) + fixture, data = self.create_pg_config(provider['id']) - self.storage_conn.pg_config_delete(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.payment_method_delete, self.admin_ctxt, data['id']) + self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.delete_payment_method, self.admin_ctxt, data['id']) - def test_payment_method_delete_missing(self): - self.assertMissing(self.storage_conn.payment_method_delete, self.admin_ctxt, UUID) + def test_delete_payment_method_missing(self): + self.assertMissing(self.storage_conn.delete_payment_method, self.admin_ctxt, UUID) # Merchant - def test_merchant_add(self): - fixture, data = self.merchant_add() + def test_create_merchant(self): + fixture, data = 
self.create_merchant() self.assertData(fixture, data) - def test_merchant_get(self): - _, expected = self.merchant_add() - actual = self.storage_conn.merchant_get(self.admin_ctxt, expected['id']) + def test_get_merchant(self): + _, expected = self.create_merchant() + actual = self.storage_conn.get_merchant(self.admin_ctxt, expected['id']) self.assertData(expected, actual) - def test_merchant_get_missing(self): - self.assertMissing(self.storage_conn.merchant_get, self.admin_ctxt, UUID) + def test_get_merchant_missing(self): + self.assertMissing(self.storage_conn.get_merchant, self.admin_ctxt, UUID) - def test_merchant_update(self): - fixture, data = self.merchant_add() + def test_update_merchant(self): + fixture, data = self.create_merchant() fixture['name'] = 'test' - updated = self.storage_conn.merchant_update(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_merchant(self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) - def test_merchant_update_missing(self): - self.assertMissing(self.storage_conn.merchant_update, self.admin_ctxt, UUID, {}) + def test_update_merchant_missing(self): + self.assertMissing(self.storage_conn.update_merchant, self.admin_ctxt, UUID, {}) - def test_merchant_delete(self): - self.storage_conn.merchant_delete(self.admin_ctxt, self.merchant['id']) - self.assertMissing(self.storage_conn.merchant_get, self.admin_ctxt, self.merchant['id']) + def test_delete_merchant(self): + self.storage_conn.delete_merchant(self.admin_ctxt, self.merchant['id']) + self.assertMissing(self.storage_conn.get_merchant, self.admin_ctxt, self.merchant['id']) - def test_merchant_delete_missing(self): - self.assertMissing(self.storage_conn.merchant_delete, self.admin_ctxt, UUID) + def test_delete_merchant_missing(self): + self.assertMissing(self.storage_conn.delete_merchant, self.admin_ctxt, UUID) # Customer - def test_customer_add(self): - fixture, data = self.customer_add(self.merchant['id']) + def 
test_create_customer(self): + fixture, data = self.create_customer(self.merchant['id']) assert data['default_info'] == {} assert data['contact_info'] == [] self.assertData(fixture, data) - def test_customer_add_with_contact_info(self): + def test_create_customer_with_contact_info(self): contact_fixture = self.get_fixture('contact_info') - customer_fixture, data = self.customer_add( + customer_fixture, data = self.create_customer( self.merchant['id'], values={'contact_info': contact_fixture}) self.assertData(customer_fixture, data) self.assertData(contact_fixture, data['default_info']) self.assertData(contact_fixture, data['contact_info'][0]) - def test_customer_get(self): - _, expected = self.customer_add(self.merchant['id']) - actual = self.storage_conn.customer_get(self.admin_ctxt, expected['id']) + def test_get_customer(self): + _, expected = self.create_customer(self.merchant['id']) + actual = self.storage_conn.get_customer(self.admin_ctxt, expected['id']) self.assertData(expected, actual) - def test_customer_get_missing(self): - self.assertMissing(self.storage_conn.customer_get, self.admin_ctxt, UUID) + def test_get_customer_missing(self): + self.assertMissing(self.storage_conn.get_customer, self.admin_ctxt, UUID) - def test_customer_update(self): - fixture, data = self.customer_add(self.merchant['id']) + def test_update_customer(self): + fixture, data = self.create_customer(self.merchant['id']) fixture['name'] = 'test' - updated = self.storage_conn.customer_update(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_customer(self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) - def test_customer_update_missing(self): - self.assertMissing(self.storage_conn.customer_update, self.admin_ctxt, UUID, {}) + def test_update_customer_missing(self): + self.assertMissing(self.storage_conn.update_customer, self.admin_ctxt, UUID, {}) - def test_customer_delete(self): - _, data = self.customer_add(self.merchant['id']) - 
self.storage_conn.customer_delete(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.customer_get, self.admin_ctxt, data['id']) + def test_delete_customer(self): + _, data = self.create_customer(self.merchant['id']) + self.storage_conn.delete_customer(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_customer, self.admin_ctxt, data['id']) - def test_customer_delete_missing(self): - self.assertMissing(self.storage_conn.customer_delete, self.admin_ctxt, UUID) + def test_delete_customer_missing(self): + self.assertMissing(self.storage_conn.delete_customer, self.admin_ctxt, UUID) # Products - def test_product_add(self): - f, data = self.product_add(self.merchant['id']) + def test_create_product(self): + f, data = self.create_product(self.merchant['id']) self.assertData(f, data) - def test_product_get(self): - f, expected = self.product_add(self.merchant['id']) - actual = self.storage_conn.product_get(self.admin_ctxt, expected['id']) + def test_get_product(self): + f, expected = self.create_product(self.merchant['id']) + actual = self.storage_conn.get_product(self.admin_ctxt, expected['id']) self.assertData(expected, actual) - def test_product_get_missing(self): - self.assertMissing(self.storage_conn.product_get, self.admin_ctxt, UUID) + def test_get_product_missing(self): + self.assertMissing(self.storage_conn.get_product, self.admin_ctxt, UUID) - def test_product_update(self): - fixture, data = self.product_add(self.merchant['id']) + def test_update_product(self): + fixture, data = self.create_product(self.merchant['id']) fixture['name'] = 'test' - updated = self.storage_conn.product_update(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_product(self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) - def test_product_update_missing(self): - self.assertMissing(self.storage_conn.product_update, self.admin_ctxt, UUID, {}) + def test_update_product_missing(self): + 
self.assertMissing(self.storage_conn.update_product, self.admin_ctxt, UUID, {}) - def test_product_delete(self): - fixture, data = self.product_add(self.merchant['id']) - self.storage_conn.product_delete(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.product_get, self.admin_ctxt, data['id']) + def test_delete_product(self): + fixture, data = self.create_product(self.merchant['id']) + self.storage_conn.delete_product(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_product, self.admin_ctxt, data['id']) - def test_product_delete_missing(self): - self.assertMissing(self.storage_conn.product_delete, self.admin_ctxt, UUID) + def test_delete_product_missing(self): + self.assertMissing(self.storage_conn.delete_product, self.admin_ctxt, UUID) # Plan - def test_plan_add_with_items(self): - _, p1 = self.product_add(self.merchant['id']) - _, p2 = self.product_add(self.merchant['id']) + def test_create_plan_with_items(self): + _, p1 = self.create_product(self.merchant['id']) + _, p2 = self.create_product(self.merchant['id']) values = { 'plan_items': [{'product_id': p1['id']}, {'product_id': p2['id']}] } - fixture, data = self.plan_add(self.merchant['id'], values=values) + fixture, data = self.create_plan(self.merchant['id'], values=values) self.assertData(fixture, data) - def test_plan_add_without_items(self): - fixture, data = self.plan_add(self.merchant['id']) + def test_create_plan_without_items(self): + fixture, data = self.create_plan(self.merchant['id']) self.assertData(fixture, data) - def test_plan_get(self): - fixture, data = self.plan_add(self.merchant['id']) - actual = self.storage_conn.plan_get(self.admin_ctxt, data['id']) + def test_get_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + actual = self.storage_conn.get_plan(self.admin_ctxt, data['id']) # FIXME(ekarlso): This should test the actual items also? 
But atm there's an # error that if the value is int when getting added it's string when returned... @@ -431,24 +431,24 @@ def test_plan_get(self): self.assertEqual(data['title'], actual['title']) self.assertEqual(data['description'], actual['description']) - def test_plan_get_missing(self): - self.assertMissing(self.storage_conn.plan_get, self.admin_ctxt, UUID) + def test_get_plan_missing(self): + self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, UUID) - def test_plan_update(self): - fixture, data = self.plan_add(self.merchant['id']) + def test_update_plan(self): + fixture, data = self.create_plan(self.merchant['id']) fixture['name'] = 'test' - updated = self.storage_conn.plan_update(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_plan(self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) - def test_plan_update_missing(self): - self.assertMissing(self.storage_conn.plan_update, self.admin_ctxt, UUID, {}) + def test_update_plan_missing(self): + self.assertMissing(self.storage_conn.update_plan, self.admin_ctxt, UUID, {}) - def test_plan_delete(self): - fixture, data = self.plan_add(self.merchant['id']) - self.storage_conn.plan_delete(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.plan_get, self.admin_ctxt, data['id']) + def test_delete_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + self.storage_conn.delete_plan(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, data['id']) - def test_plan_delete_missing(self): - self.assertMissing(self.storage_conn.plan_delete, self.admin_ctxt, UUID) \ No newline at end of file + def test_delete_plan_missing(self): + self.assertMissing(self.storage_conn.delete_plan, self.admin_ctxt, UUID) \ No newline at end of file From d60559770b388b5f0f458d13d4b3e2f22ed8617a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 07:57:38 +0000 Subject: [PATCH 009/182] Fix samples --- 
tools/dev_samples.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tools/dev_samples.py b/tools/dev_samples.py index e4e514b..9da6098 100644 --- a/tools/dev_samples.py +++ b/tools/dev_samples.py @@ -38,23 +38,23 @@ def get_fixture(name, fixture=0, values={}): currencies = {} for c in samples['currency']: - currencies[c['name']] = conn.currency_add(ctxt, c) + currencies[c['name']] = conn.create_currency(ctxt, c) languages = {} for l in samples['language']: - languages[l['name']] = conn.language_add(ctxt, l) + languages[l['name']] = conn.create_language(ctxt, l) for method in samples['pg_method']: - conn.pg_method_add(ctxt, method) + conn.create_pg_method(ctxt, method) country_data = { "currency_name": currencies['nok']['name'], "language_name": languages['nor']['name']} - merchant = conn.merchant_add( + merchant = conn.create_merchant( ctxt, get_fixture('merchant', values=country_data)) - customer = conn.customer_add( + customer = conn.create_customer( ctxt, merchant['id'], get_fixture('customer', values=country_data)) #contact_info = get_fixture('contact_info') @@ -78,7 +78,7 @@ def get_fixture(name, fixture=0, values={}): products = {} for p in samples['product']: - products[p['name']] = conn.product_add(ctxt, merchant['id'], p) + products[p['name']] = conn.create_product(ctxt, merchant['id'], p) values = { 'plan_items': [ @@ -90,4 +90,4 @@ def get_fixture(name, fixture=0, values={}): plan = get_fixture('plan', values=values) - conn.plan_add(ctxt, merchant['id'], get_fixture('plan')) + conn.create_plan(ctxt, merchant['id'], get_fixture('plan')) From 56f19c755682af4eb37996b59113821f0477da83 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 12:55:00 +0000 Subject: [PATCH 010/182] CRUD improvement * Add CRUD for invoice_state, language and currency * Make API helpers take by_name that makes it lookup a row by name. * Fixup billingstack.sqlalchemy.utils functions. 
--- billingstack/api/v1/__init__.py | 2 +- billingstack/api/v1/controllers.py | 100 ++++++++++++++++++ billingstack/api/v1/models.py | 16 +-- billingstack/central/rpcapi.py | 24 ++++- billingstack/samples_data/invoice_state.json | 7 ++ billingstack/sqlalchemy/api.py | 27 +++-- billingstack/sqlalchemy/utils.py | 25 ++++- .../storage/impl_sqlalchemy/__init__.py | 40 +++++-- .../storage/impl_sqlalchemy/models.py | 2 + billingstack/tests/api/v1/test_currency.py | 67 ++++++++++++ .../tests/api/v1/test_invoice_state.py | 68 ++++++++++++ billingstack/tests/api/v1/test_language.py | 67 ++++++++++++ billingstack/tests/base.py | 6 +- 13 files changed, 416 insertions(+), 35 deletions(-) create mode 100644 billingstack/samples_data/invoice_state.json create mode 100644 billingstack/tests/api/v1/test_currency.py create mode 100644 billingstack/tests/api/v1/test_invoice_state.py create mode 100644 billingstack/tests/api/v1/test_language.py diff --git a/billingstack/api/v1/__init__.py b/billingstack/api/v1/__init__.py index 0e3c17b..3d3035c 100644 --- a/billingstack/api/v1/__init__.py +++ b/billingstack/api/v1/__init__.py @@ -1 +1 @@ -from .controllers import V1Controller +from .controllers import V1Controller diff --git a/billingstack/api/v1/controllers.py b/billingstack/api/v1/controllers.py index b7d1c0e..16a595b 100644 --- a/billingstack/api/v1/controllers.py +++ b/billingstack/api/v1/controllers.py @@ -27,8 +27,29 @@ LOG = log.getLogger(__name__) +class CurrencyController(RestBase): + @wsme_pecan.wsexpose(models.Currency) + def get_all(self): + row = request.central_api.get_currency(request.ctxt, + self.id_) + return models.Currency.from_db(row) + + @wsme_pecan.wsexpose(models.Currency, body=models.Currency) + def put(self, body): + row = request.central_api.update_currency( + request.ctxt, + self.id_, + body.to_db()) + return models.Currency.from_db(row) + + @wsme_pecan.wsexpose() + def delete(self): + request.central_api.delete_currency(request.ctxt, self.id_) + + class 
CurrenciesController(RestBase): """Currsencies controller""" + __resource__ = CurrencyController @wsme_pecan.wsexpose([models.Currency]) def get_all(self): @@ -36,9 +57,38 @@ def get_all(self): return [models.Currency.from_db(r) for r in rows] + @wsme_pecan.wsexpose(models.Currency, body=models.Currency) + def post(self, body): + row = request.central_api.create_currency( + request.ctxt, + body.to_db()) + + return models.Currency.from_db(row) + + +class LanguageController(RestBase): + @wsme_pecan.wsexpose(models.Language) + def get_all(self): + row = request.central_api.get_language(request.ctxt, + self.id_) + return models.Language.from_db(row) + + @wsme_pecan.wsexpose(models.Language, body=models.Language) + def put(self, body): + row = request.central_api.update_language( + request.ctxt, + self.id_, + body.to_db()) + return models.Language.from_db(row) + + @wsme_pecan.wsexpose() + def delete(self): + request.central_api.delete_language(request.ctxt, self.id_) + class LanguagesController(RestBase): """Languages controller""" + __resource__ = LanguageController @wsme_pecan.wsexpose([models.Language]) def get_all(self): @@ -46,6 +96,14 @@ def get_all(self): return [models.Language.from_db(r) for r in rows] + @wsme_pecan.wsexpose(models.Language, body=models.Language) + def post(self, body): + row = request.central_api.create_language( + request.ctxt, + body.to_db()) + + return models.Language.from_db(row) + class PGProvidersController(RestBase): """ @@ -69,6 +127,47 @@ def get_all(self): return [models.PGMethod.from_db(r) for r in rows] +class InvoiceStateController(RestBase): + @wsme_pecan.wsexpose(models.InvoiceState) + def get_all(self): + row = request.central_api.get_invoice_state(request.ctxt, + self.id_) + return models.InvoiceState.from_db(row) + + @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState) + def put(self, body): + row = request.central_api.update_invoice_state( + request.ctxt, + self.id_, + body.to_db()) + return 
models.InvoiceState.from_db(row) + + @wsme_pecan.wsexpose() + def delete(self): + request.central_api.delete_invoice_state(request.ctxt, self.id_) + + +class InvoiceStatecontroller(RestBase): + """ + PaymentGatewayProviders + """ + __resource__ = InvoiceStateController + + @wsme_pecan.wsexpose([models.InvoiceState]) + def get_all(self): + rows = request.central_api.list_invoice_state(request.ctxt) + + return [models.InvoiceState.from_db(r) for r in rows] + + @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState) + def post(self, body): + row = request.central_api.create_invoice_state( + request.ctxt, + body.to_db()) + + return models.InvoiceState.from_db(row) + + # Plans class PlanController(RestBase): __id__ = 'plan' @@ -305,6 +404,7 @@ class V1Controller(RestBase): """Version 1 API controller.""" __resource__ = { + 'invoice-states': InvoiceStatecontroller, 'payment-gateway-providers': PGProvidersController, 'payment-gateway-methods': PGMethodsController } diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 269f75d..af4f426 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -31,16 +31,16 @@ def change_suffixes(data, keys, shorten=True, suffix='_name'): data[new] = data.pop(old) -class Currency(Base): - id = text - name = text - title = text +class Currency(DescribedBase): + pass -class Language(Base): - id = text - name = text - title = text +class Language(DescribedBase): + pass + + +class InvoiceState(DescribedBase): + pass class PaymentMethod(Base): diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 9c3787f..7097f68 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -51,11 +51,30 @@ def get_language(self, ctxt, id_): def update_language(self, ctxt, id_, values): return self.call(ctxt, self.make_msg('update_language', - id_, values)) + id_=id_, values=values)) def delete_language(self, ctxt, id_): return self.call(ctxt, 
self.make_msg('delete_language', id_=id_)) + # Invoice States + def create_invoice_state(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_invoice_state', + values=values)) + + def list_invoice_state(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_invoice_state', + criterion=criterion)) + + def get_invoice_state(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_invoice_state', id_=id_)) + + def update_invoice_state(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_invoice_state', + id_=id_, values=values)) + + def delete_invoice_state(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_invoice_state', id_=id_)) + # Contact Info def create_contact_info(self, ctxt, id_, values): return self.call(ctxt, self.make_msg('create_contact_info', id_=id_, @@ -204,7 +223,8 @@ def delete_product(self, ctxt, id_): # PlanItems def create_plan_item(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_plan_item', values=values)) + return self.call(ctxt, self.make_msg('create_plan_item', + values=values)) def list_plan_item(self, ctxt, criterion=None): return self.call(ctxt, self.make_msg('list_plan_item', diff --git a/billingstack/samples_data/invoice_state.json b/billingstack/samples_data/invoice_state.json new file mode 100644 index 0000000..1f3f7ba --- /dev/null +++ b/billingstack/samples_data/invoice_state.json @@ -0,0 +1,7 @@ +[ + { + "name": "pending", + "title": "Pending", + "description": "The invoice is in Pending state." 
+ } +] \ No newline at end of file diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index ce371d9..8164a50 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -1,9 +1,8 @@ -from sqlalchemy import or_ from sqlalchemy.orm import exc from billingstack import exceptions from billingstack.openstack.common import log -from billingstack.sqlalchemy import model_base, session +from billingstack.sqlalchemy import model_base, session, utils LOG = log.getLogger(__name__) @@ -29,6 +28,7 @@ def teardown_schema(self): def _save(self, obj, save=True): if not save: return obj + try: obj.save(self.session) except exceptions.Duplicate: @@ -58,6 +58,14 @@ def _list(self, cls=None, query=None, criterion=None): else: return result + def _filter_id(self, cls, identifier, by_name): + if hasattr(cls, 'id') and utils.is_valid_id(identifier): + return cls.id == identifier + elif hasattr(cls, 'name') and by_name: + return cls.name == identifier + else: + raise exceptions.NotFound('No criterias matched') + def _get(self, cls, identifier, by_name=False): """ Get an instance of a Model matching ID @@ -66,12 +74,9 @@ def _get(self, cls, identifier, by_name=False): :param identifier: The ID to get :param by_name: Search by name as well as ID """ - filters = [cls.id == identifier] - if by_name: - filters.append(cls.name == identifier) + id_filter = self._filter_id(cls, identifier, by_name) - query = self.session.query(cls)\ - .filter(or_(*filters)) + query = self.session.query(cls).filter(id_filter) try: obj = query.one() @@ -86,7 +91,7 @@ def _get_id_or_name(self, *args, **kw): kw['by_name'] = True return self._get(*args, **kw) - def _update(self, cls, id_, values): + def _update(self, cls, id_, values, by_name=False): """ Update an instance of a Model matching an ID with values @@ -94,7 +99,7 @@ def _update(self, cls, id_, values): :param id_: The ID to update :param values: The values to update the model instance with """ - obj = 
self._get(cls, id_) + obj = self._get_id_or_name(cls, id_, by_name=by_name) if 'id' in values and id_ != values['id']: msg = 'Not allowed to change id' errors = {'id': id_} @@ -106,14 +111,14 @@ def _update(self, cls, id_, values): raise return obj - def _delete(self, cls, id_): + def _delete(self, cls, id_, by_name=False): """ Delete an instance of a Model matching an ID :param cls: The model to try to delete :param id_: The ID to delete """ - obj = self._get(cls, id_) + obj = self._get(cls, id_, by_name=by_name) obj.delete(self.session) def _get_row(self, obj, cls=None, **kw): diff --git a/billingstack/sqlalchemy/utils.py b/billingstack/sqlalchemy/utils.py index 7c9a67f..1357869 100644 --- a/billingstack/sqlalchemy/utils.py +++ b/billingstack/sqlalchemy/utils.py @@ -1,12 +1,29 @@ from sqlalchemy.orm.properties import ColumnProperty, RelationshipProperty +from billingstack.openstack.common import uuidutils + + +def get_prop_dict(obj): + return dict([(p.key, p) for p in obj.__mapper__.iterate_properties]) def get_prop_names(obj, exclude=[]): + props = get_prop_dict(obj) + local, remote = [], [] - for p in obj.__mapper__.iterate_properties: - if p.key not in exclude: + for k, p in props.items(): + if k not in exclude: if isinstance(p, ColumnProperty): - local.append(p.key) + local.append(k) if isinstance(p, RelationshipProperty): - remote.append(p.key) + remote.append(k) return local, remote + + +def is_valid_id(id_): + """ + Return true if this is a valid ID for the cls.id + """ + if uuidutils.is_uuid_like(id_) or isinstance(id_, int): + return True + else: + return False diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 171e4d7..aa5e4e3 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -92,17 +92,17 @@ def list_currency(self, ctxt, **kw): return map(dict, rows) def get_currency(self, ctxt, id_): - row = 
self._get(models.Currency, id_) + row = self._get_id_or_name(models.Currency, id_) return dict(row) def update_currency(self, ctxt, id_, values): - row = self._update(models.Currency, id_, values) + row = self._update(models.Currency, id_, values, by_name=True) return dict(row) def delete_currency(self, ctxt, id_): - self._delete(models.Currency, id_) + self._delete(models.Currency, id_, by_name=True) - # Language + # Language def create_language(self, ctxt, values): """ Add a supported language to the database @@ -117,19 +117,43 @@ def list_language(self, ctxt, **kw): return map(dict, rows) def get_language(self, ctxt, id_): - row = self._get(models.Language, id_) + row = self._get_id_or_name(models.Language, id_) return dict(row) def update_language(self, ctxt, id_, values): - row = self._update(models.Language, id_, values) + row = self._update(models.Language, id_, values, by_name=True) return dict(row) def delete_language(self, ctxt, id_): - self._delete(models.Language, id_) + self._delete(models.Language, id_, by_name=True) + + # Invoice States + def create_invoice_state(self, ctxt, values): + """ + Add a supported invoice_state to the database + """ + row = models.InvoiceState(**values) + self._save(row) + return dict(row) + + def list_invoice_state(self, ctxt, **kw): + rows = self._list(models.InvoiceState, **kw) + return map(dict, rows) + + def get_invoice_state(self, ctxt, id_): + row = self._get_id_or_name(models.InvoiceState, id_) + return dict(row) + + def update_invoice_state(self, ctxt, id_, values): + row = self._update(models.InvoiceState, id_, values, by_name=True) + return dict(row) + + def delete_invoice_state(self, ctxt, id_): + self._delete(models.InvoiceState, id_, by_name=True) # ContactInfo def create_contact_info(self, ctxt, obj, values, cls=None, - rel_attr='contact_info'): + rel_attr='contact_info'): """ :param entity: The object to add the contact_info to :param values: The values to add diff --git 
a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index b9d68c1..556ac9f 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -258,6 +258,8 @@ class InvoiceState(BASE): Completed, Failed """ name = Column(Unicode(60), nullable=False, primary_key=True) + title = Column(Unicode(100), nullable=False) + description = Column(Unicode(255)) class Invoice(BASE, BaseMixin): diff --git a/billingstack/tests/api/v1/test_currency.py b/billingstack/tests/api/v1/test_currency.py new file mode 100644 index 0000000..1e248b4 --- /dev/null +++ b/billingstack/tests/api/v1/test_currency.py @@ -0,0 +1,67 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific currency governing permissions and limitations +# under the License. 
+""" +Test Currency +""" + +import logging + +from billingstack.tests.api.v1.base import FunctionalTest + +LOG = logging.getLogger(__name__) + + +class TestCurrency(FunctionalTest): + __test__ = True + path = "currencies" + + def test_create_currency(self): + fixture = self.get_fixture('currency', fixture=1) + + resp = self.post(self.path, fixture) + + self.assertData(fixture, resp.json) + + def test_list_currency(self): + + resp = self.get(self.path) + + self.assertLen(1, resp.json) + + def test_get_currency(self): + _, currency = self.create_currency(fixture=1) + + url = self.item_path(currency['name']) + resp = self.get(url) + + self.assertData(resp.json, currency) + + def test_update_currency(self): + _, currency = self.create_currency(fixture=1) + + url = self.item_path(currency['name']) + resp = self.put(url, currency) + + self.assertData(resp.json, currency) + + def test_delete_currency(self): + _, currency = self.create_currency(fixture=1) + + url = self.item_path(currency['name']) + self.delete(url) + + data = self.central_service.list_currency(self.admin_ctxt) + self.assertLen(1, data) diff --git a/billingstack/tests/api/v1/test_invoice_state.py b/billingstack/tests/api/v1/test_invoice_state.py new file mode 100644 index 0000000..04edb49 --- /dev/null +++ b/billingstack/tests/api/v1/test_invoice_state.py @@ -0,0 +1,68 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+""" +Test InvoiceState +""" + +import logging + +from billingstack.tests.api.v1.base import FunctionalTest + +LOG = logging.getLogger(__name__) + + +class TestInvoiceState(FunctionalTest): + __test__ = True + path = "invoice-states" + + def test_create_invoice_state(self): + fixture = self.get_fixture('invoice_state') + + resp = self.post(self.path, fixture) + + self.assertData(fixture, resp.json) + + def test_list_invoice_state(self): + self.create_invoice_state() + + resp = self.get(self.path) + + self.assertLen(1, resp.json) + + def test_get_invoice_state(self): + _, state = self.create_invoice_state() + + url = self.item_path(state['name']) + resp = self.get(url) + + self.assertData(resp.json, state) + + def test_update_invoice_state(self): + _, state = self.create_invoice_state() + + url = self.item_path(state['name']) + resp = self.put(url, state) + + self.assertData(resp.json, state) + + def test_delete_invoice_state(self): + _, state = self.create_invoice_state() + + url = self.item_path(state['name']) + self.delete(url) + + data = self.central_service.list_invoice_state(self.admin_ctxt) + self.assertLen(0, data) diff --git a/billingstack/tests/api/v1/test_language.py b/billingstack/tests/api/v1/test_language.py new file mode 100644 index 0000000..c57d5b8 --- /dev/null +++ b/billingstack/tests/api/v1/test_language.py @@ -0,0 +1,67 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+""" +Test Language +""" + +import logging + +from billingstack.tests.api.v1.base import FunctionalTest + +LOG = logging.getLogger(__name__) + + +class TestLanguage(FunctionalTest): + __test__ = True + path = "languages" + + def test_create_language(self): + fixture = self.get_fixture('language', fixture=1) + + resp = self.post(self.path, fixture) + + self.assertData(fixture, resp.json) + + def test_list_language(self): + + resp = self.get(self.path) + + self.assertLen(1, resp.json) + + def test_get_language(self): + _, language = self.create_language(fixture=1) + + url = self.item_path(language['name']) + resp = self.get(url) + + self.assertData(resp.json, language) + + def test_update_language(self): + _, language = self.create_language(fixture=1) + + url = self.item_path(language['name']) + resp = self.put(url, language) + + self.assertData(resp.json, language) + + def test_delete_language(self): + _, language = self.create_language(fixture=1) + + url = self.item_path(language['name']) + self.delete(url) + + data = self.central_service.list_language(self.admin_ctxt) + self.assertLen(1, data) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index 76a2567..3a093ae 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -121,7 +121,6 @@ def tearDown(self): storage.teardown_schema() super(TestCase, self).tearDown() - def get_storage_driver(self): connection = storage.get_connection() return connection @@ -148,6 +147,11 @@ def create_currency(self, fixture=0, values={}, **kw): ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.central_service.create_currency(ctxt, fixture, **kw) + def create_invoice_state(self, fixture=0, values={}, **kw): + fixture = self.get_fixture('invoice_state', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.central_service.create_invoice_state(ctxt, fixture, **kw) + def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): methods = 
[self.get_fixture('pg_method')] or methods fixture = self.get_fixture('pg_provider', fixture, values) From 5ea1a27dc48c1e8b78e518a1ab1f5e56a3447ca4 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 14:45:07 +0000 Subject: [PATCH 011/182] Restructure a bit and add preliminary Invoices / Subscritions --- billingstack/api/v1/controllers.py | 189 +++++++++++--- billingstack/api/v1/models.py | 37 ++- billingstack/central/rpcapi.py | 88 ++++--- .../storage/impl_sqlalchemy/__init__.py | 240 +++++++++++++----- 4 files changed, 408 insertions(+), 146 deletions(-) diff --git a/billingstack/api/v1/controllers.py b/billingstack/api/v1/controllers.py index 16a595b..811eb38 100644 --- a/billingstack/api/v1/controllers.py +++ b/billingstack/api/v1/controllers.py @@ -197,7 +197,13 @@ class PlansController(RestBase): @wsme_pecan.wsexpose([models.Plan]) def get_all(self): - rows = request.central_api.list_plan(request.ctxt) + criterion = { + 'merchant_id': request.context['merchant_id'] + } + + rows = request.central_api.list_plan( + request.ctxt, + criterion=criterion) return [models.Plan.from_db(r) for r in rows] @@ -211,97 +217,196 @@ def post(self, body): return models.Plan.from_db(row) -class PaymentMethodController(RestBase): - """PaymentMethod controller""" - __id__ = 'payment_method' +# Products +class ProductController(RestBase): + __id__ = 'product' - @wsme_pecan.wsexpose(models.PaymentMethod, unicode) + @wsme_pecan.wsexpose(models.Product) def get_all(self): - row = request.central_api.get_payment_method(request.ctxt, self.id_) + row = request.central_api.get_product(request.ctxt, self.id_) - return models.PaymentMethod.from_db(row) + return models.Product.from_db(row) - @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) + @wsme_pecan.wsexpose(models.Product, body=models.Product) def put(self, body): - row = request.central_api.update_payment_method( + row = request.central_api.update_product( request.ctxt, self.id_, body.to_db()) - 
return models.PaymentMethod.from_db(row) + return models.Product.from_db(row) @wsme_pecan.wsexpose() def delete(self): - request.central_api.delete_payment_method(request.ctxt, self.id_) + request.central_api.delete_product(request.ctxt, self.id_) -class PaymentMethodsController(RestBase): - """PaymentMethods controller""" - __resource__ = PaymentMethodController +class ProductsController(RestBase): + __resource__ = ProductController - @wsme_pecan.wsexpose([models.PaymentMethod], unicode) + @wsme_pecan.wsexpose([models.Product]) + def get_all(self): + rows = request.central_api.list_product(request.ctxt) + + return [models.Product.from_db(r) for r in rows] + + @wsme_pecan.wsexpose(models.Product, body=models.Product) + def post(self, body): + row = request.central_api.create_product( + request.ctxt, + request.context['merchant_id'], + body.to_db()) + + return models.Product.from_db(row) + + +# Invoice +class InvoiceController(RestBase): + __id__ = 'invoice' + + @wsme_pecan.wsexpose(models.Invoice) + def get_all(self): + row = request.central_api.get_invoice(request.ctxt, self.id_) + + return models.Invoice.from_db(row) + + @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice) + def put(self, body): + row = request.central_api.update_invoice( + request.ctxt, + self.id_, + body.to_db()) + + return models.Invoice.from_db(row) + + @wsme_pecan.wsexpose() + def delete(self): + request.central_api.delete_invoice(request.ctxt, self.id_) + + +class InvoicesController(RestBase): + __resource__ = InvoiceController + + @wsme_pecan.wsexpose([models.Invoice]) def get_all(self): criterion = { - 'customer_id': request.context['customer_id'] + 'merchant_id': request.context['merchant_id'] } - rows = request.central_api.list_payment_method( + rows = request.central_api.list_invoice( request.ctxt, criterion=criterion) - return [models.PaymentMethod.from_db(r) for r in rows] + return [models.Invoice.from_db(r) for r in rows] - @wsme_pecan.wsexpose(models.PaymentMethod, 
body=models.PaymentMethod) + @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice) def post(self, body): - row = request.central_api.create_payment_method( + row = request.central_api.create_invoice( request.ctxt, - request.context['customer_id'], + request.context['merchant_id'], body.to_db()) - return models.PaymentMethod.from_db(row) + return models.Invoice.from_db(row) -# Products -class ProductController(RestBase): - __id__ = 'product' +# Subscription +class SubscriptionController(RestBase): + __id__ = 'subscription' - @wsme_pecan.wsexpose(models.Product) + @wsme_pecan.wsexpose(models.Subscription) def get_all(self): - row = request.central_api.get_product(request.ctxt, self.id_) + row = request.central_api.get_subscription(request.ctxt, self.id_) - return models.Product.from_db(row) + return models.Invoice.from_db(row) - @wsme_pecan.wsexpose(models.Product, body=models.Product) + @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription) def put(self, body): - row = request.central_api.update_product( + row = request.central_api.update_subscription( request.ctxt, self.id_, body.to_db()) - return models.Product.from_db(row) + return models.Subscription.from_db(row) @wsme_pecan.wsexpose() def delete(self): - request.central_api.delete_product(request.ctxt, self.id_) + request.central_api.delete_subscription(request.ctxt, self.id_) -class ProductsController(RestBase): - __resource__ = ProductController +class SubscriptionsController(RestBase): + __resource__ = SubscriptionController - @wsme_pecan.wsexpose([models.Product]) + @wsme_pecan.wsexpose([models.Subscription]) def get_all(self): - rows = request.central_api.list_product(request.ctxt) + criterion = { + 'customer_id': request.context['customer_id'] + } - return [models.Product.from_db(r) for r in rows] + rows = request.central_api.list_subscription( + request.ctxt, + criterion=criterion) - @wsme_pecan.wsexpose(models.Product, body=models.Product) + return [models.Subscription.from_db(r) for r 
in rows] + + @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription) def post(self, body): - row = request.central_api.create_product( + row = request.central_api.create_subscription( request.ctxt, request.context['merchant_id'], body.to_db()) - return models.Product.from_db(row) + return models.Subscription.from_db(row) + + +# PaymentMethod +class PaymentMethodController(RestBase): + """PaymentMethod controller""" + __id__ = 'payment_method' + + @wsme_pecan.wsexpose(models.PaymentMethod, unicode) + def get_all(self): + row = request.central_api.get_payment_method(request.ctxt, self.id_) + + return models.PaymentMethod.from_db(row) + + @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) + def put(self, body): + row = request.central_api.update_payment_method( + request.ctxt, + self.id_, + body.to_db()) + + return models.PaymentMethod.from_db(row) + + @wsme_pecan.wsexpose() + def delete(self): + request.central_api.delete_payment_method(request.ctxt, self.id_) + + +class PaymentMethodsController(RestBase): + """PaymentMethods controller""" + __resource__ = PaymentMethodController + + @wsme_pecan.wsexpose([models.PaymentMethod], unicode) + def get_all(self): + criterion = { + 'customer_id': request.context['customer_id'] + } + + rows = request.central_api.list_payment_method( + request.ctxt, + criterion=criterion) + + return [models.PaymentMethod.from_db(r) for r in rows] + + @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) + def post(self, body): + row = request.central_api.create_payment_method( + request.ctxt, + request.context['customer_id'], + body.to_db()) + + return models.PaymentMethod.from_db(row) # Customers @@ -310,6 +415,7 @@ class CustomerController(RestBase): __id__ = 'customer' __resource__ = { "payment-methods": PaymentMethodsController, + "subscriptions": SubscriptionsController } @wsme_pecan.wsexpose(models.Customer, unicode) @@ -357,8 +463,9 @@ class MerchantController(RestBase): __id__ = 
'merchant' __resource__ = { "customers": CustomersController, + "invoices": InvoicesController, "plans": PlansController, - "products": ProductsController, + "products": ProductsController } @wsme_pecan.wsexpose(models.Merchant) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index af4f426..626b3ce 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -43,14 +43,6 @@ class InvoiceState(DescribedBase): pass -class PaymentMethod(Base): - name = text - identifier = text - expires = text - - properties = DictType(key_type=text, value_type=property_type) - - class PGMethod(DescribedBase): type = text @@ -82,6 +74,11 @@ class ContactInfo(Base): website = text + +class Plan(DescribedBase): + properties = DictType(key_type=text, value_type=property_type) + + class Product(DescribedBase): measure = text type = text @@ -89,7 +86,29 @@ class Product(DescribedBase): properties = DictType(key_type=text, value_type=property_type) -class Plan(DescribedBase): +class Invoice(Base): + identifier = text + sub_total = float + tax_percentage = float + tax_total = float + total = float + + +class Subscription(Base): + billing_day = int + resource_id = text + resource_type = text + + plan_id = text + customer_id = text + payment_method_id = text + + +class PaymentMethod(Base): + name = text + identifier = text + expires = text + properties = DictType(key_type=text, value_type=property_type) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 7097f68..c93a1b4 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -184,23 +184,42 @@ def update_customer(self, ctxt, id_, values): def delete_customer(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_customer', id_=id_)) - # User - def user_add(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('user_add', + # Plans + def create_plan(self, ctxt, merchant_id, values): + return self.call(ctxt, 
self.make_msg('create_plan', merchant_id=merchant_id, values=values)) - def user_list(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('user_list', criterion=criterion)) + def list_plan(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_plan', criterion=criterion)) + + def get_plan(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_plan', id_=id_)) + + def update_plan(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_plan', id_=id_, + values=values)) - def user_get(self, ctxt, id_): - return self.call(ctxt, self.make_msg('user_get', id_=id_)) + def delete_plan(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_plan', id_=id_)) - def user_update(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('user_update', id_=id_, + # PlanItems + def create_plan_item(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_plan_item', values=values)) - def user_delete(self, ctxt, id_): - return self.call(ctxt, self.make_msg('user_delete', id_=id_)) + def list_plan_item(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_plan_item', + criterion=criterion)) + + def get_plan_item(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_plan_item', id_=id_)) + + def update_plan_item(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_plan_item', id_=id_, + values=values)) + + def delete_plan_item(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_plan_item', id_=id_)) # Products def create_product(self, ctxt, merchant_id, values): @@ -221,39 +240,40 @@ def update_product(self, ctxt, id_, values): def delete_product(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_product', id_=id_)) - # PlanItems - def create_plan_item(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_plan_item', - values=values)) + # Invoices + def create_invoice(self, ctxt, merchant_id, values): + return 
self.call(ctxt, self.make_msg('create_invoice', + merchant_id=merchant_id, values=values)) - def list_plan_item(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_plan_item', + def list_invoice(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_invoice', criterion=criterion)) - def get_plan_item(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_plan_item', id_=id_)) + def get_invoice(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_invoice', id_=id_)) - def update_plan_item(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_plan_item', id_=id_, + def update_invoice(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_invoicet', id_=id_, values=values)) - def delete_plan_item(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_plan_item', id_=id_)) + def delete_invoice(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_invoice', id_=id_)) - # Plans - def create_plan(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_plan', + # Subscriptions + def create_subscription(self, ctxt, merchant_id, values): + return self.call(ctxt, self.make_msg('create_subscription', merchant_id=merchant_id, values=values)) - def list_plan(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_plan', criterion=criterion)) + def list_subscription(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_subscription', + criterion=criterion)) - def get_plan(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_plan', id_=id_)) + def get_subscription(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_subscription', id_=id_)) - def update_plan(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_plan', id_=id_, + def update_subscription(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_subscriptiont', id_=id_, values=values)) - def 
delete_plan(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_plan', id_=id_)) + def delete_subscription(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_subscription', id_=id_)) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index aa5e4e3..09932a3 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -408,6 +408,106 @@ def update_customer(self, ctxt, id_, values): def delete_customer(self, ctxt, id_): return self._delete(models.Customer, id_) + # Plan + def _plan(self, row): + plan = dict(row) + + plan['properties'] = self._kv_rows(row.properties, + func=lambda i: i['value']) + plan['plan_items'] = map(dict, row.plan_items) if row.plan_items\ + else [] + return plan + + def create_plan(self, ctxt, merchant_id, values): + """ + Add a new Plan + + :param merchant_id: The Merchant + :param values: Values describing the new Plan + """ + merchant = self._get(models.Merchant, merchant_id) + + items = values.pop('plan_items', []) + properties = values.pop('properties', {}) + + plan = models.Plan(**values) + + plan.merchant = merchant + self.set_properties(plan, properties) + + for i in items: + item_row = self.create_plan_item(ctxt, i, save=False) + plan.plan_items.append(item_row) + + self._save(plan) + return self._plan(plan) + + def list_plan(self, ctxt, **kw): + """ + List Plan + + :param merchant_id: The Merchant to list it for + """ + rows = self._list(models.Plan, **kw) + return map(self._plan, rows) + + def get_plan(self, ctxt, id_): + """ + Get a Plan + + :param id_: The Plan ID + """ + row = self._get(models.Plan, id_) + return self._plan(row) + + def update_plan(self, ctxt, id_, values): + """ + Update a Plan + + :param id_: The Plan ID + :param values: Values to update with + """ + properties = values.pop('properties', {}) + + row = self._get(models.Plan, id_) + row.update(values) + + 
self.set_properties(row, properties) + + self._save(row) + return self._plan(row) + + def delete_plan(self, ctxt, id_): + """ + Delete a Plan + + :param id_: Plan ID + """ + self._delete(models.Plan, id_) + + # PlanItem + def create_plan_item(self, ctxt, values, save=True): + ref = models.PlanItem() + return self._update_plan_item(ref, values, save=save) + + def update_plan_item(self, ctxt, item, values, save=True): + return self._update_plan_item(item, values, save=save) + + def _update_plan_item(self, item, values, save=True): + row = self._get_row(item, models.PlanItem) + row.update(values) + return self._save(row, save=save) + + def list_plan_item(self, ctxt, **kw): + return self._list(models.PlanItem, **kw) + + def get_plan_item(self, ctxt, id_): + row = self._get(models.PlanItem, id_) + return dict(row) + + def delete_plan_item(self, ctxt, id_): + self._delete(models.PlanItem, id_) + # Products def _product(self, row): product = dict(row) @@ -478,102 +578,118 @@ def delete_product(self, ctxt, id_): """ self._delete(models.Product, id_) - # PlanItem - def create_plan_item(self, ctxt, values, save=True): - ref = models.PlanItem() - return self._update_plan_item(ref, values, save=save) + # Invoices + def _invoice(self, row): + invoice = dict(row) + return invoice - def update_plan_item(self, ctxt, item, values, save=True): - return self._update_plan_item(item, values, save=save) + def create_invoice(self, ctxt, merchant_id, values): + """ + Add a new Invoice - def _update_plan_item(self, item, values, save=True): - row = self._get_row(item, models.PlanItem) - row.update(values) - return self._save(row, save=save) + :param merchant_id: The Merchant + :param values: Values describing the new Invoice + """ + merchant = self._get(models.Merchant, merchant_id) - def list_plan_item(self, ctxt, **kw): - return self._list(models.PlanItem, **kw) + invoice = models.Invoice(**values) + invoice.merchant = merchant - def get_plan_item(self, ctxt, id_): - row = 
self._get(models.PlanItem, id_) - return dict(row) + self._save(invoice) + return self._invoice(invoice) - def delete_plan_item(self, ctxt, id_): - self._delete(models.PlanItem, id_) + def list_invoice(self, ctxt, **kw): + """ + List Invoices + """ + rows = self._list(models.Invoice, **kw) + return map(self._invoice, rows) - # Plan - def _plan(self, row): - plan = dict(row) + def get_invoice(self, ctxt, id_): + """ + Get a Invoice - plan['properties'] = self._kv_rows(row.properties, - func=lambda i: i['value']) - plan['plan_items'] = map(dict, row.plan_items) if row.plan_items\ - else [] - return plan + :param id_: The Invoice ID + """ + row = self._get(models.Invoice, id_) + return self.invoice(row) - def create_plan(self, ctxt, merchant_id, values): + def update_invoice(self, ctxt, id_, values): """ - Add a new Plan + Update a Invoice - :param merchant_id: The Merchant - :param values: Values describing the new Plan + :param id_: The Invoice ID + :param values: Values to update with """ - merchant = self._get(models.Merchant, merchant_id) + row = self._get(models.Invoice, id_) + row.update(values) - items = values.pop('plan_items', []) - properties = values.pop('properties', {}) + self._save(row) + return self._invoice(row) - plan = models.Plan(**values) + def delete_invoice(self, ctxt, id_): + """ + Delete a Invoice - plan.merchant = merchant - self.set_properties(plan, properties) + :param id_: Invoice ID + """ + self._delete(models.Invoice, id_) - for i in items: - item_row = self.create_plan_item(ctxt, i, save=False) - plan.plan_items.append(item_row) + # Subscriptions + def _subscription(self, row): + subscription = dict(row) + return subscription - self._save(plan) - return self._plan(plan) + def create_subscription(self, ctxt, customer_id, values): + """ + Add a new Subscription - def list_plan(self, ctxt, **kw): + :param merchant_id: The Merchant + :param values: Values describing the new Subscription """ - List Plan + customer = 
self._get(models.Customer, customer_id) + + subscription = models.Subscription(**values) + subscription.customer = customer + + self._save(subscription) + return self._subscription(subscription) + + def list_subscription(self, ctxt, **kw): + """ + List Subscriptions :param merchant_id: The Merchant to list it for """ - rows = self._list(models.Plan, **kw) - return map(self._plan, rows) + rows = self._list(models.Subscription, **kw) + return map(self._subscription, rows) - def get_plan(self, ctxt, id_): + def get_subscription(self, ctxt, id_): """ - Get a Plan + Get a Subscription - :param id_: The Plan ID + :param id_: The Subscription ID """ - row = self._get(models.Plan, id_) - return self._plan(row) + row = self._get(models.Subscription, id_) + return self._subscription(row) - def update_plan(self, ctxt, id_, values): + def update_subscription(self, ctxt, id_, values): """ - Update a Plan + Update a Subscription - :param id_: The Plan ID + :param id_: The Subscription ID :param values: Values to update with """ - properties = values.pop('properties', {}) - - row = self._get(models.Plan, id_) + row = self._get(models.Subscription, id_) row.update(values) - self.set_properties(row, properties) - self._save(row) - return self._plan(row) + return self._subscription(row) - def delete_plan(self, ctxt, id_): + def delete_subscription(self, ctxt, id_): """ - Delete a Plan + Delete a Subscription - :param id_: Plan ID + :param id_: Subscription ID """ - self._delete(models.Plan, id_) + self._delete(models.Subscription, id_) From 63529ee3866f4e8b5ec0fb9415d39f4a3ae39e27 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 15:08:48 +0000 Subject: [PATCH 012/182] Update requires --- doc/requirements.txt | 84 +++++++++----------------------------------- 1 file changed, 16 insertions(+), 68 deletions(-) diff --git a/doc/requirements.txt b/doc/requirements.txt index 2c32ab1..6b9d614 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,75 +1,23 @@ - 
-alembic -anyjson>=0.2.4 +WebOb>=1.2 +eventlet +#pecan +-e git://github.com/ryanpetrello/pecan.git@next#egg=pecan +stevedore argparse -Babel>=0.9.6 -cfg.CONF.import_opt('database_connection', 'billingstack.storage.impl_sqlalchemy', -cfg.CONF.import_opt('storage_driver', 'billingstack.api', - conn = get_connection() - conn.setup_schema() - conn.teardown_schema() - contact_info=contact_info, - contact_info = get_fixture('contact_info') - country_data = { -coverage - currencies = {} - currencies[c['letter']] = conn.currency_add(c) - "currency_id": currencies['nok']['id'], - customer = conn.customer_add( - customer_id=customer['id']) - customer_user, - customer_user = conn.user_add( - customer_user = get_fixture('user') - customer_user['username'] = 'demo_customer' -def get_fixture(name, fixture=0, values={}): -docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. -e hg+https://bitbucket.org/cdevienne/wsme/#egg=wsme -eventlet - for c in samples['currency']: - for l in samples['language']: -from billingstack import service -from billingstack.openstack.common import cfg -from billingstack.openstack.common import log as logging -from billingstack.samples import get_samples -from billingstack.storage.impl_sqlalchemy import models -from billingstack.storage import get_connection - f = SAMPLES[name][fixture].copy() - f.update(values) - get_fixture('merchant', values=country_data)) - group='service:api') - group='storage:sqlalchemy') -if __name__ == '__main__': -import sys +anyjson>=0.2.4 +pycountry iso8601 - "language_id": languages['nor']['id']} - languages = {} - languages[l['letter']] = conn.language_add(l) - LOG.info("Re-Syncing database") -LOG = logging.getLogger(__name__) - merchant = conn.merchant_add( - merchant['id'], - merchant['id'], get_fixture('customer', values=country_data)) - merchant['id'], merchant_user, contact_info=contact_info) - merchant_user = conn.user_add( - merchant_user = get_fixture('user') - merchant_user['username'] = 
'demo_merchant' -mock -mox +cliff +http://tarballs.openstack.org/oslo-config/oslo-config-2013.1b4.tar.gz#egg=oslo-config +unittest2 nose -nosehtmloutput openstack.nose_plugin -# Optional Stuff that is used by default -pecan - print "ADDING", c -pycountry - return f - samples = get_samples() -SAMPLES = get_samples() - service.prepare_service(sys.argv) +nosehtmloutput +coverage +mock +mox +Babel>=0.9.6 sphinx sphinxcontrib-httpdomain -SQLAlchemy>=0.7.8,<=0.7.9 -stevedore -unittest2 -#!/usr/bin/env python -WebOb>=1.2 +docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. From bf25131ba224b688f76e3b3b4a9d945e55d81917 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 15:12:03 +0000 Subject: [PATCH 013/182] Unused --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index 885181b..c173ee7 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,6 @@ scripts=[ 'bin/billingstack-api', 'bin/billingstack-identity-api', - 'bin/billingstack-db-manage', 'bin/billingstack-manage', 'bin/billingstack-central' ], From 1601238139116eb78c6b9f9e5cb5e3bbce64bcd0 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 18:16:17 +0000 Subject: [PATCH 014/182] Change name --- tools/{dev_samples.py => load_samples.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tools/{dev_samples.py => load_samples.py} (100%) diff --git a/tools/dev_samples.py b/tools/load_samples.py similarity index 100% rename from tools/dev_samples.py rename to tools/load_samples.py From 797d1f9340abea80ae1ecb54bc770311be76920c Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 18:27:17 +0000 Subject: [PATCH 015/182] Add some installation docs :) --- doc/source/glossary.rst | 6 +- doc/source/index.rst | 1 + doc/source/install/index.rst | 25 ++++++ doc/source/install/manual.rst | 142 ++++++++++++++++++++++++++++++++++ 4 files changed, 172 insertions(+), 2 deletions(-) create mode 100644 doc/source/install/index.rst 
create mode 100644 doc/source/install/manual.rst diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst index d736fe8..5d7bc59 100644 --- a/doc/source/glossary.rst +++ b/doc/source/glossary.rst @@ -22,8 +22,10 @@ Glossary .. glossary:: pgp - PaymentGatewayProvider + PaymentGatewayProvider - A plugin for PaymentGateways pgm - PaymentGatewayMethod + PaymentGatewayMethod - A supported payment method by the PGP api Web API + central + The Central service that does CRUD operations and more in BS. \ No newline at end of file diff --git a/doc/source/index.rst b/doc/source/index.rst index 6657a2f..4048e3b 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -14,6 +14,7 @@ Contents: architecture api glossary + install/index Indices and tables diff --git a/doc/source/install/index.rst b/doc/source/install/index.rst new file mode 100644 index 0000000..b06ab28 --- /dev/null +++ b/doc/source/install/index.rst @@ -0,0 +1,25 @@ +.. + Copyright 2013 New Dream Network, LLC (DreamHost) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +.. _install: + +======================= + Installing Ceilometer +======================= + +.. toctree:: + :maxdepth: 2 + + manual diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst new file mode 100644 index 0000000..dc6ad00 --- /dev/null +++ b/doc/source/install/manual.rst @@ -0,0 +1,142 @@ +.. 
+ Copyright 2012 Nicolas Barcet for Canonical + 2013 New Dream Network, LLC (DreamHost) + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + + + +===================== + Installing Manually +===================== + +Common Steps +============ + +.. index:: + double: installing; common_steps + +.. note:: + The below operations should take place underneath your /etc folder. + +1. Install system package dependencies (Ubuntu):: + + $ apt-get install python-pip python-virtualenv + $ apt-get install rabbitmq-server mysql-server + $ apt-get build-dep python-lxml + +2. Clone the BillingStack repo off of Github:: + + $ git clone https://github.com/billingstack/billingstack.git + $ cd billingstack + +3. Setup virtualenv:: + +.. note:: + This is to not interfere with system packages etc. + + $ virtualenv --no-site-packages .venv + $ . .venv/bin/activate + +4. Install BillingStack and it's dependencies:: + + $ pip install -rtools/setup-requires -rtools/pip-requires -rtools/pip-options + $ python setup.py develop + + Copy sample configs to usable ones, inside the `etc` folder do:: + + $ ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done + + +Installing Central +================== + +.. index:: + double: installing; central + +.. note:: + This is needed because it is the service that the API and others uses to + communicate with to do stuff in the Database. + +1. See `Common Steps`_ before proceeding. + +2. 
Configure the :term:`central` service:: + + Change the wanted configuration settings to match your environment, the file + is in the `etc` folder:: + + $ vi etc/billingstack.conf + + Refer to :doc:`configuration` details on configuring the service. + +3. Create the DB for :term:`central`:: + + $ python tools/resync_billingstack.py + + +4. Now you might want to load sample data for the time being:: + + $ python tools/dev_samples.py + +5. Start the central service:: + + $ billingstack-central + + +Installing a PGP +================ + +.. index: + double: installing; pgp + +.. note:: + This is REQUIRED to be installed on the same machine that has access to + the database and that has the billingstack-manage command. + +1. Clone a provider repo off of github:: + + $ git clone git@github.com:billingstack/billingstack-braintree.git + +2. Install it in the SAME env / venv as the main billingstack package:: + + $ pip install -rtools/setup-requires -rtools/pip-requires -rtools/pip-options + $ python setup.py develop + +3. Now register :term:`pgp` with it's :term:`pgm`:: + + $ billingstack-manage pg-register + + +Installing the API +==================== + +.. index:: + double: installing; api + +.. note:: + The API Server needs to able to talk via MQ to other services. + +1. See `Common Steps`_ before proceeding. + +2. Configure the :term:`api` service:: + + Change the wanted configuration settings to match your environment, the file + is in the `etc` folder:: + + $ vi billingstack.conf + + Refer to :doc:`configuration` details on configuring the service. + +3. 
Start the API service:: + + $ billingstack-api \ No newline at end of file From a9e6fc94a94a31d025b7f0f7ef5a4894aaa0fdce Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 18:50:58 +0000 Subject: [PATCH 016/182] Move into properties --- billingstack/api/v1/models.py | 3 - billingstack/samples_data/product.json | 360 +++++++++--------- .../storage/impl_sqlalchemy/models.py | 4 - 3 files changed, 180 insertions(+), 187 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 626b3ce..63ff808 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -80,9 +80,6 @@ class Plan(DescribedBase): class Product(DescribedBase): - measure = text - type = text - properties = DictType(key_type=text, value_type=property_type) diff --git a/billingstack/samples_data/product.json b/billingstack/samples_data/product.json index 4e110c3..999d1df 100644 --- a/billingstack/samples_data/product.json +++ b/billingstack/samples_data/product.json @@ -1,182 +1,182 @@ [ - { - "name": "instance", - "type": "gauge", - "measure": "unit", - "description": "Duration of instance", - "properties": { - "resource": "instance_id" - } - }, - { - "name": "memory", - "type": "gauge", - "measure": "mb", - "properties": { - "resource": "instance_id" - }, - "description": "Volume of RAM in MB" - }, - { - "name": "vcpus", - "type": "gauge", - "measure": "vcpu", - "properties": { - "resource": "instance_id" - }, - "description": "Number of VCPUs" - }, - { - "name": "root_disk_size", - "type": "gauge", - "measure": "gb", - "properties": { - "resource": "instance_id" - }, - "description": "Size of root disk in GB" - }, - { - "name": "ephemeral_disk_size", - "type": "gauge", - "measure": "gb", - "properties": { - "resource": "instance_id" - }, - "description": "Size of ephemeral disk in GB" - }, - { - "name": "disk.read.requests", - "type": "cumulative", - "measure": "unit", - "properties": { - "resource": "instance_id" - }, - 
"description": "Number of disk read requests" - }, - { - "name": "disk.read.bytes", - "type": "cumulative", - "measure": "bytes", - "properties": { - "resource": "instance_id" - }, - "description": "Volume of disk read in bytes" - }, - { - "name": "disk.write.requests", - "type": "cumulative", - "measure": "unit", - "properties": { - "resource": "instance_id" - }, - "description": "Number of disk write requests" - }, - { - "name": "disk.write.bytes", - "type": "cumulative", - "measure": "bytes", - "properties": { - "resource": "instance_id" - }, - "description": "Volume of disk write in bytes" - }, - { - "name": "cpu", - "type": "cumulative", - "measure": "unit", - "properties": { - "resource": "seconds" - }, - "description": "CPU time used" - }, - { - "name": "network.incoming.bytes", - "type": "cumulative", - "measure": "bytes", - "properties": { - "resource": "instance_id" - }, - "description": "number of incoming bytes on the network" - }, - { - "name": "network.outgoing.bytes", - "type": "cumulative", - "measure": "bytes", - "properties": { - "resource": "instance_id" - }, - "description": "number of outgoing bytes on the network" - }, - { - "name": "network.incoming.packets", - "type": "cumulative", - "measure": "packets", - "properties": { - "resource": "instance_id" - }, - "description": "number of incoming packets" - }, - { - "name": "network.outgoing.packets", - "type": "cumulative", - "measure": "packets", - "properties": { - "resource": "instance_id" - }, - "description": "number of outgoing packets" - }, - { - "name": "image", - "type": "gauge", - "measure": "unit", - "properties": { - "resource": "image_id" - }, - "description": "Image polling -> it (still) exists" - }, - { - "name": "image_size", - "type": "gauge", - "measure": "bytes", - "properties": { - "resource": "image_id" - }, - "description": "Uploaded image size" - }, - { - "name": "image_download", - "type": "gauge", - "measure": "bytes", - "properties": { - "resource": "image_id" - }, - 
"description": "Image is downloaded" - }, - { - "name": "image_serve", - "type": "gauge", - "measure": "bytes", - "properties": { - "resource": "image_id" - }, - "description": "Image is served out" - }, - { - "name": "volume", - "type": "gauge", - "measure": "unit", - "properties": { - "resource": "measure_id" - }, - "description": "Duration of volume" - }, - { - "name": "volume_size", - "type": "gauge", - "measure": "gb", - "properties": { - "resource": "measure_id" - }, - "description": "Size of measure" - } + { + "name" : "instance", + "description" : "Duration of instance", + "properties" : { + "resource" : "instance_id", + "measure" : "unit", + "type" : "gauge" + } + }, + { + "name" : "memory", + "description" : "Volume of RAM in MB", + "properties" : { + "resource" : "instance_id", + "measure" : "mb", + "type" : "gauge" + } + }, + { + "name" : "vcpus", + "description" : "Number of VCPUs", + "properties" : { + "resource" : "instance_id", + "measure" : "vcpu", + "type" : "gauge" + } + }, + { + "name" : "root_disk_size", + "description" : "Size of root disk in GB", + "properties" : { + "resource" : "instance_id", + "measure" : "gb", + "type" : "gauge" + } + }, + { + "name" : "ephemeral_disk_size", + "description" : "Size of ephemeral disk in GB", + "properties" : { + "resource" : "instance_id", + "measure" : "gb", + "type" : "gauge" + } + }, + { + "name" : "disk.read.requests", + "description" : "Number of disk read requests", + "properties" : { + "resource" : "instance_id", + "measure" : "unit", + "type" : "cumulative" + } + }, + { + "name" : "disk.read.bytes", + "description" : "Volume of disk read in bytes", + "properties" : { + "resource" : "instance_id", + "measure" : "bytes", + "type" : "cumulative" + } + }, + { + "name" : "disk.write.requests", + "description" : "Number of disk write requests", + "properties" : { + "resource" : "instance_id", + "measure" : "unit", + "type" : "cumulative" + } + }, + { + "name" : "disk.write.bytes", + "description" : 
"Volume of disk write in bytes", + "properties" : { + "resource" : "instance_id", + "measure" : "bytes", + "type" : "cumulative" + } + }, + { + "name" : "cpu", + "description" : "CPU time used", + "properties" : { + "resource" : "seconds", + "measure" : "unit", + "type" : "cumulative" + } + }, + { + "name" : "network.incoming.bytes", + "description" : "number of incoming bytes on the network", + "properties" : { + "resource" : "instance_id", + "measure" : "bytes", + "type" : "cumulative" + } + }, + { + "name" : "network.outgoing.bytes", + "description" : "number of outgoing bytes on the network", + "properties" : { + "resource" : "instance_id", + "measure" : "bytes", + "type" : "cumulative" + } + }, + { + "name" : "network.incoming.packets", + "description" : "number of incoming packets", + "properties" : { + "resource" : "instance_id", + "measure" : "packets", + "type" : "cumulative" + } + }, + { + "name" : "network.outgoing.packets", + "description" : "number of outgoing packets", + "properties" : { + "resource" : "instance_id", + "measure" : "packets", + "type" : "cumulative" + } + }, + { + "name" : "image", + "description" : "Image polling -> it (still) exists", + "properties" : { + "resource" : "image_id", + "measure" : "unit", + "type" : "gauge" + } + }, + { + "name" : "image_size", + "description" : "Uploaded image size", + "properties" : { + "resource" : "image_id", + "measure" : "bytes", + "type" : "gauge" + } + }, + { + "name" : "image_download", + "description" : "Image is downloaded", + "properties" : { + "resource" : "image_id", + "measure" : "bytes", + "type" : "gauge" + } + }, + { + "name" : "image_serve", + "description" : "Image is served out", + "properties" : { + "resource" : "image_id", + "measure" : "bytes", + "type" : "gauge" + } + }, + { + "name" : "volume", + "description" : "Duration of volume", + "properties" : { + "resource" : "measure_id", + "measure" : "unit", + "type" : "gauge" + } + }, + { + "name" : "volume_size", + "description" : 
"Size of measure", + "properties" : { + "resource" : "measure_id", + "measure" : "gb", + "type" : "gauge" + } + } ] diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index 556ac9f..4efc05a 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -371,10 +371,6 @@ class Product(BASE, BaseMixin): title = Column(Unicode(100)) description = Column(Unicode(255)) - measure = Column(Unicode(255)) - source = Column(Unicode(255)) - type = Column(Unicode(255)) - price = relationship('Pricing', backref='product', uselist=False) merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), From 9fdd3922fc908a73c7d0a5a1ab83fcfb8211fc4d Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 19:52:44 +0000 Subject: [PATCH 017/182] Point to doc... --- README.md | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 4632584..2efbd4c 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ billingstack What is Billingstack? BillingStack is a convergence of efforts done in the previous started Bufunfa -project and the BillingStack Grails (Java) version by Luis Gervaso. +project and the BillingStack Grails (Java) version by Luis Gervaso. The goal is to provide a free alternative to anyone that has a need for a subscription based billingsystem with features compared to other popular ones. @@ -21,14 +21,6 @@ Features include: * REST API - Currently based on Pecan for V1. -Installing -========== - -1. git clone https://github.com/billingstack/billingstack -2. virtualenv .venv -3. pip install -r tools/test-requires -r tools/pip-options -r tools/pip-requires -4. python setup.py develop -5. Edit the config to your liking - vi etc/billingstack/billingstack.conf -6. 
Run the API - billingstack-api --config-file etc/billingstack/billingstack.conf +Docs: http://billingstack.rtfd.org +Github: http://github.com/billingstack/billingstack +Bugs: http://github.com/billingstack/billingstack \ No newline at end of file From 298cfa8c27cf2822e057978fd290aa73c802ef62 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 15 Mar 2013 20:13:56 +0000 Subject: [PATCH 018/182] Fix license headers --- doc/source/architecture.rst | 2 +- doc/source/glossary.rst | 2 +- doc/source/install/index.rst | 8 ++++---- doc/source/install/manual.rst | 3 +-- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/doc/source/architecture.rst b/doc/source/architecture.rst index 503f8ce..2352e12 100644 --- a/doc/source/architecture.rst +++ b/doc/source/architecture.rst @@ -1,5 +1,5 @@ .. - Copyright 2012 Endre Karlson for Bouvet ASA + Copyright 2013 Endre Karlson Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst index 5d7bc59..e9ac672 100644 --- a/doc/source/glossary.rst +++ b/doc/source/glossary.rst @@ -1,5 +1,5 @@ .. - Copyright 2012 Endre Karlson for Bouvet ASA + Copyright 2013 Endre Karlson Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain diff --git a/doc/source/install/index.rst b/doc/source/install/index.rst index b06ab28..33a2942 100644 --- a/doc/source/install/index.rst +++ b/doc/source/install/index.rst @@ -1,5 +1,5 @@ .. - Copyright 2013 New Dream Network, LLC (DreamHost) + Copyright 2013 Endre Karlson Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain @@ -15,9 +15,9 @@ .. 
_install: -======================= - Installing Ceilometer -======================= +======================== + Installing Billingstack +======================== .. toctree:: :maxdepth: 2 diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst index dc6ad00..e4a766e 100644 --- a/doc/source/install/manual.rst +++ b/doc/source/install/manual.rst @@ -1,6 +1,5 @@ .. - Copyright 2012 Nicolas Barcet for Canonical - 2013 New Dream Network, LLC (DreamHost) + Copyright 2013 Endre Karlson Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain From c089cc7e9ce0ad07befdcd567194b5863b15d354 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 16 Mar 2013 00:53:07 +0000 Subject: [PATCH 019/182] Change config to properties and add pg_config --- billingstack/api/v1/controllers.py | 51 +++++++++++++++++++ billingstack/api/v1/models.py | 8 ++- billingstack/samples_data/pg_config.json | 2 +- .../storage/impl_sqlalchemy/__init__.py | 10 ++-- .../storage/impl_sqlalchemy/models.py | 10 ++-- billingstack/tests/storage/__init__.py | 4 +- 6 files changed, 72 insertions(+), 13 deletions(-) diff --git a/billingstack/api/v1/controllers.py b/billingstack/api/v1/controllers.py index 811eb38..fda063d 100644 --- a/billingstack/api/v1/controllers.py +++ b/billingstack/api/v1/controllers.py @@ -358,6 +358,57 @@ def post(self, body): return models.Subscription.from_db(row) +# PaymentGatewayConfig +class PGConfigController(RestBase): + """PGConfig controller""" + __id__ = 'pg_config' + + @wsme_pecan.wsexpose(models.PGConfig, unicode) + def get_all(self): + row = request.central_api.get_pg_config(request.ctxt, self.id_) + + return models.PGConfig.from_db(row) + + @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig) + def put(self, body): + row = request.central_api.update_pg_config( + request.ctxt, + self.id_, + body.to_db()) + + return models.PGConfig.from_db(row) + + 
@wsme_pecan.wsexpose() + def delete(self): + request.central_api.delete_pg_config(request.ctxt, self.id_) + + +class PGConfigsController(RestBase): + """PaymentMethods controller""" + __resource__ = PGConfigController + + @wsme_pecan.wsexpose([models.PGConfig], unicode) + def get_all(self): + criterion = { + 'customer_id': request.context['customer_id'] + } + + rows = request.central_api.list_pg_config( + request.ctxt, + criterion=criterion) + + return [models.PGConfig.from_db(r) for r in rows] + + @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig) + def post(self, body): + row = request.central_api.create_pg_config( + request.ctxt, + request.context['customer_id'], + body.to_db()) + + return models.PGConfig.from_db(row) + + # PaymentMethod class PaymentMethodController(RestBase): """PaymentMethod controller""" diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 63ff808..2b7efd5 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -74,7 +74,6 @@ class ContactInfo(Base): website = text - class Plan(DescribedBase): properties = DictType(key_type=text, value_type=property_type) @@ -101,6 +100,13 @@ class Subscription(Base): payment_method_id = text +class PGConfig(Base): + name = text + title = text + + properties = DictType(key_type=text, value_type=property_type) + + class PaymentMethod(Base): name = text identifier = text diff --git a/billingstack/samples_data/pg_config.json b/billingstack/samples_data/pg_config.json index 02b1d9c..f3a93ff 100644 --- a/billingstack/samples_data/pg_config.json +++ b/billingstack/samples_data/pg_config.json @@ -1,6 +1,6 @@ [ { "name": "Braintree Config", - "configuration" : "braintree" + "properties" : {} } ] diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 09932a3..c4da5ac 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -296,7 
+296,7 @@ def create_pg_config(self, ctxt, merchant_id, provider_id, values): merchant = self._get_id_or_name(models.Merchant, merchant_id) provider = self._get_id_or_name(models.PGProvider, provider_id) - row = models.PGAccountConfig(**values) + row = models.PGConfig(**values) row.merchant = merchant row.provider = provider @@ -304,19 +304,19 @@ def create_pg_config(self, ctxt, merchant_id, provider_id, values): return dict(row) def list_pg_config(self, ctxt, **kw): - rows = self._list(models.PGAccountConfig, **kw) + rows = self._list(models.PGConfig, **kw) return map(dict, rows) def get_pg_config(self, ctxt, id_): - row = self._get(models.PGAccountConfig, id_) + row = self._get(models.PGConfig, id_) return dict(row) def update_pg_config(self, ctxt, id_, values): - row = self._update(models.PGAccountConfig, id_, values) + row = self._update(models.PGConfig, id_, values) return dict(row) def delete_pg_config(self, ctxt, id_): - self._delete(models.PGAccountConfig, id_) + self._delete(models.PGConfig, id_) # PaymentMethod def create_payment_method(self, ctxt, customer_id, pg_method_id, values): diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index 4efc05a..e81668f 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -164,7 +164,7 @@ class Merchant(BASE, BaseMixin): title = Column(Unicode(60)) customers = relationship('Customer', backref='merchant') - payment_gateways = relationship('PGAccountConfig', backref='merchant') + payment_gateways = relationship('PGConfig', backref='merchant') plans = relationship('Plan', backref='merchant') products = relationship('Product', backref='merchant') @@ -178,14 +178,16 @@ class Merchant(BASE, BaseMixin): nullable=False) -class PGAccountConfig(BASE, BaseMixin): +class PGConfig(BASE, BaseMixin): """ A Merchant's configuration of a PaymentGateway like api keys, url and more """ - __tablename__ = 
'pg_account_config' + __tablename__ = 'pg_config' + name = Column(Unicode(100), nullable=False) title = Column(Unicode(100)) - configuration = Column(JSON) + + properties = Column(JSON) # Link to the Merchant merchant_id = Column(UUID, ForeignKey('merchant.id'), nullable=False) diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index 41e1f80..e2a9a93 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -112,7 +112,7 @@ def test_set_properties(self): self.storage_conn.set_properties(data['id'], metadata, cls=models.Product) actual = self.storage_conn.get_product(self.admin_ctxt, data['id']) - self.assertLen(4, actual['properties']) + self.assertLen(6, actual['properties']) # Payment Gateways def test_pg_provider_register(self): @@ -209,7 +209,7 @@ def test_update_pg_config(self): _, provider = self.pg_provider_register() fixture, data = self.create_pg_config(provider['id']) - fixture['configuration'] = {"api": 1} + fixture['properties'] = {"api": 1} updated = self.storage_conn.update_pg_config(self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) From 9b61a62056393083d3bca958e63de75c8aca3d50 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 16 Mar 2013 00:54:01 +0000 Subject: [PATCH 020/182] Forgot to add resource --- billingstack/api/v1/controllers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/billingstack/api/v1/controllers.py b/billingstack/api/v1/controllers.py index fda063d..2055cc6 100644 --- a/billingstack/api/v1/controllers.py +++ b/billingstack/api/v1/controllers.py @@ -515,6 +515,7 @@ class MerchantController(RestBase): __resource__ = { "customers": CustomersController, "invoices": InvoicesController, + "payment-gateways": PGConfigsController, "plans": PlansController, "products": ProductsController } From 2933b03f4c0b03db679fb0320e0e824139704b3d Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 16 Mar 2013 13:22:26 +0000 Subject: 
[PATCH 021/182] Correct to right key --- billingstack/api/v1/controllers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/billingstack/api/v1/controllers.py b/billingstack/api/v1/controllers.py index 2055cc6..1233a70 100644 --- a/billingstack/api/v1/controllers.py +++ b/billingstack/api/v1/controllers.py @@ -390,7 +390,7 @@ class PGConfigsController(RestBase): @wsme_pecan.wsexpose([models.PGConfig], unicode) def get_all(self): criterion = { - 'customer_id': request.context['customer_id'] + 'merchant_id': request.context['merchant_id'] } rows = request.central_api.list_pg_config( From 270bece085ee9d8307b893c06997878362f5aeb2 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 16 Mar 2013 13:22:35 +0000 Subject: [PATCH 022/182] Try to fix command in the note --- doc/source/install/manual.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst index e4a766e..c9269eb 100644 --- a/doc/source/install/manual.rst +++ b/doc/source/install/manual.rst @@ -43,6 +43,7 @@ Common Steps .. note:: This is to not interfere with system packages etc. + :: $ virtualenv --no-site-packages .venv $ . .venv/bin/activate From ebbf989eafa4b89eaa7d6a883879bba309f59903 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 16 Mar 2013 13:25:03 +0000 Subject: [PATCH 023/182] Attempt again... --- doc/source/install/manual.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst index c9269eb..0217f04 100644 --- a/doc/source/install/manual.rst +++ b/doc/source/install/manual.rst @@ -43,7 +43,8 @@ Common Steps .. note:: This is to not interfere with system packages etc. - :: + + $ virtualenv --no-site-packages .venv $ . 
.venv/bin/activate From 14f1d650181bb93f3759d1bcf87676098b6a90ec Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 16 Mar 2013 14:57:25 +0000 Subject: [PATCH 024/182] Remove ref to configuration --- doc/source/install/manual.rst | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst index 0217f04..73f7934 100644 --- a/doc/source/install/manual.rst +++ b/doc/source/install/manual.rst @@ -44,8 +44,6 @@ Common Steps .. note:: This is to not interfere with system packages etc. - - $ virtualenv --no-site-packages .venv $ . .venv/bin/activate @@ -78,13 +76,12 @@ Installing Central $ vi etc/billingstack.conf - Refer to :doc:`configuration` details on configuring the service. + Refer to the configuration file for details on configuring the service. 3. Create the DB for :term:`central`:: $ python tools/resync_billingstack.py - 4. Now you might want to load sample data for the time being:: $ python tools/dev_samples.py @@ -136,7 +133,7 @@ Installing the API $ vi billingstack.conf - Refer to :doc:`configuration` details on configuring the service. + Refer to the configuration file for details on configuring the service. 3. 
Start the API service:: From 277851595b19d22db981579f155d4c3196ae7a74 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 17 Mar 2013 14:16:47 +0000 Subject: [PATCH 025/182] Fix race condition on startup --- billingstack/central/service.py | 3 + billingstack/openstack/common/fileutils.py | 35 +++ billingstack/openstack/common/lockutils.py | 250 +++++++++++++++++++++ billingstack/sqlalchemy/session.py | 2 + openstack.conf | 2 +- 5 files changed, 291 insertions(+), 1 deletion(-) create mode 100644 billingstack/openstack/common/fileutils.py create mode 100644 billingstack/openstack/common/lockutils.py diff --git a/billingstack/central/service.py b/billingstack/central/service.py index fbfb1fd..526de41 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -21,7 +21,10 @@ def __init__(self, *args, **kwargs): super(Service, self).__init__(*args, **kwargs) # Get a storage connection + + def start(self): self.storage_conn = storage.get_connection() + super(Service, self).start() def __getattr__(self, name): """ diff --git a/billingstack/openstack/common/fileutils.py b/billingstack/openstack/common/fileutils.py new file mode 100644 index 0000000..b988ad0 --- /dev/null +++ b/billingstack/openstack/common/fileutils.py @@ -0,0 +1,35 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +import errno +import os + + +def ensure_tree(path): + """Create a directory (and any ancestor directories required) + + :param path: Directory to create + """ + try: + os.makedirs(path) + except OSError as exc: + if exc.errno == errno.EEXIST: + if not os.path.isdir(path): + raise + else: + raise diff --git a/billingstack/openstack/common/lockutils.py b/billingstack/openstack/common/lockutils.py new file mode 100644 index 0000000..9ac18a5 --- /dev/null +++ b/billingstack/openstack/common/lockutils.py @@ -0,0 +1,250 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +import errno +import functools +import os +import shutil +import tempfile +import time +import weakref + +from eventlet import semaphore +from oslo.config import cfg + +from billingstack.openstack.common import fileutils +from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common import local +from billingstack.openstack.common import log as logging + + +LOG = logging.getLogger(__name__) + + +util_opts = [ + cfg.BoolOpt('disable_process_locking', default=False, + help='Whether to disable inter-process locks'), + cfg.StrOpt('lock_path', + help=('Directory to use for lock files. 
Default to a ' + 'temp directory')) +] + + +CONF = cfg.CONF +CONF.register_opts(util_opts) + + +class _InterProcessLock(object): + """Lock implementation which allows multiple locks, working around + issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does + not require any cleanup. Since the lock is always held on a file + descriptor rather than outside of the process, the lock gets dropped + automatically if the process crashes, even if __exit__ is not executed. + + There are no guarantees regarding usage by multiple green threads in a + single process here. This lock works only between processes. Exclusive + access between local threads should be achieved using the semaphores + in the @synchronized decorator. + + Note these locks are released when the descriptor is closed, so it's not + safe to close the file descriptor while another green thread holds the + lock. Just opening and closing the lock file can break synchronisation, + so lock files must be accessed only using this abstraction. + """ + + def __init__(self, name): + self.lockfile = None + self.fname = name + + def __enter__(self): + self.lockfile = open(self.fname, 'w') + + while True: + try: + # Using non-blocking locks since green threads are not + # patched to deal with blocking locking calls. + # Also upon reading the MSDN docs for locking(), it seems + # to have a laughable 10 attempts "blocking" mechanism. 
+ self.trylock() + return self + except IOError, e: + if e.errno in (errno.EACCES, errno.EAGAIN): + # external locks synchronise things like iptables + # updates - give it some time to prevent busy spinning + time.sleep(0.01) + else: + raise + + def __exit__(self, exc_type, exc_val, exc_tb): + try: + self.unlock() + self.lockfile.close() + except IOError: + LOG.exception(_("Could not release the acquired lock `%s`"), + self.fname) + + def trylock(self): + raise NotImplementedError() + + def unlock(self): + raise NotImplementedError() + + +class _WindowsLock(_InterProcessLock): + def trylock(self): + msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) + + def unlock(self): + msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) + + +class _PosixLock(_InterProcessLock): + def trylock(self): + fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) + + def unlock(self): + fcntl.lockf(self.lockfile, fcntl.LOCK_UN) + + +if os.name == 'nt': + import msvcrt + InterProcessLock = _WindowsLock +else: + import fcntl + InterProcessLock = _PosixLock + +_semaphores = weakref.WeakValueDictionary() + + +def synchronized(name, lock_file_prefix, external=False, lock_path=None): + """Synchronization decorator. + + Decorating a method like so:: + + @synchronized('mylock') + def foo(self, *args): + ... + + ensures that only one thread will execute the foo method at a time. + + Different methods can share the same lock:: + + @synchronized('mylock') + def foo(self, *args): + ... + + @synchronized('mylock') + def bar(self, *args): + ... + + This way only one of either foo or bar can be executing at a time. + + The lock_file_prefix argument is used to provide lock files on disk with a + meaningful prefix. The prefix should end with a hyphen ('-') if specified. + + The external keyword argument denotes whether this lock should work across + multiple processes. 
This means that if two different workers both run a + a method decorated with @synchronized('mylock', external=True), only one + of them will execute at a time. + + The lock_path keyword argument is used to specify a special location for + external lock files to live. If nothing is set, then CONF.lock_path is + used as a default. + """ + + def wrap(f): + @functools.wraps(f) + def inner(*args, **kwargs): + # NOTE(soren): If we ever go natively threaded, this will be racy. + # See http://stackoverflow.com/questions/5390569/dyn + # amically-allocating-and-destroying-mutexes + sem = _semaphores.get(name, semaphore.Semaphore()) + if name not in _semaphores: + # this check is not racy - we're already holding ref locally + # so GC won't remove the item and there was no IO switch + # (only valid in greenthreads) + _semaphores[name] = sem + + with sem: + LOG.debug(_('Got semaphore "%(lock)s" for method ' + '"%(method)s"...'), {'lock': name, + 'method': f.__name__}) + + # NOTE(mikal): I know this looks odd + if not hasattr(local.strong_store, 'locks_held'): + local.strong_store.locks_held = [] + local.strong_store.locks_held.append(name) + + try: + if external and not CONF.disable_process_locking: + LOG.debug(_('Attempting to grab file lock "%(lock)s" ' + 'for method "%(method)s"...'), + {'lock': name, 'method': f.__name__}) + cleanup_dir = False + + # We need a copy of lock_path because it is non-local + local_lock_path = lock_path + if not local_lock_path: + local_lock_path = CONF.lock_path + + if not local_lock_path: + cleanup_dir = True + local_lock_path = tempfile.mkdtemp() + + if not os.path.exists(local_lock_path): + cleanup_dir = True + fileutils.ensure_tree(local_lock_path) + + # NOTE(mikal): the lock name cannot contain directory + # separators + safe_name = name.replace(os.sep, '_') + lock_file_name = '%s%s' % (lock_file_prefix, safe_name) + lock_file_path = os.path.join(local_lock_path, + lock_file_name) + + try: + lock = InterProcessLock(lock_file_path) + with 
lock: + LOG.debug(_('Got file lock "%(lock)s" at ' + '%(path)s for method ' + '"%(method)s"...'), + {'lock': name, + 'path': lock_file_path, + 'method': f.__name__}) + retval = f(*args, **kwargs) + finally: + LOG.debug(_('Released file lock "%(lock)s" at ' + '%(path)s for method "%(method)s"...'), + {'lock': name, + 'path': lock_file_path, + 'method': f.__name__}) + # NOTE(vish): This removes the tempdir if we needed + # to create one. This is used to + # cleanup the locks left behind by unit + # tests. + if cleanup_dir: + shutil.rmtree(local_lock_path) + else: + retval = f(*args, **kwargs) + + finally: + local.strong_store.locks_held.remove(name) + + return retval + return inner + return wrap diff --git a/billingstack/sqlalchemy/session.py b/billingstack/sqlalchemy/session.py index 4feb25f..b83abb3 100644 --- a/billingstack/sqlalchemy/session.py +++ b/billingstack/sqlalchemy/session.py @@ -25,6 +25,7 @@ from sqlalchemy.pool import NullPool, StaticPool from oslo.config import cfg +from billingstack.openstack.common import lockutils from billingstack.openstack.common import log as logging from billingstack.openstack.common.gettextutils import _ @@ -55,6 +56,7 @@ ] +@lockutils.synchronized('session', 'billingstack-') def get_session(config_group, autocommit=True, expire_on_commit=False, diff --git a/openstack.conf b/openstack.conf index da110a8..4830922 100644 --- a/openstack.conf +++ b/openstack.conf @@ -1,3 +1,3 @@ [DEFAULT] -modules=iniparser,importutils,excutils,local,jsonutils,timeutils,service,eventlet_backdoor,loopingcall,utils,exception,setup,version,uuidutils,processutils,db,log,gettextutils,iso8601,notifier,rpc,context,threadgroup,network_utils +modules=iniparser,importutils,excutils,local,jsonutils,timeutils,service,eventlet_backdoor,loopingcall,utils,exception,setup,version,uuidutils,processutils,db,log,gettextutils,iso8601,notifier,rpc,context,threadgroup,network_utils,lockutils,fileutils base=billingstack From a5802f719aa6d78d969eb8952baaf6c2b4cabcee 
Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 17 Mar 2013 20:48:33 +0000 Subject: [PATCH 026/182] Support query filters in GET url. --- billingstack/api/base.py | 90 +++++++++++++++++++++++++++++- billingstack/api/v1/controllers.py | 77 ++++++++++--------------- billingstack/api/v1/models.py | 3 + billingstack/exceptions.py | 7 +++ billingstack/sqlalchemy/api.py | 89 ++++++++++++++++++++++++++++- 5 files changed, 216 insertions(+), 50 deletions(-) diff --git a/billingstack/api/base.py b/billingstack/api/base.py index 3b0b38a..90e54b5 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -2,7 +2,7 @@ import pecan from pecan import request from pecan.rest import RestController -from wsme.types import Base, UserType, Unset +from wsme.types import Base, Enum, UserType, text, Unset, wsproperty from billingstack.openstack.common import log @@ -24,6 +24,94 @@ def fromnativetype(self, value): property_type = Property() +operation_kind = Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt') + + +class Query(Base): + """ + Query filter. + """ + + _op = None # provide a default + + def get_op(self): + return self._op or 'eq' + + def set_op(self, value): + self._op = value + + field = text + "The name of the field to test" + + #op = wsme.wsattr(operation_kind, default='eq') + # this ^ doesn't seem to work. + op = wsproperty(operation_kind, get_op, set_op) + "The comparison operator. Defaults to 'eq'." + + value = text + "The value to compare against the stored data" + + def __repr__(self): + # for logging calls + return '' % (self.field, self.op, self.value) + + @classmethod + def sample(cls): + return cls(field='resource_id', + op='eq', + value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', + ) + + def as_dict(self): + return { + 'op': self.op, + 'field': self.field, + 'value': self.value + } + + +def _query_to_kwargs(query, db_func): + # TODO(dhellmann): This function needs tests of its own. 
+ valid_keys = inspect.getargspec(db_func)[0] + if 'self' in valid_keys: + valid_keys.remove('self') + translation = {'user_id': 'user', + 'project_id': 'project', + 'resource_id': 'resource'} + stamp = {} + trans = {} + metaquery = {} + for i in query: + if i.field == 'timestamp': + # FIXME(dhellmann): This logic is not consistent with the + # way the timestamps are treated inside the mongo driver + # (the end timestamp is always tested using $lt). We + # should just pass a single timestamp through to the + # storage layer with the operator and let the storage + # layer use that operator. + if i.op in ('lt', 'le'): + stamp['end_timestamp'] = i.value + elif i.op in ('gt', 'ge'): + stamp['start_timestamp'] = i.value + else: + LOG.warn('_query_to_kwargs ignoring %r unexpected op %r"' % + (i.field, i.op)) + else: + if i.op != 'eq': + LOG.warn('_query_to_kwargs ignoring %r unimplemented op %r' % + (i.field, i.op)) + elif i.field == 'search_offset': + stamp['search_offset'] = i.value + elif i.field.startswith('metadata.'): + metaquery[i.field] = i.value + else: + trans[translation.get(i.field, i.field)] = i.value + + kwargs = {} + if metaquery and 'metaquery' in valid_keys: + kwargs['metaquery'] = metaquery + + class RestBase(RestController): __resource__ = None __id__ = None diff --git a/billingstack/api/v1/controllers.py b/billingstack/api/v1/controllers.py index 1233a70..f9803b6 100644 --- a/billingstack/api/v1/controllers.py +++ b/billingstack/api/v1/controllers.py @@ -21,7 +21,7 @@ import wsmeext.pecan as wsme_pecan from billingstack.openstack.common import log -from billingstack.api.base import RestBase +from billingstack.api.base import RestBase, Query from billingstack.api.v1 import models LOG = log.getLogger(__name__) @@ -195,15 +195,11 @@ def delete(self): class PlansController(RestBase): __resource__ = PlanController - @wsme_pecan.wsexpose([models.Plan]) - def get_all(self): - criterion = { - 'merchant_id': request.context['merchant_id'] - } - + 
@wsme_pecan.wsexpose([models.Plan], [Query]) + def get_all(self, q=[]): rows = request.central_api.list_plan( request.ctxt, - criterion=criterion) + criterion=[o.as_dict() for o in q]) return [models.Plan.from_db(r) for r in rows] @@ -244,9 +240,11 @@ def delete(self): class ProductsController(RestBase): __resource__ = ProductController - @wsme_pecan.wsexpose([models.Product]) - def get_all(self): - rows = request.central_api.list_product(request.ctxt) + @wsme_pecan.wsexpose([models.Product], [Query]) + def get_all(self, q=[]): + rows = request.central_api.list_product( + request.ctxt, + criterion=[o.as_dict() for o in q]) return [models.Product.from_db(r) for r in rows] @@ -287,15 +285,11 @@ def delete(self): class InvoicesController(RestBase): __resource__ = InvoiceController - @wsme_pecan.wsexpose([models.Invoice]) - def get_all(self): - criterion = { - 'merchant_id': request.context['merchant_id'] - } - + @wsme_pecan.wsexpose([models.Invoice], [Query]) + def get_all(self, q=[]): rows = request.central_api.list_invoice( request.ctxt, - criterion=criterion) + criterion=[o.as_dict() for o in q]) return [models.Invoice.from_db(r) for r in rows] @@ -336,15 +330,11 @@ def delete(self): class SubscriptionsController(RestBase): __resource__ = SubscriptionController - @wsme_pecan.wsexpose([models.Subscription]) - def get_all(self): - criterion = { - 'customer_id': request.context['customer_id'] - } - + @wsme_pecan.wsexpose([models.Subscription], [Query]) + def get_all(self, q=[]): rows = request.central_api.list_subscription( request.ctxt, - criterion=criterion) + criterion=[o.as_dict() for o in q]) return [models.Subscription.from_db(r) for r in rows] @@ -387,15 +377,11 @@ class PGConfigsController(RestBase): """PaymentMethods controller""" __resource__ = PGConfigController - @wsme_pecan.wsexpose([models.PGConfig], unicode) - def get_all(self): - criterion = { - 'merchant_id': request.context['merchant_id'] - } - + @wsme_pecan.wsexpose([models.PGConfig], [Query]) + def 
get_all(self, q=[]): rows = request.central_api.list_pg_config( request.ctxt, - criterion=criterion) + criterion=[o.as_dict() for o in q]) return [models.PGConfig.from_db(r) for r in rows] @@ -438,15 +424,11 @@ class PaymentMethodsController(RestBase): """PaymentMethods controller""" __resource__ = PaymentMethodController - @wsme_pecan.wsexpose([models.PaymentMethod], unicode) - def get_all(self): - criterion = { - 'customer_id': request.context['customer_id'] - } - + @wsme_pecan.wsexpose([models.PaymentMethod], [Query]) + def get_all(self, q=[]): rows = request.central_api.list_payment_method( request.ctxt, - criterion=criterion) + criterion=[o.as_dict() for o in q]) return [models.PaymentMethod.from_db(r) for r in rows] @@ -465,8 +447,7 @@ class CustomerController(RestBase): """Customer controller""" __id__ = 'customer' __resource__ = { - "payment-methods": PaymentMethodsController, - "subscriptions": SubscriptionsController + "payment-methods": PaymentMethodsController } @wsme_pecan.wsexpose(models.Customer, unicode) @@ -493,10 +474,11 @@ class CustomersController(RestBase): """Customers controller""" __resource__ = CustomerController - @wsme_pecan.wsexpose([models.Customer]) - def get_all(self): + @wsme_pecan.wsexpose([models.Customer], [Query]) + def get_all(self, q=[]): rows = request.central_api.list_customer( - request.ctxt, criterion={"merchant_id": self.parent.id_}) + request.ctxt, + criterion=[o.as_dict() for o in q]) return [models.Customer.from_db(r) for r in rows] @@ -517,7 +499,8 @@ class MerchantController(RestBase): "invoices": InvoicesController, "payment-gateways": PGConfigsController, "plans": PlansController, - "products": ProductsController + "products": ProductsController, + "subscriptions": SubscriptionsController } @wsme_pecan.wsexpose(models.Merchant) @@ -544,8 +527,8 @@ class MerchantsController(RestBase): """Merchants controller""" __resource__ = MerchantController - @wsme_pecan.wsexpose([models.Merchant]) - def get_all(self): + 
@wsme_pecan.wsexpose([models.Merchant], [Query]) + def get_all(self, q=[]): rows = request.central_api.list_merchant(request.ctxt) return [models.Merchant.from_db(i) for i in rows] diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 2b7efd5..59ae830 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -2,6 +2,9 @@ from billingstack.api.base import ModelBase, property_type +from billingstack.openstack.common import log + +LOG = log.getLogger(__name__) class Base(ModelBase): diff --git a/billingstack/exceptions.py b/billingstack/exceptions.py index 35f8cab..aaea0da 100644 --- a/billingstack/exceptions.py +++ b/billingstack/exceptions.py @@ -45,6 +45,13 @@ class InvalidSortKey(Base): pass +class InvalidQueryField(Base): + pass + + +class InvalidOperator(Base): + pass + class Duplicate(Base): pass diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index 8164a50..ec2b4b4 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -1,3 +1,5 @@ +import operator + from sqlalchemy.orm import exc from billingstack import exceptions @@ -8,6 +10,85 @@ LOG = log.getLogger(__name__) +class Filterer(object): + """ + Helper to apply filters... + """ + std_op = [ + (('eq', '==', '='), operator.eq), + (('ne', '!='), operator.ne), + (('ge', '>='), operator.ge), + (('le', '<='), operator.le), + (('gt', '>'), operator.gt), + (('le', '<'), operator.lt) + ] + + def __init__(self, model, query, criterion): + self.model = model + self.query = query + + if isinstance(criterion, dict): + criterion = self.from_dict(criterion) + + self.criterion = criterion + + def from_dict(self, criterion): + """ + Transform a dict with key values to a filter compliant list of dicts. + + :param criterion: The criterion dict. 
+ """ + data = [] + for key, value in criterion.items(): + c = { + 'field': key, + 'value': value, + 'op': 'eq' + } + data.append(c) + return data + + def get_op(self, op_key): + """ + Get the operator. + + :param op_key: The operator key as string. + """ + for op_keys, op in self.std_op: + if op_key in op_keys: + return op + + def apply_criteria(self): + """ + Apply the actual criterion in this filterer and return a query with + filters applied. + """ + query = self.query + + for c in self.criterion: + # NOTE: Try to get the column + try: + col = getattr(self.model, c['field']) + except AttributeError: + msg = '%s is not a valid field to query by' % c['field'] + raise exceptions.InvalidQueryField(msg) + + # NOTE: Handle a special operator + std_op = self.get_op(c['op']) + if hasattr(self, c['op']): + getattr(self, c['op'])(c) + elif std_op: + query = query.filter(std_op(col, c['value'])) + elif c['op'] in ('%', 'like'): + query = query.filter(col.like(c['value'])) + elif c['op'] in ('!%', 'nlike'): + query = query.filter(col.notlike(c['value'])) + else: + msg = 'Invalid operator in criteria \'%s\'' % c + raise exceptions.InvalidOperator(msg) + return query + + class HelpersMixin(object): def setup(self, config_group): self.session = session.get_session(config_group) @@ -36,7 +117,10 @@ def _save(self, obj, save=True): def _list(self, cls=None, query=None, criterion=None): """ - A generic list method + A generic list/search helper method. 
+ + Example criterion: + [{'field': 'id', 'op': 'eq', 'value': 'someid'}] :param cls: The model to try to delete :param criterion: Criterion to match objects with @@ -47,7 +131,8 @@ def _list(self, cls=None, query=None, criterion=None): query = query or self.session.query(cls) if criterion: - query = query.filter_by(**criterion) + filterer = Filterer(cls, query, criterion) + query = filterer.apply_criteria() try: result = query.all() From e0309e2f284b10b8dd14310d32700455999015e6 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 18 Mar 2013 21:37:13 +0000 Subject: [PATCH 027/182] Add Resources --- doc/source/glossary.rst | 9 +++++- doc/source/index.rst | 1 + doc/source/resources/index.rst | 25 ++++++++++++++++ doc/source/resources/subscriptions.rst | 41 ++++++++++++++++++++++++++ 4 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 doc/source/resources/index.rst create mode 100644 doc/source/resources/subscriptions.rst diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst index e9ac672..05b7c16 100644 --- a/doc/source/glossary.rst +++ b/doc/source/glossary.rst @@ -28,4 +28,11 @@ Glossary api Web API central - The Central service that does CRUD operations and more in BS. \ No newline at end of file + The Central service that does CRUD operations and more in BS. + customer + An entity underneath :term:`merchant` that holds different data that + resembles a Customer in an external system like a Tenant, Project etc. + merchant + An entity that holds one or more users, can configure integration with + third party services like OpenStack ceilometer, configure api + credentials for API access etc. 
\ No newline at end of file diff --git a/doc/source/index.rst b/doc/source/index.rst index 4048e3b..8225239 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -15,6 +15,7 @@ Contents: api glossary install/index + resources/index Indices and tables diff --git a/doc/source/resources/index.rst b/doc/source/resources/index.rst new file mode 100644 index 0000000..cc50083 --- /dev/null +++ b/doc/source/resources/index.rst @@ -0,0 +1,25 @@ +.. + Copyright 2013 Endre Karlson + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +.. _install: + +========================= +Resources in Billingstack +========================= + +.. toctree:: + :maxdepth: 2 + + subscriptions \ No newline at end of file diff --git a/doc/source/resources/subscriptions.rst b/doc/source/resources/subscriptions.rst new file mode 100644 index 0000000..1524c6b --- /dev/null +++ b/doc/source/resources/subscriptions.rst @@ -0,0 +1,41 @@ +.. + Copyright 2013 Endre Karlson + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +.. 
_subscription: + + +============ +Subscription +============ + +.. index:: + double: subscription; brief + +Process ++++++++ + +.. note:: Try to outline a sample subscription creation process. + +* Prerequisites: Registered Merchant with API credentials configured for a Service. + +1. The :term:`merchant` configures a API access key for others services. +2. Have an external service to create a new subscription against + BillingStack when a new :term:`resource` is created in a system. + +3. Subscription is either created towards an existing:term:`customer` or + if the :term:`merchant` has a setting configured to allow :term:`customer` + created if the given customer doesn't exist it will be created along with + subscription. +4. When a subscription is created we're ready to receive events from a system. \ No newline at end of file From 6884532e2f3ca65619be84c21682696c97bf7af0 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 18 Mar 2013 21:51:16 +0000 Subject: [PATCH 028/182] Fix typos --- doc/source/resources/subscriptions.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/resources/subscriptions.rst b/doc/source/resources/subscriptions.rst index 1524c6b..58f4f9f 100644 --- a/doc/source/resources/subscriptions.rst +++ b/doc/source/resources/subscriptions.rst @@ -34,8 +34,8 @@ Process 2. Have an external service to create a new subscription against BillingStack when a new :term:`resource` is created in a system. -3. Subscription is either created towards an existing:term:`customer` or +3. Subscription is either created towards an existing :term:`customer` or if the :term:`merchant` has a setting configured to allow :term:`customer` - created if the given customer doesn't exist it will be created along with + created if the given :term:`customer` doesn't exist it will be created along with subscription. 4. When a subscription is created we're ready to receive events from a system. 
\ No newline at end of file From 68c11bbfa453a3d195441b2da0f71dcd5f81381a Mon Sep 17 00:00:00 2001 From: woorea Date: Tue, 19 Mar 2013 00:44:44 +0100 Subject: [PATCH 029/182] updated process --- doc/source/resources/subscriptions.rst | 51 +++++++++++++++++++++----- 1 file changed, 42 insertions(+), 9 deletions(-) diff --git a/doc/source/resources/subscriptions.rst b/doc/source/resources/subscriptions.rst index 58f4f9f..be59f99 100644 --- a/doc/source/resources/subscriptions.rst +++ b/doc/source/resources/subscriptions.rst @@ -1,5 +1,6 @@ .. Copyright 2013 Endre Karlson + Copyright 2013 Luis Gervaso Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain @@ -28,14 +29,46 @@ Process .. note:: Try to outline a sample subscription creation process. -* Prerequisites: Registered Merchant with API credentials configured for a Service. -1. The :term:`merchant` configures a API access key for others services. -2. Have an external service to create a new subscription against - BillingStack when a new :term:`resource` is created in a system. -3. Subscription is either created towards an existing :term:`customer` or - if the :term:`merchant` has a setting configured to allow :term:`customer` - created if the given :term:`customer` doesn't exist it will be created along with - subscription. -4. When a subscription is created we're ready to receive events from a system. \ No newline at end of file +* Prerequisites: Registered Merchant with API credentials configured and a merchant plan available. + +1. User registers in the merchant portal application using the merchant identity manager (e.g keystone) + +2. Portal gathers the available plans from BillingStack + + GET /merchants//plans + +3. 
User select the desired plan to subscribe in + + 3.1 If user is not registered in BillingStack then portal will register first the user in BillingStack + for a customer account + + POST /users + + POST /accounts + + PUT /account//users//roles/ + + PUT /merchants//customers/ + + At this point the user is registered in BillingStack + + 3.2 BillingStack subscription is created for the BillingStack customer + + 3.1 Create the BillingStack Subscription + + POST /merchants//subscriptions + + 3.2 Create a new OpenStack tenant + + POST /tenants + + 3.3 Add OpenStack user to the recently created tenant + + PUT /tenants//users//roles/ + + 3.4 Update subscription resource attribute with the tenant id from OpenStack + + PATCH /merchants//subscriptions/ +4. Now the subscription can start receiving usage data from ceilometer tied by resource attribute \ No newline at end of file From 6cbd83349dbbb88b386a5879f054269ed19d94af Mon Sep 17 00:00:00 2001 From: woorea Date: Tue, 19 Mar 2013 05:49:20 +0100 Subject: [PATCH 030/182] updated --- doc/source/resources/subscriptions.rst | 51 ++++++++++++++++++-------- 1 file changed, 36 insertions(+), 15 deletions(-) diff --git a/doc/source/resources/subscriptions.rst b/doc/source/resources/subscriptions.rst index be59f99..ce8a43e 100644 --- a/doc/source/resources/subscriptions.rst +++ b/doc/source/resources/subscriptions.rst @@ -24,51 +24,72 @@ Subscription .. index:: double: subscription; brief +Prerequisites ++++++++++++++ + +.. note:: BillingStack does not store merchant customer users. Merchant should manage authorization. + +1. Merchant and Plan created in BillingStack + +2. bs-admin Role create in Merchant Identity Manager (e.g keystone) + Process +++++++ .. note:: Try to outline a sample subscription creation process. +1. 
User registers in the merchant portal application using the merchant identity manager (e.g keystone) + POST /v2.0/users -* Prerequisites: Registered Merchant with API credentials configured and a merchant plan available. +2. User login in the merchant portal application using merchant identity manager (e.g keystone) -1. User registers in the merchant portal application using the merchant identity manager (e.g keystone) + POST /v2.0/tokens -2. Portal gathers the available plans from BillingStack + At this point user has an unscoped token + +3. User decides to subscribe in one of the merchant plans + + 3.1 Using the merchan API key & secret portal gathers all the available plans from BillingStack GET /merchants//plans -3. User select the desired plan to subscribe in + 3.2 User select the desired plan to subscribe in + + 3.1 Since the current token is unscoped it's necessary to create customer in BillingStack - 3.1 If user is not registered in BillingStack then portal will register first the user in BillingStack - for a customer account + POST /merchant//customers - POST /users + Using the customer_id obtained from BillingStack a new OpenStack tenant is created + this special tenant should be named as : bs-customer- - POST /accounts + POST /v2.0/tenants - PUT /account//users//roles/ + PUT /v2.0/tenants//users//role/ + PUT /v2.0/tenants//users//role/ - PUT /merchants//customers/ + Now it is necessary exchange the unscoped token to a scoped one - At this point the user is registered in BillingStack + POST /v2.0/tokens 3.2 BillingStack subscription is created for the BillingStack customer 3.1 Create the BillingStack Subscription - POST /merchants//subscriptions + POST /merchants//subscriptions 3.2 Create a new OpenStack tenant - POST /tenants + POST /tenants + + This tenant should be named bs-subscription- 3.3 Add OpenStack user to the recently created tenant - PUT /tenants//users//roles/ + PUT /tenants//users//roles/ 3.4 Update subscription resource attribute with the 
tenant id from OpenStack PATCH /merchants//subscriptions/ -4. Now the subscription can start receiving usage data from ceilometer tied by resource attribute \ No newline at end of file + +4. Now the subscription can start receiving usage data from ceilometer tied by resource attribute From 97d259f84c3e17d374484e3d9b74673efc69a258 Mon Sep 17 00:00:00 2001 From: woorea Date: Tue, 19 Mar 2013 05:51:57 +0100 Subject: [PATCH 031/182] update --- doc/source/resources/subscriptions.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/source/resources/subscriptions.rst b/doc/source/resources/subscriptions.rst index ce8a43e..6bab97e 100644 --- a/doc/source/resources/subscriptions.rst +++ b/doc/source/resources/subscriptions.rst @@ -74,21 +74,21 @@ Process 3.2 BillingStack subscription is created for the BillingStack customer - 3.1 Create the BillingStack Subscription + 3.2.1 Create the BillingStack Subscription POST /merchants//subscriptions - 3.2 Create a new OpenStack tenant + 3.2.2 Create a new OpenStack tenant POST /tenants This tenant should be named bs-subscription- - 3.3 Add OpenStack user to the recently created tenant + 3.2.3 Add OpenStack user to the recently created tenant PUT /tenants//users//roles/ - 3.4 Update subscription resource attribute with the tenant id from OpenStack + 3.2.4 Update subscription resource attribute with the tenant id from OpenStack PATCH /merchants//subscriptions/ From 227cb29fbf1424ee5066df23fa4e9ba24457c142 Mon Sep 17 00:00:00 2001 From: woorea Date: Tue, 19 Mar 2013 05:53:40 +0100 Subject: [PATCH 032/182] new line --- doc/source/resources/subscriptions.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/resources/subscriptions.rst b/doc/source/resources/subscriptions.rst index 6bab97e..aa0775d 100644 --- a/doc/source/resources/subscriptions.rst +++ b/doc/source/resources/subscriptions.rst @@ -66,6 +66,7 @@ Process POST /v2.0/tenants PUT /v2.0/tenants//users//role/ + PUT 
/v2.0/tenants//users//role/ Now it is necessary exchange the unscoped token to a scoped one From 0be523a5733e64eee24be867925795315d7f3a69 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 19 Mar 2013 23:28:51 +0000 Subject: [PATCH 033/182] Switch to Flask for API --- billingstack/api/__init__.py | 16 +- billingstack/api/app.py | 62 -- billingstack/api/auth.py | 55 ++ billingstack/api/base.py | 184 +++- billingstack/api/config.py | 43 - billingstack/api/hooks.py | 40 - billingstack/api/service.py | 52 ++ billingstack/api/v1/__init__.py | 52 +- billingstack/api/v1/controllers.py | 557 ------------ billingstack/api/v1/resources.py | 527 ++++++++++++ billingstack/api/versions.py | 33 + billingstack/central/rpcapi.py | 62 +- billingstack/openstack/common/sslutils.py | 80 ++ billingstack/openstack/common/wsgi.py | 797 ++++++++++++++++++ billingstack/openstack/common/xmlutils.py | 74 ++ billingstack/sqlalchemy/api.py | 13 + billingstack/sqlalchemy/model_base.py | 4 +- billingstack/sqlalchemy/session.py | 3 +- .../storage/impl_sqlalchemy/__init__.py | 30 +- billingstack/tests/api/__init__.py | 1 - billingstack/tests/api/base.py | 124 ++- billingstack/tests/api/v1/__init__.py | 1 - billingstack/tests/api/v1/base.py | 23 - billingstack/tests/api/v1/test_currency.py | 6 +- billingstack/tests/api/v1/test_customer.py | 6 +- .../tests/api/v1/test_invoice_state.py | 6 +- billingstack/tests/api/v1/test_language.py | 6 +- billingstack/tests/api/v1/test_merchant.py | 6 +- billingstack/tests/api/v1/test_plan.py | 6 +- billingstack/tests/api/v1/test_product.py | 6 +- billingstack/tests/identity/test_api.py | 6 +- billingstack/tests/storage/__init__.py | 6 +- billingstack/{api/root.py => wsgi.py} | 26 +- bin/billingstack-api | 46 +- etc/billingstack/api-paste.ini.sample | 33 + etc/billingstack/billingstack.conf.sample | 3 + setup.py | 2 + tools/pip-requires | 13 +- 38 files changed, 2042 insertions(+), 968 deletions(-) delete mode 100644 billingstack/api/app.py create mode 100644 
billingstack/api/auth.py delete mode 100644 billingstack/api/config.py delete mode 100644 billingstack/api/hooks.py create mode 100644 billingstack/api/service.py delete mode 100644 billingstack/api/v1/controllers.py create mode 100644 billingstack/api/v1/resources.py create mode 100644 billingstack/api/versions.py create mode 100644 billingstack/openstack/common/sslutils.py create mode 100644 billingstack/openstack/common/wsgi.py create mode 100644 billingstack/openstack/common/xmlutils.py rename billingstack/{api/root.py => wsgi.py} (52%) create mode 100644 etc/billingstack/api-paste.ini.sample diff --git a/billingstack/api/__init__.py b/billingstack/api/__init__.py index 58bc8a7..c4609c6 100644 --- a/billingstack/api/__init__.py +++ b/billingstack/api/__init__.py @@ -15,6 +15,10 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +# +# Copied: Moniker +import flask +from billingstack.openstack.common import jsonutils as json from oslo.config import cfg @@ -22,8 +26,16 @@ cfg.IntOpt('api_port', default=9091, help='The port for the billing API server'), cfg.IntOpt('api_listen', default='0.0.0.0', help='Bind to address'), - cfg.StrOpt('storage_driver', default='sqlalchemy', - help='Storage driver to use'), + cfg.IntOpt('workers', default=None, + help='Number of worker processes to spawn'), + cfg.StrOpt('api_paste_config', default='api-paste.ini', + help='File name for the paste.deploy config for the api'), + cfg.StrOpt('auth_strategy', default='noauth', + help='The strategy to use for auth. 
Supports noauth or ' + 'keystone'), ] cfg.CONF.register_opts(API_SERVICE_OPTS, 'service:api') + +# Allows us to serialize datetime's etc +flask.helpers.json = json diff --git a/billingstack/api/app.py b/billingstack/api/app.py deleted file mode 100644 index ee3682a..0000000 --- a/billingstack/api/app.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2012 Woorea Solutions, S.L -# -# Author: Luis Gervaso -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from pecan import configuration -from pecan import make_app - -from billingstack.api import config as api_config -from billingstack.api import hooks - - -def get_pecan_config(): - # Set up the pecan configuration - filename = api_config.__file__.replace('.pyc', '.py') - return configuration.conf_from_file(filename) - - -def setup_app(pecan_config=None, extra_hooks=None): - - app_hooks = [hooks.ConfigHook(), - hooks.RPCHook() - ] - - if extra_hooks: - app_hooks.extend(extra_hooks) - - if not pecan_config: - pecan_config = get_pecan_config() - - app_hooks.append(hooks.NoAuthHook()) - - configuration.set_config(dict(pecan_config), overwrite=True) - - app = make_app( - pecan_config.app.root, - static_root=pecan_config.app.static_root, - template_path=pecan_config.app.template_path, - logging=getattr(pecan_config, 'logging', {}), - debug=getattr(pecan_config.app, 'debug', False), - force_canonical=getattr(pecan_config.app, 'force_canonical', True), - hooks=app_hooks, - 
guess_content_type_from_ext=getattr( - pecan_config.app, - 'guess_content_type_from_ext', - True), - ) - - return app diff --git a/billingstack/api/auth.py b/billingstack/api/auth.py new file mode 100644 index 0000000..aebfcd3 --- /dev/null +++ b/billingstack/api/auth.py @@ -0,0 +1,55 @@ +# Copyright 2012 Managed I.T. +# +# Author: Kiall Mac Innes +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Copied: Moniker + +from oslo.config import cfg + +from billingstack.openstack.common import local +from billingstack.openstack.common import log as logging +from billingstack.openstack.common.context import RequestContext +from billingstack import wsgi + +LOG = logging.getLogger(__name__) + + +def pipeline_factory(loader, global_conf, **local_conf): + """ + A paste pipeline replica that keys off of auth_strategy. + + Code nabbed from cinder. + """ + pipeline = local_conf[cfg.CONF['service:api'].auth_strategy] + pipeline = pipeline.split() + filters = [loader.get_filter(n) for n in pipeline[:-1]] + app = loader.get_app(pipeline[-1]) + filters.reverse() + for filter in filters: + app = filter(app) + return app + + +class NoAuthContextMiddleware(wsgi.Middleware): + def process_request(self, request): + # NOTE(kiall): This makes the assumption that disabling authentication + # means you wish to allow full access to everyone. + context = RequestContext(is_admin=True) + + # Store the context where oslo-log exepcts to find it. 
+ local.store.context = context + + # Attach the context to the request environment + request.environ['context'] = context diff --git a/billingstack/api/base.py b/billingstack/api/base.py index 90e54b5..1d50bf2 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -1,8 +1,15 @@ import inspect -import pecan -from pecan import request -from pecan.rest import RestController +import mimetypes +import traceback + + +from flask import abort, request, Blueprint, Response +from billingstack.openstack.common.wsgi import JSONDictSerializer, \ + XMLDictSerializer, JSONDeserializer + from wsme.types import Base, Enum, UserType, text, Unset, wsproperty +from werkzeug.datastructures import MIMEAccept + from billingstack.openstack.common import log @@ -52,7 +59,7 @@ def set_op(self, value): "The value to compare against the stored data" def __repr__(self): - # for logging calls + # for LOG calls return '' % (self.field, self.op, self.value) @classmethod @@ -112,44 +119,6 @@ def _query_to_kwargs(query, db_func): kwargs['metaquery'] = metaquery -class RestBase(RestController): - __resource__ = None - __id__ = None - - def __init__(self, parent=None, id_=None): - self.parent = parent - if self.__id__: - request.context[self.__id__ + '_id'] = id_ - self.id_ = id_ - - @pecan.expose() - def _lookup(self, *url_data): - """ - A fun approach to _lookup - checks self.__resource__ for the "id" - """ - id_ = None - if len(url_data) >= 1: - id_ = url_data[0] - parts = url_data[1:] if len(url_data) > 1 else () - LOG.debug("Lookup: id '%s' parts '%s'", id_, parts) - - resource = self.__resource__ - if inspect.isclass(resource) and issubclass(resource, RestBase): - return resource(parent=self, id_=id_), parts - - def __getattr__(self, name): - """ - Overload this to look in self.__resource__ if name is defined as a - Controller - """ - if name in self.__dict__: - return self.__dict__[name] - elif isinstance(self.__resource__, dict) and name in self.__resource__: - return 
self.__resource__[name](parent=self) - else: - raise AttributeError - - class ModelBase(Base): def as_dict(self): """ @@ -180,3 +149,134 @@ def from_db(cls, values): Return a class of this object from values in the from_db """ return cls(**values) + + +class Rest(Blueprint): + def get(self, rule, status_code=200): + return self._mroute('GET', rule, status_code) + + def post(self, rule, status_code=202): + return self._mroute('POST', rule, status_code) + + def put(self, rule, status_code=202): + return self._mroute('PUT', rule, status_code) + + def delete(self, rule, status_code=204): + return self._mroute('DELETE', rule, status_code) + + def _mroute(self, methods, rule, status_code=None): + if type(methods) is str: + methods = [methods] + return self.route(rule, methods=methods, status_code=status_code) + + def route(self, rule, **options): + status = options.pop('status_code', None) + + def decorator(func): + endpoint = options.pop('endpoint', func.__name__) + + def handler(**kwargs): + # extract response content type + resp_type = request.accept_mimetypes + type_suffix = kwargs.pop('resp_type', None) + if type_suffix: + suffix_mime = mimetypes.guess_type("res." + type_suffix)[0] + if suffix_mime: + resp_type = MIMEAccept([(suffix_mime, 1)]) + request.resp_type = resp_type + + # NOTE: Extract fields (column selection) + fields = list(set(request.args.getlist('fields'))) + fields.sort() + request.fields_selector = fields + + if status: + request.status_code = status + + return func(**kwargs) + + #_rule = "/" + rule + # NOTE: Add 2 set of rules, 1 with response content type and one wo + self.add_url_rule(rule, endpoint, handler, **options) + rtype_rule = rule + '.' 
+ self.add_url_rule(rtype_rule, endpoint, handler, **options) + + return func + + return decorator + + +RT_JSON = MIMEAccept([("application/json", 1)]) +RT_XML = MIMEAccept([("application/xml", 1)]) + + +def render(res=None, resp_type=None, status=None, **kwargs): + if not res: + res = {} + elif isinstance(res, ModelBase): + res = res.as_dict() + elif isinstance(res, list): + new_res = [] + for r in res: + new_res.append(r.as_dict()) + res = new_res + + if isinstance(res, dict): + res.update(kwargs) + elif kwargs: + # can't merge kwargs into the non-dict res + abort_and_log(500, "Non-dict and non-empty kwargs passed to render") + + status_code = getattr(request, 'status_code', None) + if status: + status_code = status + if not status_code: + status_code = 200 + + if not resp_type: + req_resp_type = getattr(request, 'resp_type', None) + resp_type = req_resp_type if req_resp_type else RT_JSON + + serializer = None + if "application/json" in resp_type: + resp_type = RT_JSON + serializer = JSONDictSerializer() + elif "application/xml" in resp_type: + resp_type = RT_XML + serializer = XMLDictSerializer() + else: + abort_and_log(400, "Content type '%s' isn't supported" % resp_type) + + body = serializer.serialize(res) + resp_type = str(resp_type) + return Response(response=body, status=status_code, mimetype=resp_type) + + +def request_data(model): + if not request.content_length > 0: + LOG.debug("Empty body provided in request") + return dict() + + deserializer = None + content_type = request.mimetype + + if not content_type or content_type in RT_JSON: + deserializer = JSONDeserializer() + elif content_type in RT_XML: + abort_and_log(400, "XML requests are not supported yet") + # deserializer = XMLDeserializer() + else: + abort_and_log(400, "Content type '%s' isn't supported" % content_type) + + data = deserializer.deserialize(request.data)['body'] + return model(**data).to_db() + + +def abort_and_log(status_code, descr, exc=None): + LOG.error("Request aborted with 
status code %s and message '%s'", + status_code, descr) + + if exc is not None: + LOG.error(traceback.format_exc()) + + abort(status_code, description=descr) diff --git a/billingstack/api/config.py b/billingstack/api/config.py deleted file mode 100644 index ea8ca91..0000000 --- a/billingstack/api/config.py +++ /dev/null @@ -1,43 +0,0 @@ -# Server Specific Configurations -server = { - 'port': '9001', - 'host': '0.0.0.0' -} - -# Pecan Application Configurations -app = { - 'root': 'billingstack.api.root.RootController', - 'modules': ['billingstack.api'], - 'static_root': '%(confdir)s/public', - 'template_path': '%(confdir)s/templates', - 'debug': False, - 'enable_acl': True, -} - -logging = { - 'loggers': { - 'root': {'level': 'INFO', 'handlers': ['console']}, - 'billingstack': {'level': 'DEBUG', 'handlers': ['console']}, - 'wsme': {'level': 'DEBUG', 'handlers': ['console']} - }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'simple' - } - }, - 'formatters': { - 'simple': { - 'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]' - '[%(threadName)s] %(message)s') - } - }, -} - -# Custom Configurations must be in Python dictionary format:: -# -# foo = {'bar':'baz'} -# -# All configurations are accessible at:: -# pecan.conf diff --git a/billingstack/api/hooks.py b/billingstack/api/hooks.py deleted file mode 100644 index f93c3e4..0000000 --- a/billingstack/api/hooks.py +++ /dev/null @@ -1,40 +0,0 @@ -from pecan import hooks -from oslo.config import cfg - -from billingstack import storage -from billingstack.central.rpcapi import CentralAPI -from billingstack.openstack.common.context import RequestContext - - -class NoAuthHook(hooks.PecanHook): - """ - Simple auth - all requests will be is_admin=True - """ - def before(self, state): - state.request.ctxt = RequestContext(is_admin=True) - - -class ConfigHook(hooks.PecanHook): - """Attach the configuration object to the request - so controllers can get to it. 
- """ - - def before(self, state): - state.request.cfg = cfg.CONF - - -class DBHook(hooks.PecanHook): - def before(self, state): - storage_engine = storage.get_engine( - state.request.cfg['service:api'].storage_driver) - state.request.storage_engine = storage_engine - state.request.storage_conn = storage_engine.get_connection() - - # def after(self, state): - # print 'method:', state.request.method - # print 'response:', state.response.status - - -class RPCHook(hooks.PecanHook): - def before(self, state): - state.request.central_api = CentralAPI() diff --git a/billingstack/api/service.py b/billingstack/api/service.py new file mode 100644 index 0000000..20be4eb --- /dev/null +++ b/billingstack/api/service.py @@ -0,0 +1,52 @@ +# Copyright 2012 Managed I.T. +# +# Author: Kiall Mac Innes +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +# Copied: Moniker +from oslo.config import cfg +from paste import deploy + +from billingstack.openstack.common import log as logging +from billingstack.openstack.common import wsgi +from billingstack import exceptions +from billingstack import utils +#from billingstack import policy + + +LOG = logging.getLogger(__name__) + + +class Service(wsgi.Service): + def __init__(self, backlog=128, threads=1000): + + api_paste_config = cfg.CONF['service:api'].api_paste_config + config_paths = utils.find_config(api_paste_config) + + if len(config_paths) == 0: + msg = 'Unable to determine appropriate api-paste-config file' + raise exceptions.ConfigurationError(msg) + + LOG.info('Using api-paste-config found at: %s' % config_paths[0]) + + #policy.init_policy() + + application = deploy.loadapp("config:%s" % config_paths[0], + name='bs_api') + + super(Service, self).__init__(application=application, + host=cfg.CONF['service:api'].api_listen, + port=cfg.CONF['service:api'].api_port, + backlog=backlog, + threads=threads) diff --git a/billingstack/api/v1/__init__.py b/billingstack/api/v1/__init__.py index 3d3035c..9f48d24 100644 --- a/billingstack/api/v1/__init__.py +++ b/billingstack/api/v1/__init__.py @@ -1 +1,51 @@ -from .controllers import V1Controller +# Copyright 2012 Managed I.T. +# +# Author: Kiall Mac Innes +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +# Copied: Moniker +import flask +from oslo.config import cfg +from stevedore import named +from billingstack.openstack.common import log as logging +from billingstack.api.v1.resources import bp as v1_bp + +LOG = logging.getLogger(__name__) + + +cfg.CONF.register_opts([ + cfg.ListOpt('enabled-extensions-v1', default=[], + help='Enabled API Extensions'), +], group='service:api') + + +def factory(global_config, **local_conf): + app = flask.Flask('billingstack.api.v1') + + app.register_blueprint(v1_bp) + + # TODO(kiall): Ideally, we want to make use of the Plugin class here. + # This works for the moment though. + def _register_blueprint(ext): + app.register_blueprint(ext.plugin) + + # Add any (enabled) optional extensions + extensions = cfg.CONF['service:api'].enabled_extensions_v1 + + if len(extensions) > 0: + extmgr = named.NamedExtensionManager('billingstack.api.v1.extensions', + names=extensions) + extmgr.map(_register_blueprint) + + return app diff --git a/billingstack/api/v1/controllers.py b/billingstack/api/v1/controllers.py deleted file mode 100644 index f9803b6..0000000 --- a/billingstack/api/v1/controllers.py +++ /dev/null @@ -1,557 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2012 Woorea Solutions, S.L -# -# Author: Luis Gervaso -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import request - -import wsmeext.pecan as wsme_pecan - -from billingstack.openstack.common import log -from billingstack.api.base import RestBase, Query -from billingstack.api.v1 import models - -LOG = log.getLogger(__name__) - - -class CurrencyController(RestBase): - @wsme_pecan.wsexpose(models.Currency) - def get_all(self): - row = request.central_api.get_currency(request.ctxt, - self.id_) - return models.Currency.from_db(row) - - @wsme_pecan.wsexpose(models.Currency, body=models.Currency) - def put(self, body): - row = request.central_api.update_currency( - request.ctxt, - self.id_, - body.to_db()) - return models.Currency.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_currency(request.ctxt, self.id_) - - -class CurrenciesController(RestBase): - """Currsencies controller""" - __resource__ = CurrencyController - - @wsme_pecan.wsexpose([models.Currency]) - def get_all(self): - rows = request.central_api.list_currency(request.ctxt) - - return [models.Currency.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.Currency, body=models.Currency) - def post(self, body): - row = request.central_api.create_currency( - request.ctxt, - body.to_db()) - - return models.Currency.from_db(row) - - -class LanguageController(RestBase): - @wsme_pecan.wsexpose(models.Language) - def get_all(self): - row = request.central_api.get_language(request.ctxt, - self.id_) - return models.Language.from_db(row) - - @wsme_pecan.wsexpose(models.Language, body=models.Language) - def put(self, body): - row = request.central_api.update_language( - request.ctxt, - self.id_, - body.to_db()) - return models.Language.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_language(request.ctxt, self.id_) - - -class LanguagesController(RestBase): - """Languages controller""" - __resource__ = LanguageController - - @wsme_pecan.wsexpose([models.Language]) - def get_all(self): - rows = 
request.central_api.list_language(request.ctxt) - - return [models.Language.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.Language, body=models.Language) - def post(self, body): - row = request.central_api.create_language( - request.ctxt, - body.to_db()) - - return models.Language.from_db(row) - - -class PGProvidersController(RestBase): - """ - PaymentGatewayProviders - """ - @wsme_pecan.wsexpose([models.PGProvider]) - def get_all(self): - rows = request.central_api.list_pg_provider(request.ctxt) - - return [models.PGProvider.from_db(r) for r in rows] - - -class PGMethodsController(RestBase): - """ - PGMethods lister... - """ - @wsme_pecan.wsexpose([models.PGMethod]) - def get_all(self): - rows = request.central_api.list_pg_method(request.ctxt) - - return [models.PGMethod.from_db(r) for r in rows] - - -class InvoiceStateController(RestBase): - @wsme_pecan.wsexpose(models.InvoiceState) - def get_all(self): - row = request.central_api.get_invoice_state(request.ctxt, - self.id_) - return models.InvoiceState.from_db(row) - - @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState) - def put(self, body): - row = request.central_api.update_invoice_state( - request.ctxt, - self.id_, - body.to_db()) - return models.InvoiceState.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_invoice_state(request.ctxt, self.id_) - - -class InvoiceStatecontroller(RestBase): - """ - PaymentGatewayProviders - """ - __resource__ = InvoiceStateController - - @wsme_pecan.wsexpose([models.InvoiceState]) - def get_all(self): - rows = request.central_api.list_invoice_state(request.ctxt) - - return [models.InvoiceState.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState) - def post(self, body): - row = request.central_api.create_invoice_state( - request.ctxt, - body.to_db()) - - return models.InvoiceState.from_db(row) - - -# Plans -class PlanController(RestBase): - __id__ = 'plan' - - 
@wsme_pecan.wsexpose(models.Plan) - def get_all(self): - row = request.central_api.get_plan(request.ctxt, self.id_) - - return models.Plan.from_db(row) - - @wsme_pecan.wsexpose(models.Plan, body=models.Plan) - def put(self, body): - row = request.central_api.update_plan( - request.ctxt, - self.id_, - body.to_db()) - - return models.Plan.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_plan(request.ctxt, self.id_) - - -class PlansController(RestBase): - __resource__ = PlanController - - @wsme_pecan.wsexpose([models.Plan], [Query]) - def get_all(self, q=[]): - rows = request.central_api.list_plan( - request.ctxt, - criterion=[o.as_dict() for o in q]) - - return [models.Plan.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.Plan, body=models.Plan) - def post(self, body): - row = request.central_api.create_plan( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Plan.from_db(row) - - -# Products -class ProductController(RestBase): - __id__ = 'product' - - @wsme_pecan.wsexpose(models.Product) - def get_all(self): - row = request.central_api.get_product(request.ctxt, self.id_) - - return models.Product.from_db(row) - - @wsme_pecan.wsexpose(models.Product, body=models.Product) - def put(self, body): - row = request.central_api.update_product( - request.ctxt, - self.id_, - body.to_db()) - - return models.Product.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_product(request.ctxt, self.id_) - - -class ProductsController(RestBase): - __resource__ = ProductController - - @wsme_pecan.wsexpose([models.Product], [Query]) - def get_all(self, q=[]): - rows = request.central_api.list_product( - request.ctxt, - criterion=[o.as_dict() for o in q]) - - return [models.Product.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.Product, body=models.Product) - def post(self, body): - row = request.central_api.create_product( - request.ctxt, - 
request.context['merchant_id'], - body.to_db()) - - return models.Product.from_db(row) - - -# Invoice -class InvoiceController(RestBase): - __id__ = 'invoice' - - @wsme_pecan.wsexpose(models.Invoice) - def get_all(self): - row = request.central_api.get_invoice(request.ctxt, self.id_) - - return models.Invoice.from_db(row) - - @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice) - def put(self, body): - row = request.central_api.update_invoice( - request.ctxt, - self.id_, - body.to_db()) - - return models.Invoice.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_invoice(request.ctxt, self.id_) - - -class InvoicesController(RestBase): - __resource__ = InvoiceController - - @wsme_pecan.wsexpose([models.Invoice], [Query]) - def get_all(self, q=[]): - rows = request.central_api.list_invoice( - request.ctxt, - criterion=[o.as_dict() for o in q]) - - return [models.Invoice.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice) - def post(self, body): - row = request.central_api.create_invoice( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Invoice.from_db(row) - - -# Subscription -class SubscriptionController(RestBase): - __id__ = 'subscription' - - @wsme_pecan.wsexpose(models.Subscription) - def get_all(self): - row = request.central_api.get_subscription(request.ctxt, self.id_) - - return models.Invoice.from_db(row) - - @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription) - def put(self, body): - row = request.central_api.update_subscription( - request.ctxt, - self.id_, - body.to_db()) - - return models.Subscription.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_subscription(request.ctxt, self.id_) - - -class SubscriptionsController(RestBase): - __resource__ = SubscriptionController - - @wsme_pecan.wsexpose([models.Subscription], [Query]) - def get_all(self, q=[]): - rows = 
request.central_api.list_subscription( - request.ctxt, - criterion=[o.as_dict() for o in q]) - - return [models.Subscription.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription) - def post(self, body): - row = request.central_api.create_subscription( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Subscription.from_db(row) - - -# PaymentGatewayConfig -class PGConfigController(RestBase): - """PGConfig controller""" - __id__ = 'pg_config' - - @wsme_pecan.wsexpose(models.PGConfig, unicode) - def get_all(self): - row = request.central_api.get_pg_config(request.ctxt, self.id_) - - return models.PGConfig.from_db(row) - - @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig) - def put(self, body): - row = request.central_api.update_pg_config( - request.ctxt, - self.id_, - body.to_db()) - - return models.PGConfig.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_pg_config(request.ctxt, self.id_) - - -class PGConfigsController(RestBase): - """PaymentMethods controller""" - __resource__ = PGConfigController - - @wsme_pecan.wsexpose([models.PGConfig], [Query]) - def get_all(self, q=[]): - rows = request.central_api.list_pg_config( - request.ctxt, - criterion=[o.as_dict() for o in q]) - - return [models.PGConfig.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig) - def post(self, body): - row = request.central_api.create_pg_config( - request.ctxt, - request.context['customer_id'], - body.to_db()) - - return models.PGConfig.from_db(row) - - -# PaymentMethod -class PaymentMethodController(RestBase): - """PaymentMethod controller""" - __id__ = 'payment_method' - - @wsme_pecan.wsexpose(models.PaymentMethod, unicode) - def get_all(self): - row = request.central_api.get_payment_method(request.ctxt, self.id_) - - return models.PaymentMethod.from_db(row) - - @wsme_pecan.wsexpose(models.PaymentMethod, 
body=models.PaymentMethod) - def put(self, body): - row = request.central_api.update_payment_method( - request.ctxt, - self.id_, - body.to_db()) - - return models.PaymentMethod.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_payment_method(request.ctxt, self.id_) - - -class PaymentMethodsController(RestBase): - """PaymentMethods controller""" - __resource__ = PaymentMethodController - - @wsme_pecan.wsexpose([models.PaymentMethod], [Query]) - def get_all(self, q=[]): - rows = request.central_api.list_payment_method( - request.ctxt, - criterion=[o.as_dict() for o in q]) - - return [models.PaymentMethod.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) - def post(self, body): - row = request.central_api.create_payment_method( - request.ctxt, - request.context['customer_id'], - body.to_db()) - - return models.PaymentMethod.from_db(row) - - -# Customers -class CustomerController(RestBase): - """Customer controller""" - __id__ = 'customer' - __resource__ = { - "payment-methods": PaymentMethodsController - } - - @wsme_pecan.wsexpose(models.Customer, unicode) - def get_all(self): - row = request.central_api.get_customer(request.ctxt, self.id_) - - return models.Customer.from_db(row) - - @wsme_pecan.wsexpose(models.Customer, body=models.Customer) - def put(self, body): - row = request.central_api.update_customer( - request.ctxt, - self.id_, - body.to_db()) - - return models.Customer.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_customer(request.ctxt, self.id_) - - -class CustomersController(RestBase): - """Customers controller""" - __resource__ = CustomerController - - @wsme_pecan.wsexpose([models.Customer], [Query]) - def get_all(self, q=[]): - rows = request.central_api.list_customer( - request.ctxt, - criterion=[o.as_dict() for o in q]) - - return [models.Customer.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(models.Customer, 
body=models.Customer) - def post(self, body): - row = request.central_api.create_customer( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Customer.from_db(row) - - -class MerchantController(RestBase): - __id__ = 'merchant' - __resource__ = { - "customers": CustomersController, - "invoices": InvoicesController, - "payment-gateways": PGConfigsController, - "plans": PlansController, - "products": ProductsController, - "subscriptions": SubscriptionsController - } - - @wsme_pecan.wsexpose(models.Merchant) - def get_all(self): - row = request.central_api.get_merchant(request.ctxt, self.id_) - - return models.Merchant.from_db(row) - - @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant) - def put(self, body): - row = request.central_api.update_merchant( - request.ctxt, - self.id_, - body.to_db()) - - return models.Merchant.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.central_api.delete_merchant(request.ctxt, self.id_) - - -class MerchantsController(RestBase): - """Merchants controller""" - __resource__ = MerchantController - - @wsme_pecan.wsexpose([models.Merchant], [Query]) - def get_all(self, q=[]): - rows = request.central_api.list_merchant(request.ctxt) - - return [models.Merchant.from_db(i) for i in rows] - - @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant) - def post(self, body): - row = request.central_api.create_merchant( - request.ctxt, - body.to_db()) - - return models.Merchant.from_db(row) - - -class V1Controller(RestBase): - """Version 1 API controller.""" - - __resource__ = { - 'invoice-states': InvoiceStatecontroller, - 'payment-gateway-providers': PGProvidersController, - 'payment-gateway-methods': PGMethodsController - } - - currencies = CurrenciesController() - languages = LanguagesController() - - merchants = MerchantsController() diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py new file mode 100644 index 0000000..556b578 --- /dev/null +++ 
b/billingstack/api/v1/resources.py @@ -0,0 +1,527 @@ +from flask import request + + +from billingstack.api.base import Rest, render, request_data +from billingstack.api.v1 import models +from billingstack.central.rpcapi import central_api + + +bp = Rest('v1', __name__) + + +@bp.get('/') +def index(): + return render() + + +# Currencies +@bp.post('/currencies') +def create_currency(): + data = request_data(models.Currency) + + row = central_api.create_currency(request.environ['context'], data) + + return render(models.Currency.from_db(row)) + + +@bp.get('/currencies') +def list_currencies(): + rows = central_api.list_currencies(request.environ['context']) + + return render([models.Currency.from_db(r) for r in rows]) + + +@bp.get('/currencies/') +def get_currency(currency_id): + row = central_api.get_currency(request.environ['context'], + currency_id) + + return render(models.Currency.from_db(row)) + + +@bp.put('/currencies/') +def update_currency(currency_id): + data = request_data(models.Currency) + + row = central_api.update_currency( + request.environ['context'], + currency_id, + data) + + return render(models.Currency.from_db(row)) + + +@bp.delete('/currencies/') +def delete_currency(currency_id): + central_api.delete_currency(request.environ['context'], currency_id) + return render() + + +# Language +@bp.post('/languages') +def create_language(): + data = request_data(models.Language) + + row = central_api.create_language(request.environ['context'], data) + + return render(models.Language.from_db(row)) + + +@bp.get('/languages') +def list_languages(): + rows = central_api.list_languages(request.environ['context']) + + return render([models.Language.from_db(r) for r in rows]) + + +@bp.get('/languages/') +def get_language(language_id): + row = central_api.get_language(request.environ['context'], + language_id) + + return render(models.Language.from_db(row)) + + +@bp.put('/languages/') +def update_language(language_id): + data = request_data(models.Language) + + 
row = central_api.update_language( + request.environ['context'], + language_id, + data) + + return render(models.Language.from_db(row)) + + +@bp.delete('/languages/') +def delete_language(language_id): + central_api.delete_language(request.environ['context'], language_id) + return render() + + +# PGP / PGM +@bp.get('/payment-gateway-providers') +def list_pg_providers(self): + rows = request.central_api.list_pg_provider(request.ctxt) + + return render([models.PGProvider.from_db(r) for r in rows]) + + +@bp.get('/payment-gateway-methods') +def list_pg_methods(self): + rows = request.central_api.list_pg_method(request.ctxt) + + return render([models.PGMethod.from_db(r) for r in rows]) + + +# invoice_states +@bp.post('/invoice-states') +def create_invoice_state(): + data = request_data(models.InvoiceState) + + row = central_api.create_invoice_state(request.environ['context'], data) + + return render(models.InvoiceState.from_db(row)) + + +@bp.get('/invoice-states') +def list_invoice_states(): + rows = central_api.list_invoice_states(request.environ['context']) + + return render([models.InvoiceState.from_db(r) for r in rows]) + + +@bp.get('/invoice-states/') +def get_invoice_state(state_id): + row = central_api.get_invoice_state(request.environ['context'], + state_id) + + return render(models.InvoiceState.from_db(row)) + + +@bp.put('/invoice-states/') +def update_invoice_state(state_id): + data = request_data(models.InvoiceState) + + row = central_api.update_invoice_state( + request.environ['context'], + state_id, + data) + + return render(models.InvoiceState.from_db(row)) + + +@bp.delete('/invoice-states/') +def delete_invoice_state(state_id): + central_api.delete_invoice_state( + request.environ['context'], + state_id) + return render() + + +# merchants +@bp.post('/merchants') +def create_merchant(): + data = request_data(models.Merchant) + + row = central_api.create_merchant(request.environ['context'], data) + + return render(models.Merchant.from_db(row)) + + 
+@bp.get('/merchants') +def list_merchants(): + rows = central_api.list_merchants(request.environ['context']) + + return render([models.Merchant.from_db(r) for r in rows]) + + +@bp.get('/merchants/') +def get_merchant(merchant_id): + row = central_api.get_merchant(request.environ['context'], + merchant_id) + + return render(models.Merchant.from_db(row)) + + +@bp.put('/merchants/') +def update_merchant(merchant_id): + data = request_data(models.Merchant) + + row = central_api.update_merchant( + request.environ['context'], + merchant_id, + data) + + return render(models.Merchant.from_db(row)) + + +@bp.delete('/merchants/') +def delete_merchant(merchant_id): + central_api.delete_merchant(request.environ['context'], merchant_id) + return render() + + +# Invoices +@bp.post('/merchants//invoices') +def create_payment_gateway(merchant_id): + data = request_data(models.Invoice) + + row = central_api.create_pg_config( + request.environ['context'], + merchant_id, + data) + + return render(models.Invoice.from_db(row)) + + +@bp.get('/merchants//payment-gateways') +def list_payment_gateways(merchant_id): + rows = central_api.list_pg_config(request.environ['context']) + + return render([models.Invoice.from_db(r) for r in rows]) + + +@bp.get('/merchants//payment-gateways/') +def get_payment_gateway(merchant_id, pg_config_id): + row = central_api.get_pg_config(request.environ['context'], pg_config_id) + + return render(models.Invoice.from_db(row)) + + +@bp.put('/merchants//payment-gateways/') +def update_payment_gateway(merchant_id, pg_config_id): + data = request_data(models.Invoice) + + row = central_api.update_pg_config( + request.environ['context'], + pg_config_id, + data) + + return render(models.Invoice.from_db(row)) + + +@bp.delete('/merchants//payment-gateways/') +def delete_pg_config(merchant_id, pg_config_id): + central_api.delete_pg_config( + request.environ['context'], + pg_config_id) + return render() + + +# customers +@bp.post('/merchants//customers') +def 
create_customer(merchant_id): + data = request_data(models.Customer) + + row = central_api.create_customer( + request.environ['context'], + merchant_id, + data) + + return render(models.Customer.from_db(row)) + + +@bp.get('/merchants//customers') +def list_customers(merchant_id): + rows = central_api.list_customers(request.environ['context']) + + return render([models.Customer.from_db(r) for r in rows]) + + +@bp.get('/merchants//customers/') +def get_customer(merchant_id, customer_id): + row = central_api.get_customer(request.environ['context'], + customer_id) + + return render(models.Customer.from_db(row)) + + +@bp.put('/merchants//customers/') +def update_customer(merchant_id, customer_id): + data = request_data(models.Customer) + + row = central_api.update_customer( + request.environ['context'], + customer_id, + data) + + return render(models.Customer.from_db(row)) + + +@bp.delete('/merchants//customers/') +def delete_customer(merchant_id, customer_id): + central_api.delete_customer(request.environ['context'], customer_id) + return render() + + +# PaymentMethods +@bp.post('/merchants//customers//payment-methods') +def create_payment_method(merchant_id, customer_id): + data = request_data(models.PaymentMethod) + + row = central_api.create_payment_method( + request.environ['context'], + merchant_id, + data) + + return render(models.PaymentMethod.from_db(row)) + + +@bp.get('/merchants//customers//payment-methods') +def list_payment_methods(merchant_id): + rows = central_api.list_payment_methods(request.environ['context']) + + return render([models.PaymentMethod.from_db(r) for r in rows]) + + +@bp.get('/merchants//customers//payment-methods/' + '') +def get_payment_method(merchant_id, customer_id, pm_id): + row = central_api.get_payment_method(request.environ['context'], pm_id) + + return render(models.PaymentMethod.from_db(row)) + + +@bp.put('/merchants//customers/') +def update_payment_method(merchant_id, customer_id, pm_id): + data = 
request_data(models.PaymentMethod) + + row = central_api.update_payment_method(request.environ['context'], pm_id, + data) + + return render(models.PaymentMethod.from_db(row)) + + +@bp.delete('/merchants//customers//payment-methods/' + '') +def delete_payment_method(merchant_id, customer_id, pm_id): + central_api.delete_payment_method(request.environ['context'], pm_id) + return render() + + +# Plans +@bp.post('/merchants//plans') +def create_plan(merchant_id): + data = request_data(models.Plan) + + row = central_api.create_plan( + request.environ['context'], + merchant_id, + data) + + return render(models.Plan.from_db(row)) + + +@bp.get('/merchants//plans') +def list_plans(merchant_id): + rows = central_api.list_plans(request.environ['context']) + + return render([models.Plan.from_db(r) for r in rows]) + + +@bp.get('/merchants//plans/') +def get_plan(merchant_id, plan_id): + row = central_api.get_plan(request.environ['context'], + plan_id) + + return render(models.Plan.from_db(row)) + + +@bp.put('/merchants//plans/') +def update_plan(merchant_id, plan_id): + data = request_data(models.Plan) + + row = central_api.update_plan( + request.environ['context'], + plan_id, + data) + + return render(models.Plan.from_db(row)) + + +@bp.delete('/merchants//plans/') +def delete_plan(merchant_id, plan_id): + central_api.delete_plan(request.environ['context'], plan_id) + return render() + + +# Products +@bp.post('/merchants//products') +def create_product(merchant_id): + data = request_data(models.Product) + + row = central_api.create_product( + request.environ['context'], + merchant_id, + data) + + return render(models.Product.from_db(row)) + + +@bp.get('/merchants//products') +def list_products(merchant_id): + rows = central_api.list_products(request.environ['context']) + + return render([models.Product.from_db(r) for r in rows]) + + +@bp.get('/merchants//products/') +def get_product(merchant_id, product_id): + row = central_api.get_product(request.environ['context'], + 
product_id) + + return render(models.Product.from_db(row)) + + +@bp.put('/merchants//products/') +def update_product(merchant_id, product_id): + data = request_data(models.Product) + + row = central_api.update_product( + request.environ['context'], + product_id, + data) + + return render(models.Product.from_db(row)) + + +@bp.delete('/merchants//products/') +def delete_product(merchant_id, product_id): + central_api.delete_product(request.environ['context'], product_id) + return render() + + +# Invoices +@bp.post('/merchants//invoices') +def create_invoice(merchant_id): + data = request_data(models.Invoice) + + row = central_api.create_invoice( + request.environ['context'], + merchant_id, + data) + + return render(models.Invoice.from_db(row)) + + +@bp.get('/merchants//invoices') +def list_invoices(merchant_id): + rows = central_api.list_invoices(request.environ['context']) + + return render([models.Invoice.from_db(r) for r in rows]) + + +@bp.get('/merchants//invoices/') +def get_invoice(merchant_id, invoice_id): + row = central_api.get_invoice(request.environ['context'], + invoice_id) + + return render(models.Invoice.from_db(row)) + + +@bp.put('/merchants//invoices/') +def update_invoice(merchant_id, invoice_id): + data = request_data(models.Invoice) + + row = central_api.update_invoice( + request.environ['context'], + invoice_id, + data) + + return render(models.Invoice.from_db(row)) + + +@bp.delete('/merchants//invoices/') +def delete_invoice(merchant_id, invoice_id): + central_api.delete_invoice(request.environ['context'], invoice_id) + return render() + + +# Subscription +@bp.post('/merchants//subscriptions') +def create_subscription(merchant_id): + data = request_data(models.Invoice) + + row = central_api.create_subscription( + request.environ['context'], + merchant_id, + data) + + return render(models.Invoice.from_db(row)) + + +@bp.get('/merchants//subscriptions') +def list_subscriptions(merchant_id): + rows = 
central_api.list_subscriptions(request.environ['context']) + + return render([models.Invoice.from_db(r) for r in rows]) + + +@bp.get('/merchants//subscriptions/') +def get_subscription(merchant_id, subscription_id): + row = central_api.get_subscription(request.environ['context'], + subscription_id) + + return render(models.Invoice.from_db(row)) + + +@bp.put('/merchants//subscriptions/') +def update_subscription(merchant_id, subscription_id): + data = request_data(models.Invoice) + + row = central_api.update_subscription( + request.environ['context'], + subscription_id, + data) + + return render(models.Invoice.from_db(row)) + + +@bp.delete('/merchants//subscriptions/') +def delete_subscription(merchant_id, subscription_id): + central_api.delete_subscription( + request.environ['context'], + subscription_id) + return render() diff --git a/billingstack/api/versions.py b/billingstack/api/versions.py new file mode 100644 index 0000000..5b94e41 --- /dev/null +++ b/billingstack/api/versions.py @@ -0,0 +1,33 @@ +# Copyright 2012 Hewlett-Packard Development Company, L.P. All Rights Reserved. +# +# Author: Kiall Mac Innes +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +# Copied: Moniker +import flask + + +def factory(global_config, **local_conf): + app = flask.Flask('billingstack.api.versions') + + @app.route('/', methods=['GET']) + def version_list(): + return flask.jsonify({ + "versions": [{ + "id": "v1", + "status": "CURRENT" + }] + }) + + return app diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index c93a1b4..ffd039a 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -22,8 +22,8 @@ def __init__(self): def create_currency(self, ctxt, values): return self.call(ctxt, self.make_msg('create_currency', values=values)) - def list_currency(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_currency', + def list_currencies(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_currencies', criterion=criterion)) def get_currency(self, ctxt, id_): @@ -42,8 +42,8 @@ def delete_currency(self, ctxt, id_): def create_language(self, ctxt, values): return self.call(ctxt, self.make_msg('create_language', values=values)) - def list_language(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_language', + def list_languages(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_languages', criterion=criterion)) def get_language(self, ctxt, id_): @@ -61,8 +61,8 @@ def create_invoice_state(self, ctxt, values): return self.call(ctxt, self.make_msg('create_invoice_state', values=values)) - def list_invoice_state(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoice_state', + def list_invoice_states(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_invoice_states', criterion=criterion)) def get_invoice_state(self, ctxt, id_): @@ -91,20 +91,20 @@ def delete_contact_info(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_contact_info', id_=id_)) # PGP - def list_pg_provider(self, ctxt, criterion=None): - return self.call(ctxt, 
self.make_msg('list_pg_provider', + def list_pg_providers(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_pg_providers', criterion=criterion)) def get_pg_provider(self, ctxt, id_): return self.call(ctxt, self.make_msg('get_pg_provider', id_=id_)) # PGM - def list_pg_method(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_method', + def list_pg_methods(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_pg_methods', criterion=criterion)) def get_pg_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('list_pg_method', id_=id_)) + return self.call(ctxt, self.make_msg('list_pg_methods', id_=id_)) # PGC def create_pg_config(self, ctxt, merchant_id, provider_id, values): @@ -112,8 +112,8 @@ def create_pg_config(self, ctxt, merchant_id, provider_id, values): merchant_id=merchant_id, provider_id=provider_id, values=values)) - def list_pg_config(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_config', + def list_pg_configs(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_pg_configs', criterion=criterion)) def get_pg_config(self, ctxt, id_): @@ -132,8 +132,8 @@ def create_payment_method(self, ctxt, customer_id, pg_method_id, values): customer_id=customer_id, pg_method_id=pg_method_id, values=values)) - def list_payment_method(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_payment_method', + def list_payment_methods(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_payment_methods', criterion=criterion)) def get_payment_method(self, ctxt, id_): @@ -150,8 +150,8 @@ def delete_payment_method(self, ctxt, id_): def create_merchant(self, ctxt, values): return self.call(ctxt, self.make_msg('create_merchant', values=values)) - def list_merchant(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_merchant', + def list_merchants(self, ctxt, criterion=None): + return 
self.call(ctxt, self.make_msg('list_merchants', criterion=criterion)) def get_merchant(self, ctxt, id_): @@ -170,8 +170,8 @@ def create_customer(self, ctxt, merchant_id, values): return self.call(ctxt, self.make_msg('create_customer', merchant_id=merchant_id, values=values)) - def list_customer(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_customer', + def list_customers(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_customers', criterion=criterion)) def get_customer(self, ctxt, id_): @@ -189,8 +189,9 @@ def create_plan(self, ctxt, merchant_id, values): return self.call(ctxt, self.make_msg('create_plan', merchant_id=merchant_id, values=values)) - def list_plan(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_plan', criterion=criterion)) + def list_plans(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_plans', + criterion=criterion)) def get_plan(self, ctxt, id_): return self.call(ctxt, self.make_msg('get_plan', id_=id_)) @@ -207,8 +208,8 @@ def create_plan_item(self, ctxt, values): return self.call(ctxt, self.make_msg('create_plan_item', values=values)) - def list_plan_item(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_plan_item', + def list_plan_items(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_plan_items', criterion=criterion)) def get_plan_item(self, ctxt, id_): @@ -226,8 +227,8 @@ def create_product(self, ctxt, merchant_id, values): return self.call(ctxt, self.make_msg('create_product', merchant_id=merchant_id, values=values)) - def list_product(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_product', + def list_products(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_products', criterion=criterion)) def get_product(self, ctxt, id_): @@ -245,8 +246,8 @@ def create_invoice(self, ctxt, merchant_id, values): return self.call(ctxt, 
self.make_msg('create_invoice', merchant_id=merchant_id, values=values)) - def list_invoice(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoice', + def list_invoices(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_invoices', criterion=criterion)) def get_invoice(self, ctxt, id_): @@ -264,8 +265,8 @@ def create_subscription(self, ctxt, merchant_id, values): return self.call(ctxt, self.make_msg('create_subscription', merchant_id=merchant_id, values=values)) - def list_subscription(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_subscription', + def list_subscriptions(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_subscriptions', criterion=criterion)) def get_subscription(self, ctxt, id_): @@ -277,3 +278,6 @@ def update_subscription(self, ctxt, id_, values): def delete_subscription(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_subscription', id_=id_)) + + +central_api = CentralAPI() diff --git a/billingstack/openstack/common/sslutils.py b/billingstack/openstack/common/sslutils.py new file mode 100644 index 0000000..af20a22 --- /dev/null +++ b/billingstack/openstack/common/sslutils.py @@ -0,0 +1,80 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2013 IBM +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import os +import ssl + +from oslo.config import cfg + +from billingstack.openstack.common.gettextutils import _ + + +ssl_opts = [ + cfg.StrOpt('ca_file', + default=None, + help="CA certificate file to use to verify " + "connecting clients"), + cfg.StrOpt('cert_file', + default=None, + help="Certificate file to use when starting " + "the server securely"), + cfg.StrOpt('key_file', + default=None, + help="Private key file to use when starting " + "the server securely"), +] + + +CONF = cfg.CONF +CONF.register_opts(ssl_opts, "ssl") + + +def is_enabled(): + cert_file = CONF.ssl.cert_file + key_file = CONF.ssl.key_file + ca_file = CONF.ssl.ca_file + use_ssl = cert_file or key_file + + if cert_file and not os.path.exists(cert_file): + raise RuntimeError(_("Unable to find cert_file : %s") % cert_file) + + if ca_file and not os.path.exists(ca_file): + raise RuntimeError(_("Unable to find ca_file : %s") % ca_file) + + if key_file and not os.path.exists(key_file): + raise RuntimeError(_("Unable to find key_file : %s") % key_file) + + if use_ssl and (not cert_file or not key_file): + raise RuntimeError(_("When running server in SSL mode, you must " + "specify both a cert_file and key_file " + "option value in your configuration file")) + + return use_ssl + + +def wrap(sock): + ssl_kwargs = { + 'server_side': True, + 'certfile': CONF.ssl.cert_file, + 'keyfile': CONF.ssl.key_file, + 'cert_reqs': ssl.CERT_NONE, + } + + if CONF.ssl.ca_file: + ssl_kwargs['ca_certs'] = CONF.ssl.ca_file + ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED + + return ssl.wrap_socket(sock, **ssl_kwargs) diff --git a/billingstack/openstack/common/wsgi.py b/billingstack/openstack/common/wsgi.py new file mode 100644 index 0000000..78d59d5 --- /dev/null +++ b/billingstack/openstack/common/wsgi.py @@ -0,0 +1,797 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Utility methods for working with WSGI servers.""" + +import eventlet +eventlet.patcher.monkey_patch(all=False, socket=True) + +import datetime +import errno +import socket +import sys +import time + +import eventlet.wsgi +from oslo.config import cfg +import routes +import routes.middleware +import webob.dec +import webob.exc +from xml.dom import minidom +from xml.parsers import expat + +from billingstack.openstack.common import exception +from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common import jsonutils +from billingstack.openstack.common import log as logging +from billingstack.openstack.common import service +from billingstack.openstack.common import sslutils +from billingstack.openstack.common import xmlutils + +socket_opts = [ + cfg.IntOpt('backlog', + default=4096, + help="Number of backlog requests to configure the socket with"), + cfg.IntOpt('tcp_keepidle', + default=600, + help="Sets the value of TCP_KEEPIDLE in seconds for each " + "server socket. Not supported on OS X."), +] + +CONF = cfg.CONF +CONF.register_opts(socket_opts) + +LOG = logging.getLogger(__name__) + + +def run_server(application, port): + """Run a WSGI server with the given application.""" + sock = eventlet.listen(('0.0.0.0', port)) + eventlet.wsgi.server(sock, application) + + +class Service(service.Service): + """ + Provides a Service API for wsgi servers. 
+ + This gives us the ability to launch wsgi servers with the + Launcher classes in service.py. + """ + + def __init__(self, application, port, + host='0.0.0.0', backlog=4096, threads=1000): + self.application = application + self._port = port + self._host = host + self._backlog = backlog if backlog else CONF.backlog + super(Service, self).__init__(threads) + + def _get_socket(self, host, port, backlog): + # TODO(dims): eventlet's green dns/socket module does not actually + # support IPv6 in getaddrinfo(). We need to get around this in the + # future or monitor upstream for a fix + info = socket.getaddrinfo(host, + port, + socket.AF_UNSPEC, + socket.SOCK_STREAM)[0] + family = info[0] + bind_addr = info[-1] + + sock = None + retry_until = time.time() + 30 + while not sock and time.time() < retry_until: + try: + sock = eventlet.listen(bind_addr, + backlog=backlog, + family=family) + if sslutils.is_enabled(): + sock = sslutils.wrap(sock) + + except socket.error, err: + if err.args[0] != errno.EADDRINUSE: + raise + eventlet.sleep(0.1) + if not sock: + raise RuntimeError(_("Could not bind to %(host)s:%(port)s " + "after trying for 30 seconds") % + {'host': host, 'port': port}) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + # sockets can hang around forever without keepalive + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + + # This option isn't available in the OS X version of eventlet + if hasattr(socket, 'TCP_KEEPIDLE'): + sock.setsockopt(socket.IPPROTO_TCP, + socket.TCP_KEEPIDLE, + CONF.tcp_keepidle) + + return sock + + def start(self): + """Start serving this service using the provided server instance. 
+ + :returns: None + + """ + super(Service, self).start() + self._socket = self._get_socket(self._host, self._port, self._backlog) + self.tg.add_thread(self._run, self.application, self._socket) + + @property + def backlog(self): + return self._backlog + + @property + def host(self): + return self._socket.getsockname()[0] if self._socket else self._host + + @property + def port(self): + return self._socket.getsockname()[1] if self._socket else self._port + + def stop(self): + """Stop serving this API. + + :returns: None + + """ + super(Service, self).stop() + + def _run(self, application, socket): + """Start a WSGI server in a new green thread.""" + logger = logging.getLogger('eventlet.wsgi') + eventlet.wsgi.server(socket, + application, + custom_pool=self.tg.pool, + log=logging.WritableLogger(logger)) + + +class Middleware(object): + """ + Base WSGI middleware wrapper. These classes require an application to be + initialized that will be called next. By default the middleware will + simply call its wrapped app, or you can override __call__ to customize its + behavior. + """ + + def __init__(self, application): + self.application = application + + def process_request(self, req): + """ + Called on each request. + + If this returns None, the next application down the stack will be + executed. If it returns a response then that response will be returned + and execution will stop here. + """ + return None + + def process_response(self, response): + """Do whatever you'd like to the response.""" + return response + + @webob.dec.wsgify + def __call__(self, req): + response = self.process_request(req) + if response: + return response + response = req.get_response(self.application) + return self.process_response(response) + + +class Debug(Middleware): + """ + Helper class that can be inserted into any WSGI application chain + to get information about the request and response. 
+ """ + + @webob.dec.wsgify + def __call__(self, req): + print ("*" * 40) + " REQUEST ENVIRON" + for key, value in req.environ.items(): + print key, "=", value + print + resp = req.get_response(self.application) + + print ("*" * 40) + " RESPONSE HEADERS" + for (key, value) in resp.headers.iteritems(): + print key, "=", value + print + + resp.app_iter = self.print_generator(resp.app_iter) + + return resp + + @staticmethod + def print_generator(app_iter): + """ + Iterator that prints the contents of a wrapper string iterator + when iterated. + """ + print ("*" * 40) + " BODY" + for part in app_iter: + sys.stdout.write(part) + sys.stdout.flush() + yield part + print + + +class Router(object): + + """ + WSGI middleware that maps incoming requests to WSGI apps. + """ + + def __init__(self, mapper): + """ + Create a router for the given routes.Mapper. + + Each route in `mapper` must specify a 'controller', which is a + WSGI app to call. You'll probably want to specify an 'action' as + well and have your controller be a wsgi.Controller, who will route + the request to the action method. + + Examples: + mapper = routes.Mapper() + sc = ServerController() + + # Explicit mapping of one route to a controller+action + mapper.connect(None, "/svrlist", controller=sc, action="list") + + # Actions are all implicitly defined + mapper.resource("server", "servers", controller=sc) + + # Pointing to an arbitrary WSGI app. You can specify the + # {path_info:.*} parameter so the target app can be handed just that + # section of the URL. + mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp()) + """ + self.map = mapper + self._router = routes.middleware.RoutesMiddleware(self._dispatch, + self.map) + + @webob.dec.wsgify + def __call__(self, req): + """ + Route the incoming request to a controller based on self.map. + If no match, return a 404. 
+ """ + return self._router + + @staticmethod + @webob.dec.wsgify + def _dispatch(req): + """ + Called by self._router after matching the incoming request to a route + and putting the information into req.environ. Either returns 404 + or the routed WSGI app's response. + """ + match = req.environ['wsgiorg.routing_args'][1] + if not match: + return webob.exc.HTTPNotFound() + app = match['controller'] + return app + + +class Request(webob.Request): + """Add some Openstack API-specific logic to the base webob.Request.""" + + default_request_content_types = ('application/json', 'application/xml') + default_accept_types = ('application/json', 'application/xml') + default_accept_type = 'application/json' + + def best_match_content_type(self, supported_content_types=None): + """Determine the requested response content-type. + + Based on the query extension then the Accept header. + Defaults to default_accept_type if we don't find a preference + + """ + supported_content_types = (supported_content_types or + self.default_accept_types) + + parts = self.path.rsplit('.', 1) + if len(parts) > 1: + ctype = 'application/{0}'.format(parts[1]) + if ctype in supported_content_types: + return ctype + + bm = self.accept.best_match(supported_content_types) + return bm or self.default_accept_type + + def get_content_type(self, allowed_content_types=None): + """Determine content type of the request body. + + Does not do any body introspection, only checks header + + """ + if "Content-Type" not in self.headers: + return None + + content_type = self.content_type + allowed_content_types = (allowed_content_types or + self.default_request_content_types) + + if content_type not in allowed_content_types: + raise exception.InvalidContentType(content_type=content_type) + return content_type + + +class Resource(object): + """ + WSGI app that handles (de)serialization and controller dispatch. 
+ + Reads routing information supplied by RoutesMiddleware and calls + the requested action method upon its deserializer, controller, + and serializer. Those three objects may implement any of the basic + controller action methods (create, update, show, index, delete) + along with any that may be specified in the api router. A 'default' + method may also be implemented to be used in place of any + non-implemented actions. Deserializer methods must accept a request + argument and return a dictionary. Controller methods must accept a + request argument. Additionally, they must also accept keyword + arguments that represent the keys returned by the Deserializer. They + may raise a webob.exc exception or return a dict, which will be + serialized by requested content type. + """ + def __init__(self, controller, deserializer=None, serializer=None): + """ + :param controller: object that implement methods created by routes lib + :param deserializer: object that supports webob request deserialization + through controller-like actions + :param serializer: object that supports webob response serialization + through controller-like actions + """ + self.controller = controller + self.serializer = serializer or ResponseSerializer() + self.deserializer = deserializer or RequestDeserializer() + + @webob.dec.wsgify(RequestClass=Request) + def __call__(self, request): + """WSGI method that controls (de)serialization and method dispatch.""" + + try: + action, action_args, accept = self.deserialize_request(request) + except exception.InvalidContentType: + msg = _("Unsupported Content-Type") + return webob.exc.HTTPUnsupportedMediaType(explanation=msg) + except exception.MalformedRequestBody: + msg = _("Malformed request body") + return webob.exc.HTTPBadRequest(explanation=msg) + + action_result = self.execute_action(action, request, **action_args) + try: + return self.serialize_response(action, action_result, accept) + # return unserializable result (typically a webob exc) + except 
Exception: + return action_result + + def deserialize_request(self, request): + return self.deserializer.deserialize(request) + + def serialize_response(self, action, action_result, accept): + return self.serializer.serialize(action_result, accept, action) + + def execute_action(self, action, request, **action_args): + return self.dispatch(self.controller, action, request, **action_args) + + def dispatch(self, obj, action, *args, **kwargs): + """Find action-specific method on self and call it.""" + try: + method = getattr(obj, action) + except AttributeError: + method = getattr(obj, 'default') + + return method(*args, **kwargs) + + def get_action_args(self, request_environment): + """Parse dictionary created by routes library.""" + try: + args = request_environment['wsgiorg.routing_args'][1].copy() + except Exception: + return {} + + try: + del args['controller'] + except KeyError: + pass + + try: + del args['format'] + except KeyError: + pass + + return args + + +class ActionDispatcher(object): + """Maps method name to local methods through action name.""" + + def dispatch(self, *args, **kwargs): + """Find and call local method.""" + action = kwargs.pop('action', 'default') + action_method = getattr(self, str(action), self.default) + return action_method(*args, **kwargs) + + def default(self, data): + raise NotImplementedError() + + +class DictSerializer(ActionDispatcher): + """Default request body serialization""" + + def serialize(self, data, action='default'): + return self.dispatch(data, action=action) + + def default(self, data): + return "" + + +class JSONDictSerializer(DictSerializer): + """Default JSON request body serialization""" + + def default(self, data): + def sanitizer(obj): + if isinstance(obj, datetime.datetime): + _dtime = obj - datetime.timedelta(microseconds=obj.microsecond) + return _dtime.isoformat() + return unicode(obj) + return jsonutils.dumps(data, default=sanitizer) + + +class XMLDictSerializer(DictSerializer): + + def __init__(self, 
metadata=None, xmlns=None): + """ + :param metadata: information needed to deserialize xml into + a dictionary. + :param xmlns: XML namespace to include with serialized xml + """ + super(XMLDictSerializer, self).__init__() + self.metadata = metadata or {} + self.xmlns = xmlns + + def default(self, data): + # We expect data to contain a single key which is the XML root. + root_key = data.keys()[0] + doc = minidom.Document() + node = self._to_xml_node(doc, self.metadata, root_key, data[root_key]) + + return self.to_xml_string(node) + + def to_xml_string(self, node, has_atom=False): + self._add_xmlns(node, has_atom) + return node.toprettyxml(indent=' ', encoding='UTF-8') + + #NOTE (ameade): the has_atom should be removed after all of the + # xml serializers and view builders have been updated to the current + # spec that required all responses include the xmlns:atom, the has_atom + # flag is to prevent current tests from breaking + def _add_xmlns(self, node, has_atom=False): + if self.xmlns is not None: + node.setAttribute('xmlns', self.xmlns) + if has_atom: + node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom") + + def _to_xml_node(self, doc, metadata, nodename, data): + """Recursive method to convert data members to XML nodes.""" + result = doc.createElement(nodename) + + # Set the xml namespace if one is specified + # TODO(justinsb): We could also use prefixes on the keys + xmlns = metadata.get('xmlns', None) + if xmlns: + result.setAttribute('xmlns', xmlns) + + #TODO(bcwaldon): accomplish this without a type-check + if type(data) is list: + collections = metadata.get('list_collections', {}) + if nodename in collections: + metadata = collections[nodename] + for item in data: + node = doc.createElement(metadata['item_name']) + node.setAttribute(metadata['item_key'], str(item)) + result.appendChild(node) + return result + singular = metadata.get('plurals', {}).get(nodename, None) + if singular is None: + if nodename.endswith('s'): + singular = nodename[:-1] 
+ else: + singular = 'item' + for item in data: + node = self._to_xml_node(doc, metadata, singular, item) + result.appendChild(node) + #TODO(bcwaldon): accomplish this without a type-check + elif type(data) is dict: + collections = metadata.get('dict_collections', {}) + if nodename in collections: + metadata = collections[nodename] + for k, v in data.items(): + node = doc.createElement(metadata['item_name']) + node.setAttribute(metadata['item_key'], str(k)) + text = doc.createTextNode(str(v)) + node.appendChild(text) + result.appendChild(node) + return result + attrs = metadata.get('attributes', {}).get(nodename, {}) + for k, v in data.items(): + if k in attrs: + result.setAttribute(k, str(v)) + else: + node = self._to_xml_node(doc, metadata, k, v) + result.appendChild(node) + else: + # Type is atom + node = doc.createTextNode(str(data)) + result.appendChild(node) + return result + + def _create_link_nodes(self, xml_doc, links): + link_nodes = [] + for link in links: + link_node = xml_doc.createElement('atom:link') + link_node.setAttribute('rel', link['rel']) + link_node.setAttribute('href', link['href']) + if 'type' in link: + link_node.setAttribute('type', link['type']) + link_nodes.append(link_node) + return link_nodes + + +class ResponseHeadersSerializer(ActionDispatcher): + """Default response headers serialization""" + + def serialize(self, response, data, action): + self.dispatch(response, data, action=action) + + def default(self, response, data): + response.status_int = 200 + + +class ResponseSerializer(object): + """Encode the necessary pieces into a response object""" + + def __init__(self, body_serializers=None, headers_serializer=None): + self.body_serializers = { + 'application/xml': XMLDictSerializer(), + 'application/json': JSONDictSerializer(), + } + self.body_serializers.update(body_serializers or {}) + + self.headers_serializer = (headers_serializer or + ResponseHeadersSerializer()) + + def serialize(self, response_data, content_type, 
action='default'): + """Serialize a dict into a string and wrap in a wsgi.Request object. + + :param response_data: dict produced by the Controller + :param content_type: expected mimetype of serialized response body + + """ + response = webob.Response() + self.serialize_headers(response, response_data, action) + self.serialize_body(response, response_data, content_type, action) + return response + + def serialize_headers(self, response, data, action): + self.headers_serializer.serialize(response, data, action) + + def serialize_body(self, response, data, content_type, action): + response.headers['Content-Type'] = content_type + if data is not None: + serializer = self.get_body_serializer(content_type) + response.body = serializer.serialize(data, action) + + def get_body_serializer(self, content_type): + try: + return self.body_serializers[content_type] + except (KeyError, TypeError): + raise exception.InvalidContentType(content_type=content_type) + + +class RequestHeadersDeserializer(ActionDispatcher): + """Default request headers deserializer""" + + def deserialize(self, request, action): + return self.dispatch(request, action=action) + + def default(self, request): + return {} + + +class RequestDeserializer(object): + """Break up a Request object into more useful pieces.""" + + def __init__(self, body_deserializers=None, headers_deserializer=None, + supported_content_types=None): + + self.supported_content_types = supported_content_types + + self.body_deserializers = { + 'application/xml': XMLDeserializer(), + 'application/json': JSONDeserializer(), + } + self.body_deserializers.update(body_deserializers or {}) + + self.headers_deserializer = (headers_deserializer or + RequestHeadersDeserializer()) + + def deserialize(self, request): + """Extract necessary pieces of the request. 
+ + :param request: Request object + :returns: tuple of (expected controller action name, dictionary of + keyword arguments to pass to the controller, the expected + content type of the response) + + """ + action_args = self.get_action_args(request.environ) + action = action_args.pop('action', None) + + action_args.update(self.deserialize_headers(request, action)) + action_args.update(self.deserialize_body(request, action)) + + accept = self.get_expected_content_type(request) + + return (action, action_args, accept) + + def deserialize_headers(self, request, action): + return self.headers_deserializer.deserialize(request, action) + + def deserialize_body(self, request, action): + if not len(request.body) > 0: + LOG.debug(_("Empty body provided in request")) + return {} + + try: + content_type = request.get_content_type() + except exception.InvalidContentType: + LOG.debug(_("Unrecognized Content-Type provided in request")) + raise + + if content_type is None: + LOG.debug(_("No Content-Type provided in request")) + return {} + + try: + deserializer = self.get_body_deserializer(content_type) + except exception.InvalidContentType: + LOG.debug(_("Unable to deserialize body as provided Content-Type")) + raise + + return deserializer.deserialize(request.body, action) + + def get_body_deserializer(self, content_type): + try: + return self.body_deserializers[content_type] + except (KeyError, TypeError): + raise exception.InvalidContentType(content_type=content_type) + + def get_expected_content_type(self, request): + return request.best_match_content_type(self.supported_content_types) + + def get_action_args(self, request_environment): + """Parse dictionary created by routes library.""" + try: + args = request_environment['wsgiorg.routing_args'][1].copy() + except Exception: + return {} + + try: + del args['controller'] + except KeyError: + pass + + try: + del args['format'] + except KeyError: + pass + + return args + + +class TextDeserializer(ActionDispatcher): + 
"""Default request body deserialization""" + + def deserialize(self, datastring, action='default'): + return self.dispatch(datastring, action=action) + + def default(self, datastring): + return {} + + +class JSONDeserializer(TextDeserializer): + + def _from_json(self, datastring): + try: + return jsonutils.loads(datastring) + except ValueError: + msg = _("cannot understand JSON") + raise exception.MalformedRequestBody(reason=msg) + + def default(self, datastring): + return {'body': self._from_json(datastring)} + + +class XMLDeserializer(TextDeserializer): + + def __init__(self, metadata=None): + """ + :param metadata: information needed to deserialize xml into + a dictionary. + """ + super(XMLDeserializer, self).__init__() + self.metadata = metadata or {} + + def _from_xml(self, datastring): + plurals = set(self.metadata.get('plurals', {})) + + try: + node = xmlutils.safe_minidom_parse_string(datastring).childNodes[0] + return {node.nodeName: self._from_xml_node(node, plurals)} + except expat.ExpatError: + msg = _("cannot understand XML") + raise exception.MalformedRequestBody(reason=msg) + + def _from_xml_node(self, node, listnames): + """Convert a minidom node to a simple Python type. + + :param listnames: list of XML node names whose subnodes should + be considered list items. 
+ + """ + + if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3: + return node.childNodes[0].nodeValue + elif node.nodeName in listnames: + return [self._from_xml_node(n, listnames) for n in node.childNodes] + else: + result = dict() + for attr in node.attributes.keys(): + result[attr] = node.attributes[attr].nodeValue + for child in node.childNodes: + if child.nodeType != node.TEXT_NODE: + result[child.nodeName] = self._from_xml_node(child, + listnames) + return result + + def find_first_child_named(self, parent, name): + """Search a nodes children for the first child with a given name""" + for node in parent.childNodes: + if node.nodeName == name: + return node + return None + + def find_children_named(self, parent, name): + """Return all of a nodes children who have the given name""" + for node in parent.childNodes: + if node.nodeName == name: + yield node + + def extract_text(self, node): + """Get the text field contained by the given node""" + if len(node.childNodes) == 1: + child = node.childNodes[0] + if child.nodeType == child.TEXT_NODE: + return child.nodeValue + return "" + + def default(self, datastring): + return {'body': self._from_xml(datastring)} diff --git a/billingstack/openstack/common/xmlutils.py b/billingstack/openstack/common/xmlutils.py new file mode 100644 index 0000000..3370048 --- /dev/null +++ b/billingstack/openstack/common/xmlutils.py @@ -0,0 +1,74 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2013 IBM +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +from xml.dom import minidom +from xml.parsers import expat +from xml import sax +from xml.sax import expatreader + + +class ProtectedExpatParser(expatreader.ExpatParser): + """An expat parser which disables DTD's and entities by default.""" + + def __init__(self, forbid_dtd=True, forbid_entities=True, + *args, **kwargs): + # Python 2.x old style class + expatreader.ExpatParser.__init__(self, *args, **kwargs) + self.forbid_dtd = forbid_dtd + self.forbid_entities = forbid_entities + + def start_doctype_decl(self, name, sysid, pubid, has_internal_subset): + raise ValueError("Inline DTD forbidden") + + def entity_decl(self, entityName, is_parameter_entity, value, base, + systemId, publicId, notationName): + raise ValueError(" entity declaration forbidden") + + def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): + # expat 1.2 + raise ValueError(" unparsed entity forbidden") + + def external_entity_ref(self, context, base, systemId, publicId): + raise ValueError(" external entity forbidden") + + def notation_decl(self, name, base, sysid, pubid): + raise ValueError(" notation forbidden") + + def reset(self): + expatreader.ExpatParser.reset(self) + if self.forbid_dtd: + self._parser.StartDoctypeDeclHandler = self.start_doctype_decl + self._parser.EndDoctypeDeclHandler = None + if self.forbid_entities: + self._parser.EntityDeclHandler = self.entity_decl + self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl + self._parser.ExternalEntityRefHandler = self.external_entity_ref + self._parser.NotationDeclHandler = self.notation_decl + try: + self._parser.SkippedEntityHandler = None + except AttributeError: + # some pyexpat versions do not support SkippedEntity + pass + + +def safe_minidom_parse_string(xml_string): + """Parse an XML string using minidom safely. 
+ + """ + try: + return minidom.parseString(xml_string, parser=ProtectedExpatParser()) + except sax.SAXParseException: + raise expat.ExpatError() diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index ec2b4b4..e91f109 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -1,3 +1,16 @@ +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License import operator from sqlalchemy.orm import exc diff --git a/billingstack/sqlalchemy/model_base.py b/billingstack/sqlalchemy/model_base.py index 4629048..bbd928f 100644 --- a/billingstack/sqlalchemy/model_base.py +++ b/billingstack/sqlalchemy/model_base.py @@ -1,6 +1,7 @@ # Copyright 2012 Hewlett-Packard Development Company, L.P. # # Author: Patrick Galbraith +# Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -13,7 +14,8 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
- +# +# Copied: Moniker from sqlalchemy import Column, DateTime, Unicode, UnicodeText from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import object_mapper diff --git a/billingstack/sqlalchemy/session.py b/billingstack/sqlalchemy/session.py index b83abb3..338d586 100644 --- a/billingstack/sqlalchemy/session.py +++ b/billingstack/sqlalchemy/session.py @@ -13,7 +13,8 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. - +# +# Copied: Moniker """Session Handling for SQLAlchemy backend.""" import re diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index c4da5ac..c3af19b 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -87,7 +87,7 @@ def create_currency(self, ctxt, values): self._save(row) return dict(row) - def list_currency(self, ctxt, **kw): + def list_currencies(self, ctxt, **kw): rows = self._list(models.Currency, **kw) return map(dict, rows) @@ -112,7 +112,7 @@ def create_language(self, ctxt, values): self._save(row) return dict(row) - def list_language(self, ctxt, **kw): + def list_languages(self, ctxt, **kw): rows = self._list(models.Language, **kw) return map(dict, rows) @@ -136,7 +136,7 @@ def create_invoice_state(self, ctxt, values): self._save(row) return dict(row) - def list_invoice_state(self, ctxt, **kw): + def list_invoice_states(self, ctxt, **kw): rows = self._list(models.InvoiceState, **kw) return map(dict, rows) @@ -208,7 +208,7 @@ def pg_provider_register(self, ctxt, values, methods=[]): self._save(provider) return self._dict(provider, extra=['methods']) - def list_pg_provider(self, ctxt, **kw): + def list_pg_providers(self, ctxt, **kw): """ List available PG Providers """ @@ -237,7 +237,7 @@ def _set_provider_methods(self, ctxt, provider, config_methods): """ Helper method for setting 
the Methods for a Provider """ - rows = self.list_pg_method(ctxt, criterion={"owner_id": None}) + rows = self.list_pg_methods(ctxt, criterion={"owner_id": None}) system_methods = self._kv_rows(rows, key=models.PGMethod.make_key) existing = self._get_provider_methods(provider) @@ -278,7 +278,7 @@ def create_pg_method(self, ctxt, values): self._save(row) return dict(row) - def list_pg_method(self, ctxt, **kw): + def list_pg_methods(self, ctxt, **kw): return self._list(models.PGMethod, **kw) def get_pg_method(self, ctxt, id_): @@ -303,7 +303,7 @@ def create_pg_config(self, ctxt, merchant_id, provider_id, values): self._save(row) return dict(row) - def list_pg_config(self, ctxt, **kw): + def list_pg_configs(self, ctxt, **kw): rows = self._list(models.PGConfig, **kw) return map(dict, rows) @@ -333,7 +333,7 @@ def create_payment_method(self, ctxt, customer_id, pg_method_id, values): self._save(row) return self._dict(row, extra=['provider_method']) - def list_payment_method(self, ctxt, **kw): + def list_payment_methods(self, ctxt, **kw): rows = self._list(models.PaymentMethod, **kw) return [self._dict(row, extra=['provider_method']) for row in rows] @@ -355,7 +355,7 @@ def create_merchant(self, ctxt, values): self._save(row) return dict(row) - def list_merchant(self, ctxt, **kw): + def list_merchants(self, ctxt, **kw): rows = self._list(models.Merchant, **kw) return map(dict, rows) @@ -393,7 +393,7 @@ def create_customer(self, ctxt, merchant_id, values): self._save(customer) return self._customer(customer) - def list_customer(self, ctxt, **kw): + def list_customers(self, ctxt, **kw): rows = self._list(models.Customer, **kw) return map(dict, rows) @@ -442,7 +442,7 @@ def create_plan(self, ctxt, merchant_id, values): self._save(plan) return self._plan(plan) - def list_plan(self, ctxt, **kw): + def list_plans(self, ctxt, **kw): """ List Plan @@ -498,7 +498,7 @@ def _update_plan_item(self, item, values, save=True): row.update(values) return self._save(row, save=save) - def 
list_plan_item(self, ctxt, **kw): + def list_plan_items(self, ctxt, **kw): return self._list(models.PlanItem, **kw) def get_plan_item(self, ctxt, id_): @@ -535,7 +535,7 @@ def create_product(self, ctxt, merchant_id, values): self._save(product) return self._product(product) - def list_product(self, ctxt, **kw): + def list_products(self, ctxt, **kw): """ List Products @@ -598,7 +598,7 @@ def create_invoice(self, ctxt, merchant_id, values): self._save(invoice) return self._invoice(invoice) - def list_invoice(self, ctxt, **kw): + def list_invoices(self, ctxt, **kw): """ List Invoices """ @@ -655,7 +655,7 @@ def create_subscription(self, ctxt, customer_id, values): self._save(subscription) return self._subscription(subscription) - def list_subscription(self, ctxt, **kw): + def list_subscriptions(self, ctxt, **kw): """ List Subscriptions diff --git a/billingstack/tests/api/__init__.py b/billingstack/tests/api/__init__.py index 8b13789..e69de29 100644 --- a/billingstack/tests/api/__init__.py +++ b/billingstack/tests/api/__init__.py @@ -1 +0,0 @@ - diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index 4915039..9668f29 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -18,11 +18,8 @@ """ Base classes for API tests. 
""" -import os - -from pecan import set_config -from pecan.testing import load_test_app - +from billingstack.api.v1 import factory +from billingstack.api.auth import NoAuthContextMiddleware from billingstack.openstack.common import jsonutils as json from billingstack.openstack.common import log from billingstack.tests.base import TestCase @@ -31,19 +28,31 @@ LOG = log.getLogger(__name__) -class PecanTestMixin(object): - PATH_PREFIX = '' +class APITestMixin(object): + PATH_PREFIX = None - path = "" + path = None def item_path(self, *args): url = self.path + '/%s' return url % args - def make_path(self, path): + def _ensure_slash(self, path): if not path.startswith('/'): path = '/' + path - return self.PATH_PREFIX + path + return path + + def make_path(self, path): + path = self._ensure_slash(path) + if self.PATH_PREFIX: + path = path + self._ensure_slash(self.PATH_PREFIX) + return path + + def load_content(self, response): + try: + response.json = json.loads(response.data) + except ValueError: + response.json = None def _query(self, queries): query_params = {'q.field': [], @@ -62,82 +71,84 @@ def _params(self, params, queries): all_params.update(self._query(queries)) return all_params - def get(self, path, headers=None, - q=[], status_code=200, **params): + def get(self, path, headers=None, q=[], status_code=200, + content_type="application/json", **params): path = self.make_path(path) all_params = self._params(params, q) LOG.debug('GET: %s %r', path, all_params) - response = self.app.get(path, - params=all_params, - headers=headers) + response = self.client.get(path, + content_type=content_type, + query_string=all_params, + headers=headers) - LOG.debug('GOT RESPONSE: %s', response) + LOG.debug('GOT RESPONSE: %s', response.data) self.assertEqual(response.status_code, status_code) + self.load_content(response) + return response def post(self, path, data, headers=None, content_type="application/json", - q=[], status_code=200): + q=[], status_code=202): path = 
self.make_path(path) LOG.debug('POST: %s %s', path, data) content = json.dumps(data) - response = self.app.post( + response = self.client.post( path, - content, + data=content, content_type=content_type, headers=headers) - LOG.debug('POST RESPONSE: %r' % response.body) + LOG.debug('POST RESPONSE: %r' % response.data) self.assertEqual(response.status_code, status_code) + self.load_content(response) + return response def put(self, path, data, headers=None, content_type="application/json", - q=[], status_code=200, **params): + q=[], status_code=202, **params): path = self.make_path(path) LOG.debug('PUT: %s %s', path, data) content = json.dumps(data) - response = self.app.put( + response = self.client.put( path, - content, + data=content, content_type=content_type, headers=headers) self.assertEqual(response.status_code, status_code) - LOG.debug('PUT RESPONSE: %r' % response.body) + LOG.debug('PUT RESPONSE: %r' % response.data) + + self.load_content(response) return response - def delete(self, path, status_code=200, headers=None, q=[], **params): + def delete(self, path, status_code=204, headers=None, q=[], **params): path = self.make_path(path) all_params = self._params(params, q) LOG.debug('DELETE: %s %r', path, all_params) - response = self.app.delete(path, params=all_params) + response = self.client.delete(path, query_string=all_params) - LOG.debug('DELETE RESPONSE: %r' % response.body) + #LOG.debug('DELETE RESPONSE: %r' % response.body) self.assertEqual(response.status_code, status_code) return response - def make_app(self, enable_acl=False): - # This is done like this because if you import load_test_app in 2 diff - # modules it will fail with a PECAN_CONFIG error. 
- return load_test_app(self.make_config(enable_acl=enable_acl)) - -class FunctionalTest(TestCase, PecanTestMixin): +class FunctionalTest(TestCase, APITestMixin): """ billingstack.api base test """ @@ -150,49 +161,10 @@ def setUp(self): self.setSamples() - self.app = self.make_app() + self.app = factory({}) + self.app.wsgi_app = NoAuthContextMiddleware(self.app.wsgi_app) + self.client = self.app.test_client() def tearDown(self): + self.central_service.stop() super(FunctionalTest, self).tearDown() - set_config({}, overwrite=True) - - - def make_config(self, enable_acl=True): - root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), - '..', - '..', - ) - ) - - return { - 'app': { - 'root': 'billingstack.api.root.RootController', - 'modules': ['billingstack.api'], - 'static_root': '%s/public' % root_dir, - 'template_path': '%s/billingstack/api/templates' % root_dir, - 'enable_acl': enable_acl, - }, - - 'logging': { - 'loggers': { - 'root': {'level': 'INFO', 'handlers': ['console']}, - 'wsme': {'level': 'INFO', 'handlers': ['console']}, - 'billingstack': {'level': 'DEBUG', - 'handlers': ['console'], - }, - }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'simple' - } - }, - 'formatters': { - 'simple': { - 'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]' - '[%(threadName)s] %(message)s') - } - }, - }, - } diff --git a/billingstack/tests/api/v1/__init__.py b/billingstack/tests/api/v1/__init__.py index 8b13789..e69de29 100644 --- a/billingstack/tests/api/v1/__init__.py +++ b/billingstack/tests/api/v1/__init__.py @@ -1 +0,0 @@ - diff --git a/billingstack/tests/api/v1/base.py b/billingstack/tests/api/v1/base.py index 61d8b14..e69de29 100644 --- a/billingstack/tests/api/v1/base.py +++ b/billingstack/tests/api/v1/base.py @@ -1,23 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2012 New Dream Network, LLC (DreamHost) -# -# Author: Doug Hellmann -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from billingstack.tests.api import base - - -class FunctionalTest(base.FunctionalTest): - PATH_PREFIX = '/v1' diff --git a/billingstack/tests/api/v1/test_currency.py b/billingstack/tests/api/v1/test_currency.py index 1e248b4..dea455e 100644 --- a/billingstack/tests/api/v1/test_currency.py +++ b/billingstack/tests/api/v1/test_currency.py @@ -19,7 +19,7 @@ import logging -from billingstack.tests.api.v1.base import FunctionalTest +from billingstack.tests.api.base import FunctionalTest LOG = logging.getLogger(__name__) @@ -35,7 +35,7 @@ def test_create_currency(self): self.assertData(fixture, resp.json) - def test_list_currency(self): + def test_list_currencies(self): resp = self.get(self.path) @@ -63,5 +63,5 @@ def test_delete_currency(self): url = self.item_path(currency['name']) self.delete(url) - data = self.central_service.list_currency(self.admin_ctxt) + data = self.central_service.list_currencies(self.admin_ctxt) self.assertLen(1, data) diff --git a/billingstack/tests/api/v1/test_customer.py b/billingstack/tests/api/v1/test_customer.py index 4b83ce2..44ef24e 100644 --- a/billingstack/tests/api/v1/test_customer.py +++ b/billingstack/tests/api/v1/test_customer.py @@ -17,7 +17,7 @@ Test Customers. 
""" -from billingstack.tests.api.v1.base import FunctionalTest +from billingstack.tests.api.base import FunctionalTest from billingstack.api.v1.models import Customer @@ -40,7 +40,7 @@ def test_create_customer(self): self.assertData(expected, resp.json) - def test_list_customer(self): + def test_list_customers(self): url = self.path % self.merchant['id'] resp = self.get(url) @@ -79,4 +79,4 @@ def test_delete_customer(self): url = self.item_path(self.merchant['id'], customer['id']) self.delete(url) - self.assertLen(0, self.central_service.list_customer(self.admin_ctxt)) + self.assertLen(0, self.central_service.list_customers(self.admin_ctxt)) diff --git a/billingstack/tests/api/v1/test_invoice_state.py b/billingstack/tests/api/v1/test_invoice_state.py index 04edb49..97c74fa 100644 --- a/billingstack/tests/api/v1/test_invoice_state.py +++ b/billingstack/tests/api/v1/test_invoice_state.py @@ -19,7 +19,7 @@ import logging -from billingstack.tests.api.v1.base import FunctionalTest +from billingstack.tests.api.base import FunctionalTest LOG = logging.getLogger(__name__) @@ -35,7 +35,7 @@ def test_create_invoice_state(self): self.assertData(fixture, resp.json) - def test_list_invoice_state(self): + def test_list_invoice_states(self): self.create_invoice_state() resp = self.get(self.path) @@ -64,5 +64,5 @@ def test_delete_invoice_state(self): url = self.item_path(state['name']) self.delete(url) - data = self.central_service.list_invoice_state(self.admin_ctxt) + data = self.central_service.list_invoice_states(self.admin_ctxt) self.assertLen(0, data) diff --git a/billingstack/tests/api/v1/test_language.py b/billingstack/tests/api/v1/test_language.py index c57d5b8..70a329f 100644 --- a/billingstack/tests/api/v1/test_language.py +++ b/billingstack/tests/api/v1/test_language.py @@ -19,7 +19,7 @@ import logging -from billingstack.tests.api.v1.base import FunctionalTest +from billingstack.tests.api.base import FunctionalTest LOG = logging.getLogger(__name__) @@ -35,7 +35,7 @@ def 
test_create_language(self): self.assertData(fixture, resp.json) - def test_list_language(self): + def test_list_languages(self): resp = self.get(self.path) @@ -63,5 +63,5 @@ def test_delete_language(self): url = self.item_path(language['name']) self.delete(url) - data = self.central_service.list_language(self.admin_ctxt) + data = self.central_service.list_languages(self.admin_ctxt) self.assertLen(1, data) diff --git a/billingstack/tests/api/v1/test_merchant.py b/billingstack/tests/api/v1/test_merchant.py index 27c27d8..647d929 100644 --- a/billingstack/tests/api/v1/test_merchant.py +++ b/billingstack/tests/api/v1/test_merchant.py @@ -17,7 +17,7 @@ Test Merchants """ -from billingstack.tests.api.v1.base import FunctionalTest +from billingstack.tests.api.base import FunctionalTest from billingstack.api.v1.models import Merchant @@ -37,7 +37,7 @@ def test_create_merchant(self): self.assertData(expected, resp.json) - def test_list_merchant(self): + def test_list_merchants(self): resp = self.get('merchants') self.assertLen(1, resp.json) @@ -57,4 +57,4 @@ def test_update_merchant(self): def test_delete_merchant(self): self.delete('merchants/' + self.merchant['id']) - self.assertLen(0, self.central_service.list_merchant(self.admin_ctxt)) + self.assertLen(0, self.central_service.list_merchants(self.admin_ctxt)) diff --git a/billingstack/tests/api/v1/test_plan.py b/billingstack/tests/api/v1/test_plan.py index b155415..0dd93d5 100644 --- a/billingstack/tests/api/v1/test_plan.py +++ b/billingstack/tests/api/v1/test_plan.py @@ -17,7 +17,7 @@ Test Plans """ -from billingstack.tests.api.v1.base import FunctionalTest +from billingstack.tests.api.base import FunctionalTest class TestPlan(FunctionalTest): @@ -33,7 +33,7 @@ def test_create_plan(self): self.assertData(fixture, resp.json) - def test_list_plan(self): + def test_list_plans(self): self.create_plan(self.merchant['id']) url = self.path % self.merchant['id'] @@ -64,4 +64,4 @@ def test_delete_plan(self): url = 
self.item_path(self.merchant['id'], plan['id']) self.delete(url) - self.assertLen(0, self.central_service.list_plan(self.admin_ctxt)) + self.assertLen(0, self.central_service.list_plans(self.admin_ctxt)) diff --git a/billingstack/tests/api/v1/test_product.py b/billingstack/tests/api/v1/test_product.py index 0ec334b..c539c0f 100644 --- a/billingstack/tests/api/v1/test_product.py +++ b/billingstack/tests/api/v1/test_product.py @@ -19,7 +19,7 @@ import logging -from billingstack.tests.api.v1.base import FunctionalTest +from billingstack.tests.api.base import FunctionalTest LOG = logging.getLogger(__name__) @@ -36,7 +36,7 @@ def test_create_product(self): self.assertData(fixture, resp.json) - def test_list_product(self): + def test_list_products(self): self.create_product(self.merchant['id']) url = self.path % self.merchant['id'] @@ -67,4 +67,4 @@ def test_delete_product(self): url = self.item_path(self.merchant['id'], product['id']) self.delete(url) - self.assertLen(0, self.central_service.list_product(self.admin_ctxt)) + self.assertLen(0, self.central_service.list_products(self.admin_ctxt)) diff --git a/billingstack/tests/identity/test_api.py b/billingstack/tests/identity/test_api.py index e42a931..bf5bff6 100644 --- a/billingstack/tests/identity/test_api.py +++ b/billingstack/tests/identity/test_api.py @@ -6,7 +6,6 @@ from billingstack.samples import get_samples from billingstack.identity.base import IdentityPlugin -from billingstack.tests.api.base import PecanTestMixin from billingstack.tests.base import BaseTestCase @@ -20,10 +19,13 @@ } -class IdentityAPITest(BaseTestCase, PecanTestMixin): +# FIXME: Remove or keep +class IdentityAPITest(BaseTestCase): """ billingstack.api base test """ + + __test__ = False PATH_PREFIX = '/v1' def setUp(self): diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index e2a9a93..7acdf76 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -249,7 
+249,7 @@ def test_get_payment_method(self): self.assertData(expected, actual) # TODO(ekarlso): Make this test more extensive? - def test_list_payment_method(self): + def test_list_payment_methods(self): # Setup a PGP with it's sample methods _, provider = self.pg_provider_register() m_id = provider['methods'][0]['id'] @@ -257,7 +257,7 @@ def test_list_payment_method(self): # Add two Customers with some methods _, customer1 = self.create_customer(self.merchant['id']) self.create_payment_method(customer1['id'], m_id) - rows = self.storage_conn.list_payment_method( + rows = self.storage_conn.list_payment_methods( self.admin_ctxt, criterion={'customer_id': customer1['id']}) self.assertLen(1, rows) @@ -266,7 +266,7 @@ def test_list_payment_method(self): self.create_payment_method(customer2['id'], m_id) self.create_payment_method(customer2['id'], m_id) - rows = self.storage_conn.list_payment_method( + rows = self.storage_conn.list_payment_methods( self.admin_ctxt, criterion={'customer_id': customer2['id']}) self.assertLen(2, rows) diff --git a/billingstack/api/root.py b/billingstack/wsgi.py similarity index 52% rename from billingstack/api/root.py rename to billingstack/wsgi.py index 3a6aab3..890c185 100644 --- a/billingstack/api/root.py +++ b/billingstack/wsgi.py @@ -1,29 +1,29 @@ -# -*- encoding: utf-8 -*- +# Copyright 2012 Managed I.T. # -# Copyright © 2012 Woorea Solutions, S.L -# -# Author: Luis Gervaso +# Author: Kiall Mac Innes # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. +# +# Copied: Moniker +from billingstack.openstack.common import wsgi -from pecan import expose -from . import v1 +class Middleware(wsgi.Middleware): + @classmethod + def factory(cls, global_config, **local_conf): + """ Used for paste app factories in paste.deploy config files """ -class RootController(object): - v1 = v1.V1Controller() + def _factory(app): + return cls(app, **local_conf) - @expose(generic=True, template='index.html') - def index(self): - # FIXME: Return version information - return dict() + return _factory diff --git a/bin/billingstack-api b/bin/billingstack-api index ce82f48..71c34c0 100644 --- a/bin/billingstack-api +++ b/bin/billingstack-api @@ -1,9 +1,7 @@ #!/usr/bin/env python -# -*- encoding: utf-8 -*- +# Copyright 2012 Managed I.T. # -# Copyright © 2012 New Dream Network, LLC (DreamHost) -# -# Author: Doug Hellmann +# Author: Kiall Mac Innes # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -16,37 +14,23 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -"""Set up the development API server. -""" -import os +# +# Copied: Moniker import sys -from wsgiref import simple_server +import eventlet from oslo.config import cfg +from billingstack.openstack.common import log as logging +from billingstack.openstack.common import service +from billingstack import utils +from billingstack.api import service as api_service -from billingstack.api import app -from billingstack import service - - -if __name__ == '__main__': - # Parse OpenStack config file and command line options, then - # configure logging. 
- service.prepare_service(sys.argv) - - # Build the WSGI app - root = app.setup_app() - - # Create the WSGI server and start it - host = cfg.CONF['service:api'].api_listen - port = int(cfg.CONF['service:api'].api_port) - srv = simple_server.make_server(host, port, root) +eventlet.monkey_patch() - print 'Starting server in PID %s' % os.getpid() +utils.read_config('billingstack', sys.argv) - print "serving on http://%s:%s" % (host, port) +logging.setup('billingstack') - try: - srv.serve_forever() - except KeyboardInterrupt: - # allow CTRL+C to shutdown without an error - pass +launcher = service.launch(api_service.Service(), + cfg.CONF['service:api'].workers) +launcher.wait() diff --git a/etc/billingstack/api-paste.ini.sample b/etc/billingstack/api-paste.ini.sample new file mode 100644 index 0000000..b43faaf --- /dev/null +++ b/etc/billingstack/api-paste.ini.sample @@ -0,0 +1,33 @@ +[composite:bs_api] +use = egg:Paste#urlmap +/: bs_api_versions +/v1: bs_core_api_v1 + +[app:bs_api_versions] +paste.app_factory = billingstack.api.versions:factory + +[composite:bs_core_api_v1] +use = call:billingstack.api.auth:pipeline_factory +noauth = noauthcontext bs_core_app_v1 +keystone = authtoken keystonecontext bs_core_app_v1 + +[app:bs_core_app_v1] +paste.app_factory = billingstack.api.v1:factory + +[filter:noauthcontext] +paste.filter_factory = billingstack.api.auth:NoAuthContextMiddleware.factory + +#[filter:keystonecontext] +#paste.filter_factory = billingstack.api.auth:KeystoneContextMiddleware.factory + +[filter:authtoken] +paste.filter_factory = keystoneclient.middleware.auth_token:filter_factory +service_protocol = http +service_host = 127.0.0.1 +service_port = 5000 +auth_host = 127.0.0.1 +auth_port = 35357 +auth_protocol = http +admin_tenant_name = %SERVICE_TENANT_NAME% +admin_user = %SERVICE_USER% +admin_password = %SERVICE_PASSWORD% diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index 49e7736..0446ca0 100644 --- 
a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -16,6 +16,9 @@ debug = True identity_driver = internal +# Enabled API Version 1 extensions +# #enabled_extensions_v1 = none + [service:api] # Address to bind the API server # api_host = 0.0.0.0 diff --git a/setup.py b/setup.py index c173ee7..48d8fb8 100644 --- a/setup.py +++ b/setup.py @@ -64,6 +64,8 @@ pg-register = billingstack.manage.provider:ProvidersRegister pg-list = billingstack.manage.provider:ProvidersList + [billingstack.api.v1.extensions] + [billingstack.identity_plugin] sqlalchemy = billingstack.identity.impl_sqlalchemy:SQLAlchemyPlugin diff --git a/tools/pip-requires b/tools/pip-requires index 02a8827..97ad0c5 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -1,12 +1,17 @@ -WebOb>=1.2 +Flask +Paste +PasteDeploy eventlet -#pecan --e git://github.com/ryanpetrello/pecan.git@next#egg=pecan stevedore argparse -e hg+https://bitbucket.org/cdevienne/wsme/#egg=wsme -anyjson>=0.2.4 pycountry iso8601 cliff http://tarballs.openstack.org/oslo-config/oslo-config-2013.1b4.tar.gz#egg=oslo-config + +# From OpenStack Common +routes>=1.12.3 +iso8601>=0.1.4 +WebOb>=1.0.8 +extras From c0da649ca99f57ec704668e9aa7324f102a290e2 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 20 Mar 2013 08:26:18 +0000 Subject: [PATCH 034/182] Remove duplicate --- tools/pip-requires | 1 - 1 file changed, 1 deletion(-) diff --git a/tools/pip-requires b/tools/pip-requires index 97ad0c5..7efb5ca 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -6,7 +6,6 @@ stevedore argparse -e hg+https://bitbucket.org/cdevienne/wsme/#egg=wsme pycountry -iso8601 cliff http://tarballs.openstack.org/oslo-config/oslo-config-2013.1b4.tar.gz#egg=oslo-config From 99cfe74521853567cd08a6d98256d313997fd7fc Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 20 Mar 2013 09:12:32 +0000 Subject: [PATCH 035/182] Some RC files --- .coveragerc | 7 +++++++ .pylintrc | 42 
++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 .coveragerc create mode 100644 .pylintrc diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..8120c13 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,7 @@ +[run] +branch = True +source = billingstack +omit = billingstack/tests/*,billingstack/openstack/* + +[report] +ignore-errors = True diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..93fab95 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,42 @@ +# The format of this file isn't really documented; just use --generate-rcfile +[MASTER] +# Add to the black list. It should be a base name, not a +# path. You may set this option multiple times. +ignore=test + +[Messages Control] +# NOTE(justinsb): We might want to have a 2nd strict pylintrc in future +# C0111: Don't require docstrings on every method +# W0511: TODOs in code comments are fine. +# W0142: *args and **kwargs are fine. +# W0622: Redefining id is fine. +disable=C0111,W0511,W0142,W0622 + +[Basic] +# Variable names can be 1 to 31 characters long, with lowercase and underscores +variable-rgx=[a-z_][a-z0-9_]{0,30}$ + +# Argument names can be 2 to 31 characters long, with lowercase and underscores +argument-rgx=[a-z_][a-z0-9_]{1,30}$ + +# Method names should be at least 3 characters long +# and be lowecased with underscores +method-rgx=([a-z_][a-z0-9_]{2,50}|setUp|tearDown)$ + +# Module names matching billingstack-* are ok (files in bin/) +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+)|(billingstack-[a-z0-9_-]+))$ + +# Don't require docstrings on tests. +no-docstring-rgx=((__.*__)|([tT]est.*)|setUp|tearDown)$ + +[Design] +max-public-methods=100 +min-public-methods=0 +max-args=6 + +[Variables] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. 
+# _ is used by our localization +additional-builtins=_ From c2711eb0b6f89bd5b5cf0fa4aecab56c79488e98 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 20 Mar 2013 09:48:48 +0000 Subject: [PATCH 036/182] Copy some helper scripts from Quantum --- tools/install_venv.py | 76 ++++++++++++ tools/install_venv_common.py | 224 +++++++++++++++++++++++++++++++++++ tools/patch_tox_venv.py | 38 ++++++ tools/with_venv.sh | 21 ++++ 4 files changed, 359 insertions(+) create mode 100644 tools/install_venv.py create mode 100644 tools/install_venv_common.py create mode 100644 tools/patch_tox_venv.py create mode 100755 tools/with_venv.sh diff --git a/tools/install_venv.py b/tools/install_venv.py new file mode 100644 index 0000000..096a95b --- /dev/null +++ b/tools/install_venv.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2010 United States Government as represented by the +# Administrator of the National Aeronautics and Space Administration. +# All Rights Reserved. +# +# Copyright 2010 OpenStack Foundation. +# Copyright 2013 IBM Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Installation script for BillingStack's development virtualenv +""" + +import os +import subprocess +import sys + +import install_venv_common as install_venv + + +def print_help(): + help = """ + BillingStack development environment setup is complete. 
+ + BillingStack development uses virtualenv to track and manage Python dependencies + while in development and testing. + + To activate the BillingStack virtualenv for the extent of your current shell + session you can run: + + $ source .venv/bin/activate + + Or, if you prefer, you can run commands in the virtualenv on a case by case + basis by running: + + $ tools/with_venv.sh + + Also, make test will automatically use the virtualenv. + """ + print help + + +def main(argv): + root = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + venv = os.path.join(root, '.venv') + pip_requires = os.path.join(root, 'tools', 'pip-requires') + pip_options = os.path.join(root, 'tools', 'pip-options') + test_requires = os.path.join(root, 'tools', 'test-requires') + py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1]) + project = 'quantum' + install = install_venv.InstallVenv(root, venv, pip_requires, pip_options, test_requires, + py_version, project) + options = install.parse_args(argv) + install.check_python_version() + install.check_dependencies() + install.create_virtualenv(no_site_packages=options.no_site_packages) + install.install_dependencies() + install.post_process() + print_help() + + +if __name__ == '__main__': + main(sys.argv) diff --git a/tools/install_venv_common.py b/tools/install_venv_common.py new file mode 100644 index 0000000..8123f89 --- /dev/null +++ b/tools/install_venv_common.py @@ -0,0 +1,224 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2013 OpenStack Foundation +# Copyright 2013 IBM Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Provides methods needed by installation script for OpenStack development +virtual environments. + +Synced in from openstack-common +""" + +import argparse +import os +import subprocess +import sys + + +class InstallVenv(object): + + def __init__(self, root, venv, pip_requires, pip_options, test_requires, + py_version, project): + self.root = root + self.venv = venv + self.pip_requires = pip_requires + self.pip_options = pip_options + self.test_requires = test_requires + self.py_version = py_version + self.project = project + + def die(self, message, *args): + print >> sys.stderr, message % args + sys.exit(1) + + def check_python_version(self): + if sys.version_info < (2, 6): + self.die("Need Python Version >= 2.6") + + def run_command_with_code(self, cmd, redirect_output=True, + check_exit_code=True): + """Runs a command in an out-of-process shell. + + Returns the output of that command. Working directory is self.root. 
+ """ + if redirect_output: + stdout = subprocess.PIPE + else: + stdout = None + + proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout) + output = proc.communicate()[0] + if check_exit_code and proc.returncode != 0: + self.die('Command "%s" failed.\n%s', ' '.join(cmd), output) + return (output, proc.returncode) + + def run_command(self, cmd, redirect_output=True, check_exit_code=True): + return self.run_command_with_code(cmd, redirect_output, + check_exit_code)[0] + + def get_distro(self): + if (os.path.exists('/etc/fedora-release') or + os.path.exists('/etc/redhat-release')): + return Fedora(self.root, self.venv, self.pip_requires, + self.pip_options, self.test_requires, + self.py_version, self.project) + else: + return Distro(self.root, self.venv, self.pip_requires, + self.pip_options, self.test_requires, + self.py_version, self.project) + + def check_dependencies(self): + self.get_distro().install_virtualenv() + + def create_virtualenv(self, no_site_packages=True): + """Creates the virtual environment and installs PIP. + + Creates the virtual environment and installs PIP only into the + virtual environment. + """ + if not os.path.isdir(self.venv): + print 'Creating venv...', + if no_site_packages: + self.run_command(['virtualenv', '-q', '--no-site-packages', + self.venv]) + else: + self.run_command(['virtualenv', '-q', self.venv]) + print 'done.' + print 'Installing pip in venv...', + if not self.run_command(['tools/with_venv.sh', 'easy_install', + 'pip>1.0']).strip(): + self.die("Failed to install pip.") + print 'done.' + else: + print "venv already exists..." + pass + + def pip_install(self, *args): + self.run_command(['tools/with_venv.sh', + 'pip', 'install', '--upgrade'] + list(args), + redirect_output=False) + + def install_dependencies(self): + print 'Installing dependencies with pip (this can take a while)...' + + # First things first, make sure our venv has the latest pip and + # distribute. 
+ # NOTE: we keep pip at version 1.1 since the most recent version causes + # the .venv creation to fail. See: + # https://bugs.launchpad.net/nova/+bug/1047120 + self.pip_install('pip==1.1') + self.pip_install('distribute') + + # Install greenlet by hand - just listing it in the requires file does + # not + # get it installed in the right order + self.pip_install('greenlet') + + self.pip_install('-r', self.pip_requires) + self.pip_install('-r', self.pip_options) + self.pip_install('-r', self.test_requires) + + def post_process(self): + self.get_distro().post_process() + + def parse_args(self, argv): + """Parses command-line arguments.""" + parser = argparse.ArgumentParser() + parser.add_argument('-n', '--no-site-packages', + action='store_true', + help="Do not inherit packages from global Python " + "install") + return parser.parse_args(argv[1:]) + + +class Distro(InstallVenv): + + def check_cmd(self, cmd): + return bool(self.run_command(['which', cmd], + check_exit_code=False).strip()) + + def install_virtualenv(self): + if self.check_cmd('virtualenv'): + return + + if self.check_cmd('easy_install'): + print 'Installing virtualenv via easy_install...', + if self.run_command(['easy_install', 'virtualenv']): + print 'Succeeded' + return + else: + print 'Failed' + + self.die('ERROR: virtualenv not found.\n\n%s development' + ' requires virtualenv, please install it using your' + ' favorite package management tool' % self.project) + + def post_process(self): + """Any distribution-specific post-processing gets done here. + + In particular, this is useful for applying patches to code inside + the venv. + """ + pass + + +class Fedora(Distro): + """This covers all Fedora-based distributions. 
+ + Includes: Fedora, RHEL, CentOS, Scientific Linux + """ + + def check_pkg(self, pkg): + return self.run_command_with_code(['rpm', '-q', pkg], + check_exit_code=False)[1] == 0 + + def yum_install(self, pkg, **kwargs): + print "Attempting to install '%s' via yum" % pkg + self.run_command(['sudo', 'yum', 'install', '-y', pkg], **kwargs) + + def apply_patch(self, originalfile, patchfile): + self.run_command(['patch', '-N', originalfile, patchfile], + check_exit_code=False) + + def install_virtualenv(self): + if self.check_cmd('virtualenv'): + return + + if not self.check_pkg('python-virtualenv'): + self.yum_install('python-virtualenv', check_exit_code=False) + + super(Fedora, self).install_virtualenv() + + def post_process(self): + """Workaround for a bug in eventlet. + + This currently affects RHEL6.1, but the fix can safely be + applied to all RHEL and Fedora distributions. + + This can be removed when the fix is applied upstream. + + Nova: https://bugs.launchpad.net/nova/+bug/884915 + Upstream: https://bitbucket.org/which_linden/eventlet/issue/89 + """ + + # Install "patch" program if it's not there + if not self.check_pkg('patch'): + self.yum_install('patch') + + # Apply the eventlet patch + self.apply_patch(os.path.join(self.venv, 'lib', self.py_version, + 'site-packages', + 'eventlet/green/subprocess.py'), + 'contrib/redhat-eventlet.patch') diff --git a/tools/patch_tox_venv.py b/tools/patch_tox_venv.py new file mode 100644 index 0000000..bcd1fc5 --- /dev/null +++ b/tools/patch_tox_venv.py @@ -0,0 +1,38 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2013 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os +import sys + +import install_venv_common as install_venv + + +def main(argv): + root = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + + venv = os.environ['VIRTUAL_ENV'] + + pip_requires = os.path.join(root, 'tools', 'pip-requires') + test_requires = os.path.join(root, 'tools', 'test-requires') + py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1]) + project = 'Quantum' + install = install_venv.InstallVenv(root, venv, pip_requires, test_requires, + py_version, project) + #NOTE(dprince): For Tox we only run post_process (which patches files, etc) + install.post_process() + +if __name__ == '__main__': + main(sys.argv) diff --git a/tools/with_venv.sh b/tools/with_venv.sh new file mode 100755 index 0000000..5c4a271 --- /dev/null +++ b/tools/with_venv.sh @@ -0,0 +1,21 @@ +#!/bin/bash +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack Foundation. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +TOOLS=`dirname $0` +VENV=$TOOLS/../.venv +source $VENV/bin/activate && $@ From 529e5ab0e72cde3b42140a8c487952c015695912 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 20 Mar 2013 13:00:49 +0000 Subject: [PATCH 037/182] Update docs --- doc/source/developing.rst | 66 +++++++++++++++++++++++++++ doc/source/index.rst | 1 + doc/source/install/common.rst | 85 +++++++++++++++++++++++++++++++++++ doc/source/install/index.rst | 2 + doc/source/install/manual.rst | 24 ---------- doc/source/install/pgp.rst | 61 +++++++++++++++++++++++++ 6 files changed, 215 insertions(+), 24 deletions(-) create mode 100644 doc/source/developing.rst create mode 100644 doc/source/install/common.rst create mode 100644 doc/source/install/pgp.rst diff --git a/doc/source/developing.rst b/doc/source/developing.rst new file mode 100644 index 0000000..6e4e991 --- /dev/null +++ b/doc/source/developing.rst @@ -0,0 +1,66 @@ +.. + Copyright 2013 Endre Karlson + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +.. _developing: + +======================= +Developing BillingStack +======================= + + +Setting up a development environment +==================================== + +.. index:: + double: development; env + +There are 2 ways to setting up a development environment +* :doc:install/manual - Manual setup for a more distributed / semi production env +* This: :ref:`development-env` + +1. 
Clone the repo - see :ref:`cloning-git` for generic information:: + + $ git clone http://github.com/billingstack/billingstack + +2. Change directory to the BS directory:: + + $ cd billingstack + +3. Setup a virtualenv with all deps included for the core:: + + $ python tools/install_venv.py + +Now wait for it to be ready ( Take a coffe break? ) + +3. Active the virtualenv:: + + $ source .venv/bin/activate + +4. You're ready to have fun! + + +Running tests +============= + +Using tox you can test towards multiple different isolated environments. + +For example if you want to test your PEP8 coverage that is needed to pass for +a change to merge:: + + $ tox -e pep8 + +Running the actualy in Python 2.7 tests:: + + $ tox -e py27 -v -- -v \ No newline at end of file diff --git a/doc/source/index.rst b/doc/source/index.rst index 8225239..3e18706 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -13,6 +13,7 @@ Contents: architecture api + developing glossary install/index resources/index diff --git a/doc/source/install/common.rst b/doc/source/install/common.rst new file mode 100644 index 0000000..95a2e75 --- /dev/null +++ b/doc/source/install/common.rst @@ -0,0 +1,85 @@ +.. + Copyright 2013 Endre Karlson + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + + +.. _system-deps:: + +System dependencies +=================== + +.. index:: + double: installing; common_steps + +.. note:: + The below operations should take place underneath your folder. 
+ +Install module dependencies + +Debian, Ubuntu:: + + $ apt-get install python-pip python-lxml + +Fedora, Centos, RHEL:: + + $ yum install pip-python python-lxml + + +.. _storage-deps:: + +Storage dependencies +==================== + +.. index:: installing; storage + +Depending on the datastore that is currently supported and your pick of them +you need to install the underlying server and client libraries as well as +python bindings. + +See `System dependencies`_ before continuing. + +Example for MySQL on Debian, Ubuntu:: + + $ apt-get install mysql-server mysql-client libmysqlclient-dev + +Using MySQL bindings:: + + $ pip install MySQL-python + +Using oursql bindings (use 'mysql+oursql://.....' instead of 'mysql://'):: + + $ pip install oursql + + +.. _cloning-git:: + + +Cloning git repo +================ +1. Install GIT. + + On ubuntu you do the following:: + + $ apt-get install git-core + + On Fedora / Centos / RHEL:: + + $ apt-get install git + +2. Clone a BS repo off of Github:: + + $ git clone https://github.com/billingstack/ + $ cd + +3. Now continue with whatever other thing needs to be setup. \ No newline at end of file diff --git a/doc/source/install/index.rst b/doc/source/install/index.rst index 33a2942..18f4d97 100644 --- a/doc/source/install/index.rst +++ b/doc/source/install/index.rst @@ -22,4 +22,6 @@ .. toctree:: :maxdepth: 2 + common manual + pgp diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst index 73f7934..0b52285 100644 --- a/doc/source/install/manual.rst +++ b/doc/source/install/manual.rst @@ -91,30 +91,6 @@ Installing Central $ billingstack-central -Installing a PGP -================ - -.. index: - double: installing; pgp - -.. note:: - This is REQUIRED to be installed on the same machine that has access to - the database and that has the billingstack-manage command. - -1. Clone a provider repo off of github:: - - $ git clone git@github.com:billingstack/billingstack-braintree.git - -2. 
Install it in the SAME env / venv as the main billingstack package:: - - $ pip install -rtools/setup-requires -rtools/pip-requires -rtools/pip-options - $ python setup.py develop - -3. Now register :term:`pgp` with it's :term:`pgm`:: - - $ billingstack-manage pg-register - - Installing the API ==================== diff --git a/doc/source/install/pgp.rst b/doc/source/install/pgp.rst new file mode 100644 index 0000000..bca05c6 --- /dev/null +++ b/doc/source/install/pgp.rst @@ -0,0 +1,61 @@ +.. + Copyright 2013 Endre Karlson + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + + +Installing a PGP +================ + +.. index: + double: installing; pgp + +.. note:: + This is REQUIRED to be installed on the same machine that has access to + the database and that has the billingstack-manage command. + +.. note:: + A PGP Can be installed either inside a virtualenv where the bs core is + installed or in a system wide install. + + +Python modules +============== + +1. Clone a provider repo off of github:: + + $ git clone git@github.com:billingstack/billingstack-braintree.git + +2. Install it in the SAME environment / virtualenv as the main billingstack core:: + + $ pip install -rtools/setup-requires -rtools/pip-requires -rtools/pip-options + $ python setup.py develop + + +Registering the PGP +=================== + +.. note:: + So while the module is actually installed Python wise, it's needed to + load up some data into the database so the system knows of its existance. + +1. 
Install the PGP module using the process described above. + +2. Register :term:`pgp` with its :term:`pgm`:: + + $ billingstack-manage pg-register + +3. Check the logs that the utility gives and list out registered pgp's:: + + $ billingstack-manage pg-list + From 457e1a7111e13b4747c07bb3c0610d0b744410ed Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 20 Mar 2013 13:01:10 +0000 Subject: [PATCH 038/182] Add back db manage script --- bin/billingstack-db-manage | 26 ++++++++++++++++++++++++++ setup.py | 1 + 2 files changed, 27 insertions(+) create mode 100755 bin/billingstack-db-manage diff --git a/bin/billingstack-db-manage b/bin/billingstack-db-manage new file mode 100755 index 0000000..4dc66b0 --- /dev/null +++ b/bin/billingstack-db-manage @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2012 New Dream Network, LLC (DreamHost) +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import os +import sys +sys.path.insert(0, os.getcwd()) + +from billingstack.storage.impl_sqlalchemy.migration.cli import main + + +main() diff --git a/setup.py b/setup.py index 48d8fb8..8921b58 100644 --- a/setup.py +++ b/setup.py @@ -48,6 +48,7 @@ dependency_links=dependency_links, scripts=[ 'bin/billingstack-api', + 'bin/billingstack-db-manage', 'bin/billingstack-identity-api', 'bin/billingstack-manage', 'bin/billingstack-central' From 741334e9bd112db2b07a3fb25d635135b6bf5a89 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 20 Mar 2013 13:10:29 +0000 Subject: [PATCH 039/182] Update Oslo dep and move options --- billingstack/__init__.py | 16 ---------------- billingstack/api/service.py | 3 +++ billingstack/central/service.py | 2 ++ billingstack/conf.py | 16 ++++++++++++++++ tools/pip-requires | 3 ++- 5 files changed, 23 insertions(+), 17 deletions(-) create mode 100644 billingstack/conf.py diff --git a/billingstack/__init__.py b/billingstack/__init__.py index b45cd58..e69de29 100644 --- a/billingstack/__init__.py +++ b/billingstack/__init__.py @@ -1,16 +0,0 @@ -import os -from oslo.config import cfg - -from billingstack.openstack.common import rpc - -cfg.CONF.register_opts([ - cfg.StrOpt('pybasedir', - default=os.path.abspath(os.path.join(os.path.dirname(__file__), - '../')), - help='Directory where the nova python module is installed'), - cfg.StrOpt('state-path', default='$pybasedir', - help='Top-level directory for maintaining billingstack\'s state') -]) - - -rpc.set_defaults(control_exchange='billingstack') diff --git a/billingstack/api/service.py b/billingstack/api/service.py index 20be4eb..f6433fd 100644 --- a/billingstack/api/service.py +++ b/billingstack/api/service.py @@ -25,6 +25,9 @@ #from billingstack import policy +cfg.CONF.import_opt('state_path', 'billingstack.conf') + + LOG = logging.getLogger(__name__) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 526de41..71215b2 100644 --- 
a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -10,6 +10,8 @@ LOG = logging.getLogger(__name__) +cfg.CONF.import_opt('state_path', 'billingstack.conf') + class Service(rpc_service.Service): def __init__(self, *args, **kwargs): diff --git a/billingstack/conf.py b/billingstack/conf.py new file mode 100644 index 0000000..b45cd58 --- /dev/null +++ b/billingstack/conf.py @@ -0,0 +1,16 @@ +import os +from oslo.config import cfg + +from billingstack.openstack.common import rpc + +cfg.CONF.register_opts([ + cfg.StrOpt('pybasedir', + default=os.path.abspath(os.path.join(os.path.dirname(__file__), + '../')), + help='Directory where the nova python module is installed'), + cfg.StrOpt('state-path', default='$pybasedir', + help='Top-level directory for maintaining billingstack\'s state') +]) + + +rpc.set_defaults(control_exchange='billingstack') diff --git a/tools/pip-requires b/tools/pip-requires index 7efb5ca..32be233 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -7,7 +7,8 @@ argparse -e hg+https://bitbucket.org/cdevienne/wsme/#egg=wsme pycountry cliff -http://tarballs.openstack.org/oslo-config/oslo-config-2013.1b4.tar.gz#egg=oslo-config +#http://tarballs.openstack.org/oslo-config/oslo-config-master.tar.gz#egg=oslo-config +oslo.config>=1.1.0 # From OpenStack Common routes>=1.12.3 From b0ac63eb181498b499c385790a1f0f9edde17cdb Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 20 Mar 2013 13:11:18 +0000 Subject: [PATCH 040/182] Remove unused comment --- billingstack/central/service.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 71215b2..2ca90d5 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -22,8 +22,6 @@ def __init__(self, *args, **kwargs): super(Service, self).__init__(*args, **kwargs) - # Get a storage connection - def start(self): self.storage_conn = storage.get_connection() super(Service, self).start() From 
e131a0e84645951e5620c58d731067cf06cfe6a6 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 20 Mar 2013 13:31:37 +0000 Subject: [PATCH 041/182] Fix pep8 --- billingstack/conf.py | 2 +- billingstack/exceptions.py | 1 + billingstack/identity/impl_sqlalchemy.py | 7 +- billingstack/payment_gateway/__init__.py | 3 +- billingstack/payment_gateway/service.py | 8 +- billingstack/tests/base.py | 38 +++-- billingstack/tests/identity/test_api.py | 22 +-- billingstack/tests/storage/__init__.py | 142 ++++++++++++------ billingstack/tests/storage/test_sqlalchemy.py | 2 +- 9 files changed, 138 insertions(+), 87 deletions(-) diff --git a/billingstack/conf.py b/billingstack/conf.py index b45cd58..fd88d31 100644 --- a/billingstack/conf.py +++ b/billingstack/conf.py @@ -9,7 +9,7 @@ '../')), help='Directory where the nova python module is installed'), cfg.StrOpt('state-path', default='$pybasedir', - help='Top-level directory for maintaining billingstack\'s state') + help='Top-level directory for maintaining state') ]) diff --git a/billingstack/exceptions.py b/billingstack/exceptions.py index aaea0da..0699d31 100644 --- a/billingstack/exceptions.py +++ b/billingstack/exceptions.py @@ -52,6 +52,7 @@ class InvalidQueryField(Base): class InvalidOperator(Base): pass + class Duplicate(Base): pass diff --git a/billingstack/identity/impl_sqlalchemy.py b/billingstack/identity/impl_sqlalchemy.py index 8b3d70f..8a98b15 100644 --- a/billingstack/identity/impl_sqlalchemy.py +++ b/billingstack/identity/impl_sqlalchemy.py @@ -15,7 +15,7 @@ A Identity plugin... 
""" from oslo.config import cfg -from sqlalchemy import Column, ForeignKey, UniqueConstraint +from sqlalchemy import Column, ForeignKey from sqlalchemy import Unicode from sqlalchemy.orm import exc from sqlalchemy.ext.declarative import declarative_base @@ -51,7 +51,7 @@ class UserAccountGrant(BASE): user_id = Column(UUID, ForeignKey('user.id', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True) account_id = Column(UUID, ForeignKey('account.id', ondelete='CASCADE', - onupdate='CASCADE'), primary_key=True) + onupdate='CASCADE'), primary_key=True) data = Column(JSON) @@ -80,7 +80,8 @@ def __init__(self): def base(self): return BASE - def authenticate(self, context, user_id=None, password=None, account_id=None): + def authenticate(self, context, user_id=None, password=None, + account_id=None): #self._get_by_name(models. pass diff --git a/billingstack/payment_gateway/__init__.py b/billingstack/payment_gateway/__init__.py index f776e70..509687b 100644 --- a/billingstack/payment_gateway/__init__.py +++ b/billingstack/payment_gateway/__init__.py @@ -14,7 +14,8 @@ def _register(ep, context, conn): values = provider.values() - LOG.debug("Attempting registration of PGP %s" % ep.plugin.get_plugin_name()) + LOG.debug("Attempting registration of PGP %s" % + ep.plugin.get_plugin_name()) try: methods = provider.methods() except NotImplementedError: diff --git a/billingstack/payment_gateway/service.py b/billingstack/payment_gateway/service.py index cde2dd0..b7f7c08 100644 --- a/billingstack/payment_gateway/service.py +++ b/billingstack/payment_gateway/service.py @@ -3,14 +3,9 @@ """ import functools -import re from oslo.config import cfg from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc from billingstack.openstack.common.rpc import service as rpc_service -from stevedore.named import NamedExtensionManager -from billingstack import exceptions -from billingstack import utils from billingstack.central.rpcapi import 
CentralAPI @@ -34,7 +29,8 @@ def __init__(self, *args, **kwargs): def get_pg_provider(self, ctxt, pg_info): """ - Work out a PGC config either from pg_info or via ctxt fetching it from central. + Work out a PGC config either from pg_info or via ctxt fetching it + from central. Return the appropriate PGP for this info. :param ctxt: Request context diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index 3a093ae..b46b1a2 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -7,9 +7,10 @@ from billingstack import exceptions from billingstack import samples from billingstack import storage +from billingstack.api import service as api_service from billingstack.central import service as central_service -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.context import RequestContext, get_admin_context +from billingstack.openstack.common.context import RequestContext, \ + get_admin_context cfg.CONF.import_opt('storage_driver', 'billingstack.central', @@ -140,24 +141,28 @@ def setSamples(self): def create_language(self, fixture=0, values={}, **kw): fixture = self.get_fixture('language', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_language(ctxt, fixture, **kw) + return fixture, self.central_service.create_language(ctxt, fixture, + **kw) def create_currency(self, fixture=0, values={}, **kw): fixture = self.get_fixture('currency', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_currency(ctxt, fixture, **kw) + return fixture, self.central_service.create_currency(ctxt, fixture, + **kw) def create_invoice_state(self, fixture=0, values={}, **kw): fixture = self.get_fixture('invoice_state', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_invoice_state(ctxt, fixture, **kw) + return fixture, 
self.central_service.create_invoice_state( + ctxt, fixture, **kw) def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): methods = [self.get_fixture('pg_method')] or methods fixture = self.get_fixture('pg_provider', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - data = self.central_service.pg_provider_register(ctxt, fixture, methods=methods, **kw) + data = self.central_service.pg_provider_register(ctxt, fixture, + methods=methods, **kw) fixture['methods'] = methods return fixture, data @@ -181,20 +186,24 @@ def create_merchant(self, fixture=0, values={}, **kw): self._account_defaults(fixture) - return fixture, self.central_service.create_merchant(ctxt, fixture, **kw) + return fixture, self.central_service.create_merchant( + ctxt, fixture, **kw) def create_pg_config(self, provider_id, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_pg_config(ctxt, self.merchant['id'], provider_id, fixture, **kw) + return fixture, self.central_service.create_pg_config( + ctxt, self.merchant['id'], provider_id, fixture, **kw) def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) self._account_defaults(fixture) - return fixture, self.central_service.create_customer(ctxt, merchant_id, fixture, **kw) + return fixture, self.central_service.create_customer( + ctxt, merchant_id, fixture, **kw) - def create_payment_method(self, customer_id, provider_method_id, fixture=0, values={}, **kw): + def create_payment_method(self, customer_id, provider_method_id, fixture=0, + values={}, **kw): fixture = self.get_fixture('payment_method', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.central_service.create_payment_method( @@ -203,14 +212,17 @@ def create_payment_method(self, customer_id, 
provider_method_id, fixture=0, valu def user_add(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('user', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.user_add(ctxt, merchant_id, fixture, **kw) + return fixture, self.central_service.user_add( + ctxt, merchant_id, fixture, **kw) def create_product(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('product', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_product(ctxt, merchant_id, fixture, **kw) + return fixture, self.central_service.create_product( + ctxt, merchant_id, fixture, **kw) def create_plan(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('plan', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_plan(ctxt, merchant_id, fixture, **kw) + return fixture, self.central_service.create_plan( + ctxt, merchant_id, fixture, **kw) diff --git a/billingstack/tests/identity/test_api.py b/billingstack/tests/identity/test_api.py index bf5bff6..2805ff1 100644 --- a/billingstack/tests/identity/test_api.py +++ b/billingstack/tests/identity/test_api.py @@ -9,9 +9,10 @@ from billingstack.tests.base import BaseTestCase -cfg.CONF.import_opt('database_connection', - 'billingstack.identity.impl_sqlalchemy', - group='identity:sqlalchemy') +cfg.CONF.import_opt( + 'database_connection', + 'billingstack.identity.impl_sqlalchemy', + group='identity:sqlalchemy') ROLE = { @@ -52,7 +53,6 @@ def tearDown(self): super(IdentityAPITest, self).tearDown() set_config({}, overwrite=True) - def make_config(self, enable_acl=True): root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', @@ -74,8 +74,8 @@ def make_config(self, enable_acl=True): 'root': {'level': 'INFO', 'handlers': ['console']}, 'wsme': {'level': 'INFO', 'handlers': ['console']}, 'billingstack': {'level': 'DEBUG', - 'handlers': 
['console'], - }, + 'handlers': ['console'], + }, }, 'handlers': { 'console': { @@ -137,12 +137,6 @@ def test_delete_account(self): resp = self.get('accounts') self.assertLen(0, resp.json) - def test_create_account(self): - values = self.get_fixture('merchant') - values['type'] = 'merchant' - - self.post('accounts', values) - # Roles def test_create_role(self): values = ROLE.copy() @@ -150,7 +144,7 @@ def test_create_role(self): resp = self.post('roles', values) assert resp.json['name'] == values['name'] - assert resp.json['id'] != None + assert resp.json['id'] is not None def test_list_roles(self): resp = self.get('roles') @@ -264,7 +258,7 @@ def test_revoke_grant(self): def test_login(self): user_data = self.get_fixture('user') - user = self.post('users', user_data).json + self.post('users', user_data).json resp = self.post('tokens', user_data) diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index 7acdf76..38c9e68 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -49,7 +49,8 @@ def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): fixture = self.get_fixture('pg_provider', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - data = self.storage_conn.pg_provider_register(ctxt, fixture, methods=methods, **kw) + data = self.storage_conn.pg_provider_register( + ctxt, fixture, methods=methods, **kw) fixture['methods'] = methods return fixture, data @@ -70,15 +71,18 @@ def create_merchant(self, fixture=0, values={}, **kw): def create_pg_config(self, provider_id, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_pg_config(ctxt, self.merchant['id'], provider_id, fixture, **kw) + return fixture, self.storage_conn.create_pg_config( + ctxt, self.merchant['id'], provider_id, fixture, **kw) def create_customer(self, merchant_id, fixture=0, 
values={}, **kw): fixture = self.get_fixture('customer', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) self._account_defaults(fixture) - return fixture, self.storage_conn.create_customer(ctxt, merchant_id, fixture, **kw) + return fixture, self.storage_conn.create_customer( + ctxt, merchant_id, fixture, **kw) - def create_payment_method(self, customer_id, provider_method_id, fixture=0, values={}, **kw): + def create_payment_method(self, customer_id, provider_method_id, fixture=0, + values={}, **kw): fixture = self.get_fixture('payment_method', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.storage_conn.create_payment_method( @@ -87,12 +91,14 @@ def create_payment_method(self, customer_id, provider_method_id, fixture=0, valu def create_product(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('product', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_product(ctxt, merchant_id, fixture, **kw) + return fixture, self.storage_conn.create_product( + ctxt, merchant_id, fixture, **kw) def create_plan(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('plan', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_plan(ctxt, merchant_id, fixture, **kw) + return fixture, self.storage_conn.create_plan( + ctxt, merchant_id, fixture, **kw) # Currencies def test_create_currency(self): @@ -106,10 +112,12 @@ def test_set_properties(self): fixture, data = self.create_product(self.merchant['id']) metadata = {"random": True} - self.storage_conn.set_properties(data['id'], metadata, cls=models.Product) + self.storage_conn.set_properties(data['id'], metadata, + cls=models.Product) metadata.update({'foo': 1, 'bar': 2}) - self.storage_conn.set_properties(data['id'], metadata, cls=models.Product) + self.storage_conn.set_properties(data['id'], metadata, + cls=models.Product) actual = 
self.storage_conn.get_product(self.admin_ctxt, data['id']) self.assertLen(6, actual['properties']) @@ -135,7 +143,8 @@ def test_pg_provider_register_different_methods(self): methods = [method1, method2, method3] provider = {'name': 'noop'} - provider = self.storage_conn.pg_provider_register(self.admin_ctxt, provider, methods) + provider = self.storage_conn.pg_provider_register( + self.admin_ctxt, provider, methods) # TODO(ekarls): Make this more extensive? self.assertLen(3, provider['methods']) @@ -178,19 +187,23 @@ def test_pg_provider_register_method_switch_methods(self): def test_get_pg_provider(self): _, expected = self.pg_provider_register() - actual = self.storage_conn.get_pg_provider(self.admin_ctxt, expected['id']) + actual = self.storage_conn.get_pg_provider(self.admin_ctxt, + expected['id']) self.assertData(expected, actual) def test_get_pg_provider_missing(self): - self.assertMissing(self.storage_conn.get_pg_provider, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.get_pg_provider, + self.admin_ctxt, UUID) def test_pg_provider_deregister(self): _, data = self.pg_provider_register() self.storage_conn.pg_provider_deregister(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.pg_provider_deregister, self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.pg_provider_deregister, + self.admin_ctxt, data['id']) def test_pg_provider_deregister_missing(self): - self.assertMissing(self.storage_conn.pg_provider_deregister, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.pg_provider_deregister, + self.admin_ctxt, UUID) # Payment Gateway Configuration def test_create_pg_config(self): @@ -203,14 +216,16 @@ def test_get_pg_config(self): fixture, data = self.create_pg_config(provider['id']) def test_get_pg_config_missing(self): - self.assertMissing(self.storage_conn.get_pg_config, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.get_pg_config, + self.admin_ctxt, UUID) def test_update_pg_config(self): 
_, provider = self.pg_provider_register() fixture, data = self.create_pg_config(provider['id']) fixture['properties'] = {"api": 1} - updated = self.storage_conn.update_pg_config(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_pg_config( + self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) @@ -218,17 +233,20 @@ def test_update_pg_config_missing(self): _, provider = self.pg_provider_register() fixture, data = self.create_pg_config(provider['id']) - self.assertMissing(self.storage_conn.update_pg_config, self.admin_ctxt, UUID, {}) + self.assertMissing(self.storage_conn.update_pg_config, + self.admin_ctxt, UUID, {}) def test_delete_pg_config(self): _, provider = self.pg_provider_register() fixture, data = self.create_pg_config(provider['id']) - self.storage_conn.delete_pg_config(self.admin_ctxt,data['id']) - self.assertMissing(self.storage_conn.get_pg_config, self.admin_ctxt, data['id']) + self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_pg_config, + self.admin_ctxt, data['id']) def test_delete_pg_config_missing(self): - self.assertMissing(self.storage_conn.delete_pg_config, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.delete_pg_config, + self.admin_ctxt, UUID) # PaymentMethod def test_create_payment_method(self): @@ -245,7 +263,8 @@ def test_get_payment_method(self): _, customer = self.create_customer(self.merchant['id']) _, expected = self.create_payment_method(customer['id'], m_id) - actual = self.storage_conn.get_payment_method(self.admin_ctxt, expected['id']) + actual = self.storage_conn.get_payment_method(self.admin_ctxt, + expected['id']) self.assertData(expected, actual) # TODO(ekarlso): Make this test more extensive? 
@@ -272,7 +291,8 @@ def test_list_payment_methods(self): self.assertLen(2, rows) def test_get_payment_method_missing(self): - self.assertMissing(self.storage_conn.get_payment_method, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.get_payment_method, + self.admin_ctxt, UUID) def test_update_payment_method(self): _, provider = self.pg_provider_register() @@ -282,22 +302,26 @@ def test_update_payment_method(self): fixture, data = self.create_payment_method(customer['id'], m_id) fixture['identifier'] = 1 - updated = self.storage_conn.update_payment_method(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_payment_method(self.admin_ctxt, + data['id'], fixture) self.assertData(fixture, updated) def test_update_payment_method_missing(self): - self.assertMissing(self.storage_conn.update_payment_method, self.admin_ctxt, UUID, {}) + self.assertMissing(self.storage_conn.update_payment_method, + self.admin_ctxt, UUID, {}) def test_delete_payment_method(self): _, provider = self.pg_provider_register() fixture, data = self.create_pg_config(provider['id']) self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.delete_payment_method, self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.delete_payment_method, + self.admin_ctxt, data['id']) def test_delete_payment_method_missing(self): - self.assertMissing(self.storage_conn.delete_payment_method, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.delete_payment_method, + self.admin_ctxt, UUID) # Merchant def test_create_merchant(self): @@ -306,29 +330,35 @@ def test_create_merchant(self): def test_get_merchant(self): _, expected = self.create_merchant() - actual = self.storage_conn.get_merchant(self.admin_ctxt, expected['id']) + actual = self.storage_conn.get_merchant( + self.admin_ctxt, expected['id']) self.assertData(expected, actual) def test_get_merchant_missing(self): - 
self.assertMissing(self.storage_conn.get_merchant, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.get_merchant, + self.admin_ctxt, UUID) def test_update_merchant(self): fixture, data = self.create_merchant() fixture['name'] = 'test' - updated = self.storage_conn.update_merchant(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_merchant( + self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) def test_update_merchant_missing(self): - self.assertMissing(self.storage_conn.update_merchant, self.admin_ctxt, UUID, {}) + self.assertMissing(self.storage_conn.update_merchant, + self.admin_ctxt, UUID, {}) def test_delete_merchant(self): self.storage_conn.delete_merchant(self.admin_ctxt, self.merchant['id']) - self.assertMissing(self.storage_conn.get_merchant, self.admin_ctxt, self.merchant['id']) + self.assertMissing(self.storage_conn.get_merchant, + self.admin_ctxt, self.merchant['id']) def test_delete_merchant_missing(self): - self.assertMissing(self.storage_conn.delete_merchant, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.delete_merchant, + self.admin_ctxt, UUID) # Customer def test_create_customer(self): @@ -348,30 +378,36 @@ def test_create_customer_with_contact_info(self): def test_get_customer(self): _, expected = self.create_customer(self.merchant['id']) - actual = self.storage_conn.get_customer(self.admin_ctxt, expected['id']) + actual = self.storage_conn.get_customer( + self.admin_ctxt, expected['id']) self.assertData(expected, actual) def test_get_customer_missing(self): - self.assertMissing(self.storage_conn.get_customer, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.get_customer, + self.admin_ctxt, UUID) def test_update_customer(self): fixture, data = self.create_customer(self.merchant['id']) fixture['name'] = 'test' - updated = self.storage_conn.update_customer(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_customer( + self.admin_ctxt, 
data['id'], fixture) self.assertData(fixture, updated) def test_update_customer_missing(self): - self.assertMissing(self.storage_conn.update_customer, self.admin_ctxt, UUID, {}) + self.assertMissing(self.storage_conn.update_customer, + self.admin_ctxt, UUID, {}) def test_delete_customer(self): _, data = self.create_customer(self.merchant['id']) self.storage_conn.delete_customer(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_customer, self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_customer, + self.admin_ctxt, data['id']) def test_delete_customer_missing(self): - self.assertMissing(self.storage_conn.delete_customer, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.delete_customer, + self.admin_ctxt, UUID) # Products def test_create_product(self): @@ -384,26 +420,31 @@ def test_get_product(self): self.assertData(expected, actual) def test_get_product_missing(self): - self.assertMissing(self.storage_conn.get_product, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.get_product, + self.admin_ctxt, UUID) def test_update_product(self): fixture, data = self.create_product(self.merchant['id']) fixture['name'] = 'test' - updated = self.storage_conn.update_product(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_product( + self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) def test_update_product_missing(self): - self.assertMissing(self.storage_conn.update_product, self.admin_ctxt, UUID, {}) + self.assertMissing(self.storage_conn.update_product, + self.admin_ctxt, UUID, {}) def test_delete_product(self): fixture, data = self.create_product(self.merchant['id']) self.storage_conn.delete_product(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_product, self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_product, + self.admin_ctxt, data['id']) def test_delete_product_missing(self): - 
self.assertMissing(self.storage_conn.delete_product, self.admin_ctxt, UUID) + self.assertMissing(self.storage_conn.delete_product, + self.admin_ctxt, UUID) # Plan def test_create_plan_with_items(self): @@ -425,8 +466,9 @@ def test_get_plan(self): fixture, data = self.create_plan(self.merchant['id']) actual = self.storage_conn.get_plan(self.admin_ctxt, data['id']) - # FIXME(ekarlso): This should test the actual items also? But atm there's an - # error that if the value is int when getting added it's string when returned... + # FIXME(ekarlso): This should test the actual items also? But atm + # there's an error that if the value is int when getting added it's + # string when returned... self.assertEqual(data['name'], actual['name']) self.assertEqual(data['title'], actual['title']) self.assertEqual(data['description'], actual['description']) @@ -438,17 +480,21 @@ def test_update_plan(self): fixture, data = self.create_plan(self.merchant['id']) fixture['name'] = 'test' - updated = self.storage_conn.update_plan(self.admin_ctxt, data['id'], fixture) + updated = self.storage_conn.update_plan( + self.admin_ctxt, data['id'], fixture) self.assertData(fixture, updated) def test_update_plan_missing(self): - self.assertMissing(self.storage_conn.update_plan, self.admin_ctxt, UUID, {}) + self.assertMissing(self.storage_conn.update_plan, + self.admin_ctxt, UUID, {}) def test_delete_plan(self): fixture, data = self.create_plan(self.merchant['id']) self.storage_conn.delete_plan(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_plan, + self.admin_ctxt, data['id']) def test_delete_plan_missing(self): - self.assertMissing(self.storage_conn.delete_plan, self.admin_ctxt, UUID) \ No newline at end of file + self.assertMissing(self.storage_conn.delete_plan, + self.admin_ctxt, UUID) diff --git a/billingstack/tests/storage/test_sqlalchemy.py b/billingstack/tests/storage/test_sqlalchemy.py 
index 20a0d7b..a1aa60f 100644 --- a/billingstack/tests/storage/test_sqlalchemy.py +++ b/billingstack/tests/storage/test_sqlalchemy.py @@ -27,4 +27,4 @@ class SqlalchemyStorageTest(StorageDriverTestCase): def setUp(self): self.config(database_connection='sqlite://', group='storage:sqlalchemy') - super(SqlalchemyStorageTest, self).setUp() \ No newline at end of file + super(SqlalchemyStorageTest, self).setUp() From be25c4458f4748873948dedf1ba5a635d64dfa20 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 21 Mar 2013 12:40:32 +0000 Subject: [PATCH 042/182] Various fixes and doc' --- billingstack/api/service.py | 2 +- billingstack/api/v1/resources.py | 2 +- billingstack/central/service.py | 5 +- billingstack/paths.py | 68 +++++++++++++++++++ billingstack/sqlalchemy/api.py | 27 +++++++- .../storage/impl_sqlalchemy/models.py | 7 +- 6 files changed, 99 insertions(+), 12 deletions(-) create mode 100644 billingstack/paths.py diff --git a/billingstack/api/service.py b/billingstack/api/service.py index f6433fd..fbdfabd 100644 --- a/billingstack/api/service.py +++ b/billingstack/api/service.py @@ -25,7 +25,7 @@ #from billingstack import policy -cfg.CONF.import_opt('state_path', 'billingstack.conf') +cfg.CONF.import_opt('state_path', 'billingstack.paths') LOG = logging.getLogger(__name__) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 556b578..a2ad103 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -310,7 +310,7 @@ def create_payment_method(merchant_id, customer_id): @bp.get('/merchants//customers//payment-methods') -def list_payment_methods(merchant_id): +def list_payment_methods(merchant_id, customer_id): rows = central_api.list_payment_methods(request.environ['context']) return render([models.PaymentMethod.from_db(r) for r in rows]) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 2ca90d5..c03fc19 100644 --- a/billingstack/central/service.py +++ 
b/billingstack/central/service.py @@ -5,13 +5,12 @@ from billingstack import storage -cfg.CONF.import_opt('host', 'billingstack.netconf') cfg.CONF.import_opt('central_topic', 'billingstack.central.rpcapi') +cfg.CONF.import_opt('host', 'billingstack.netconf') +cfg.CONF.import_opt('state_path', 'billingstack.paths') LOG = logging.getLogger(__name__) -cfg.CONF.import_opt('state_path', 'billingstack.conf') - class Service(rpc_service.Service): def __init__(self, *args, **kwargs): diff --git a/billingstack/paths.py b/billingstack/paths.py new file mode 100644 index 0000000..8d84289 --- /dev/null +++ b/billingstack/paths.py @@ -0,0 +1,68 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2010 United States Government as represented by the +# Administrator of the National Aeronautics and Space Administration. +# All Rights Reserved. +# Copyright 2012 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import os + +from oslo.config import cfg + +path_opts = [ + cfg.StrOpt('pybasedir', + default=os.path.abspath(os.path.join(os.path.dirname(__file__), + '../')), + help='Directory where the nova python module is installed'), + cfg.StrOpt('bindir', + default='$pybasedir/bin', + help='Directory where nova binaries are installed'), + cfg.StrOpt('state_path', + default='$pybasedir', + help="Top-level directory for maintaining nova's state"), +] + +CONF = cfg.CONF +CONF.register_opts(path_opts) + + +def basedir_def(*args): + """Return an uninterpolated path relative to $pybasedir.""" + return os.path.join('$pybasedir', *args) + + +def bindir_def(*args): + """Return an uninterpolated path relative to $bindir.""" + return os.path.join('$bindir', *args) + + +def state_path_def(*args): + """Return an uninterpolated path relative to $state_path.""" + return os.path.join('$state_path', *args) + + +def basedir_rel(*args): + """Return a path relative to $pybasedir.""" + return os.path.join(CONF.pybasedir, *args) + + +def bindir_rel(*args): + """Return a path relative to $bindir.""" + return os.path.join(CONF.bindir, *args) + + +def state_path_rel(*args): + """Return a path relative to $state_path.""" + return os.path.join(CONF.state_path, *args) diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index e91f109..dfc4e0f 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -78,6 +78,8 @@ def apply_criteria(self): """ query = self.query + LOG.debug('Applying Critera %s' % self.criterion) + for c in self.criterion: # NOTE: Try to get the column try: @@ -99,11 +101,17 @@ def apply_criteria(self): else: msg = 'Invalid operator in criteria \'%s\'' % c raise exceptions.InvalidOperator(msg) + return query class HelpersMixin(object): def setup(self, config_group): + """ + Setup the Connection + + :param config_group: The config group to get the config from + """ self.session = session.get_session(config_group) self.engine = 
session.get_engine(config_group) @@ -119,12 +127,18 @@ def teardown_schema(self): base = self.base() base.metadata.drop_all(self.session.bind) - def _save(self, obj, save=True): + def _save(self, row, save=True): + """ + Save a row. + + :param row: The row to save. + :param save: Save or just return a ref. + """ if not save: - return obj + return row try: - obj.save(self.session) + row.save(self.session) except exceptions.Duplicate: raise @@ -157,6 +171,13 @@ def _list(self, cls=None, query=None, criterion=None): return result def _filter_id(self, cls, identifier, by_name): + """ + Apply filter for either id or name + + :param cls: The Model class. + :param identifier: The identifier of it. + :param by_name: By name. + """ if hasattr(cls, 'id') and utils.is_valid_id(identifier): return cls.id == identifier elif hasattr(cls, 'name') and by_name: diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index e81668f..389b805 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -401,8 +401,8 @@ class Subscription(BASE, BaseMixin): """ billing_day = Column(Integer) - resource_id = Column(Unicode(255)) - resource_type = Column(Unicode(255)) + resource_id = Column(Unicode(255), nullable=False) + resource_type = Column(Unicode(255), nullable=True) usages = relationship( 'Usage', @@ -421,8 +421,7 @@ class Subscription(BASE, BaseMixin): payment_method = relationship('PaymentMethod', backref='subscriptions') payment_method_id = Column(UUID, ForeignKey('payment_method.id', - ondelete='CASCADE', - onupdate='CASCADE'), + ondelete='CASCADE', onupdate='CASCADE'), nullable=False) From a7204ef7fe1ce22b92c9d54c468e235a4a99f59a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 21 Mar 2013 21:35:43 +0000 Subject: [PATCH 043/182] First step to adding more checks to code --- billingstack/central/service.py | 218 ++++++++++++++++++++++++++++++++ 1 file changed, 218 
insertions(+) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index c03fc19..8a06564 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -43,3 +43,221 @@ def _wrapper(*args, **kw): return f(*args, **kw) setattr(self, name, _wrapper) return _wrapper + + def create_currency(self, ctxt, values): + return self.create_currency(ctxt, values) + + def list_currencies(self, ctxt, **kw): + return self.storage_conn.list_currencies(ctxt, **kw) + + def get_currency(self, ctxt, id_): + return self.storage.get_currency(ctxt, id_) + + def update_currency(self, ctxt, id_, values): + return self.storage_conn.update_currency(ctxt, id_, values) + + def delete_currency(self, ctxt, id_): + return self.storage_conn.delete_currency(ctxt, id_) + + def create_language(self, ctxt, values): + return self.storage_conn.create_language(ctxt, values) + + def list_languages(self, ctxt, **kw): + return self.storage_conn.list_languages(ctxt, **kw) + + def get_language(self, ctxt, id_): + return self.storage_conn.get_language(ctxt, id_) + + def update_language(self, ctxt, id_, values): + return self.storage_conn.update_language(ctxt, id_, values) + + def delete_language(self, ctxt, id_): + return self.storage_conn.delete_language(ctxt, id_) + + def create_invoice_state(self, ctxt, values): + return self.storage_conn.create_invoice_state(ctxt, values) + + def list_invoice_states(self, ctxt, **kw): + return self.storage_conn.list_invoice_states(ctxt, **kw) + + def get_invoice_state(self, ctxt, id_): + return self.storage_conn.get_invoice_state(ctxt, id_) + + def update_invoice_state(self, ctxt, id_, values): + return self.storage_conn.update_invoice_state(ctxt, id_) + + def delete_invoice_state(self, ctxt, id_): + return self.storage_conn.delete_invoice_state(ctxt, id_) + + # TODO Fix + def create_contact_info(self, ctxt, obj, values, cls=None, + rel_attr='contact_info'): + return self.storage_conn.create_contact_info(ctxt, values) + + 
def get_contact_info(self, ctxt, id_): + return self.storage_conn.get_contact_info(ctxt, id_) + + def update_contact_info(self, ctxt, id_, values): + return self.storage_conn.update_contact_info(ctxt, values) + + def delete_contact_info(self, ctxt, id_): + return self.storage_conn.delete_contact_info(ctxt, id_) + + def list_pg_providers(self, ctxt, **kw): + return self.storage_conn.list_pg_providers(ctxt, **kw) + + def get_pg_provider(self, ctxt, pgp_id): + return self.storage_conn.get_pg_provider(ctxt, pgp_id) + + def create_pg_method(self, ctxt, values): + return self.storage_conn.create_pg_method(ctxt, values) + + def list_pg_methods(self, ctxt, **kw): + return self.storage_conn.list_pg_methods(ctxt, **kw) + + def get_pg_method(self, ctxt, id_): + return self.storage_conn.get_pg_method(ctxt, id_) + + def update_pg_method(self, ctxt, id_, values): + return self.storage_conn.update_pg_method(ctxt, id_, values) + + def delete_pg_method(self, ctxt, id_): + return self.storage_conn.delete_pg_method(ctxt, id_) + + def create_pg_config(self, ctxt, merchant_id, provider_id, values): + return self.storage_conn.create_pg_config(ctxt, merchant_id, + provider_id, values) + + def list_pg_configs(self, ctxt, **kw): + return self.storage_conn.list_pg_configs(ctxt, **kw) + + def get_pg_config(self, ctxt, id_): + return self.storage_conn.get_pg_config(ctxt, id_) + + def update_pg_config(self, ctxt, id_, values): + return self.storage_conn.update_pg_config(ctxt, id_, values) + + def delete_pg_config(self, ctxt, id_): + return self.storage_conn.delete_pg_config(ctxt, id_) + + def create_payment_method(self, ctxt, customer_id, pg_method_id, values): + return self.storage_conn.create_payment_method( + ctxt, customer_id, pg_method_id, values) + + def list_payment_methods(self, ctxt, **kw): + return self.storage_conn.list_payment_methods(ctxt, **kw) + + def get_payment_method(self, ctxt, id_, **kw): + return self.storage_conn.get_payment_method(ctxt, id_) + + def 
update_payment_method(self, ctxt, id_, values): + return self.storage_conn.update_payment_method(ctxt, id_, values) + + def delete_payment_method(self, ctxt, id_): + return self.storage_conn.delete_payment_method(ctxt, id_) + + def create_merchant(self, ctxt, values): + return self.storage_conn.create_merchant(ctxt, values) + + def list_merchants(self, ctxt, **kw): + return self.storage_conn.list_merchants(ctxt, **kw) + + def get_merchant(self, ctxt, id_): + return self.storage_conn.get_merchant(ctxt, id_) + + def update_merchant(self, ctxt, id_, values): + return self.storage_conn.update_merchant(ctxt, id_, values) + + def delete_merchant(self, ctxt, id_): + return self.storage_conn.delete_merchant(ctxt, id_) + + def create_customer(self, ctxt, merchant_id, values): + return self.storage_conn.create_customer(ctxt, merchant_id, values) + + def list_customers(self, ctxt, **kw): + return self.storage_conn.list_customers(ctxt, **kw) + + def get_customer(self, ctxt, id_): + return self.storage_conn.get_customer(ctxt, id_) + + def update_customer(self, ctxt, id_, values): + return self.storage_conn.update_customer(ctxt, id_, values) + + def delete_customer(self, ctxt, id_): + return self.storage_conn.delete_customer(ctxt, id_) + + def create_plan(self, ctxt, merchant_id, values): + return self.storage_conn.create_plan(ctxt, merchant_id, values) + + def list_plans(self, ctxt, **kw): + return self.storage_conn.list_plans(ctxt, **kw) + + def get_plan(self, ctxt, id_): + return self.storage_conn.get_plan(ctxt, id_) + + def update_plan(self, ctxt, id_, values): + return self.storage_conn.update_plan(ctxt, id_, values) + + def delete_plan(self, ctxt, id_): + return self.storage_conn.delete_plan(ctxt, id_) + + def create_plan_item(self, ctxt, values): + return self.storage_conn.create_plan(ctxt, values) + + def update_plan_item(self, ctxt, id_, values): + return self.storage_conn.update_plan_item(ctxt, id_, values) + + def list_plan_items(self, ctxt, **kw): + return 
self.storage_conn.list_plan_items(ctxt, **kw) + + def get_plan_item(self, ctxt, id_): + return self.storage_conn.get_plan_item(ctxt, id_) + + def delete_plan_item(self, ctxt, id_): + return self.storage_conn.delete_plan_item(ctxt, id_) + + def create_product(self, ctxt, merchant_id, values): + return self.storage_conn.create_product(ctxt, merchant_id, values) + + def list_products(self, ctxt, **kw): + return self.storage_conn.list_products(ctxt, **kw) + + def get_product(self, ctxt, id_): + return self.storage_conn.get_product(ctxt, id_) + + def update_product(self, ctxt, id_, values): + return self.storage_conn.update_product(ctxt, id_, values) + + def delete_product(self, ctxt, id_): + return self.storage_conn.delete_product(ctxt, id_) + + def create_invoice(self, ctxt, merchant_id, values): + return self.storage_conn.create_invoice_state( + ctxt, merchant_id, values) + + def list_invoices(self, ctxt, **kw): + return self.storage_conn.list_invoices(ctxt, **kw) + + def get_invoice(self, ctxt, id_): + return self.storage_conn.get_invoice(ctxt, id_) + + def update_invoice(self, ctxt, id_, values): + return self.storage_conn.update_invoice(ctxt, id_, values) + + def delete_invoice(self, ctxt, id_): + return self.storage_conn.delete_invoice(ctxt, id_) + + def create_subscription(self, ctxt, customer_id, values): + return self.storage_conn.create_subscription(ctxt, customer_id, values) + + def list_subscriptions(self, ctxt, **kw): + return self.storage_conn.list_subscriptions(ctxt, **kw) + + def get_subscription(self, ctxt, id_): + return self.storage_conn.get_subscription(ctxt, id_) + + def update_subscription(self, ctxt, id_, values): + return self.storage_conn.update_subscription(ctxt, id_, values) + + def delete_subscription(self, ctxt, id_): + return self.storage_conn.delete_subscription(ctxt, id_) From 878842d1fb889991cdb28146d022668ddc39f65a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 22 Mar 2013 11:00:15 +0000 Subject: [PATCH 044/182] Add Usages / 
InvoiceLines --- billingstack/api/v1/resources.py | 97 +++++++++++++++ billingstack/central/rpcapi.py | 38 ++++++ billingstack/central/service.py | 31 +++++ .../storage/impl_sqlalchemy/__init__.py | 113 ++++++++++++++++++ 4 files changed, 279 insertions(+) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index a2ad103..38041e7 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -473,6 +473,52 @@ def update_invoice(merchant_id, invoice_id): return render(models.Invoice.from_db(row)) +# Products +@bp.post('/merchants//invoices//lines') +def create_invoice_line(merchant_id, invoice_id): + data = request_data(models.Product) + + row = central_api.create_invoice_line( + request.environ['context'], + invoice_id, + data) + + return render(models.Product.from_db(row)) + + +@bp.get('/merchants//invoices//lines') +def list_invoice_lines(merchant_id, invoice_id): + rows = central_api.list_invoice_lines(request.environ['context']) + + return render([models.Product.from_db(r) for r in rows]) + + +@bp.get('/merchants//invoices//lines/') +def get_invoice_line(merchant_id, invoice_id, line_id): + row = central_api.get_invoice_line(request.environ['context'], + line_id) + + return render(models.Product.from_db(row)) + + +@bp.put('/merchants//invoices//lines/') +def update_invoice_line(merchant_id, invoice_id, line_id): + data = request_data(models.Product) + + row = central_api.update_invoice_line( + request.environ['context'], + line_id, + data) + + return render(models.Product.from_db(row)) + + +@bp.delete('/merchants//invoices//lines/') +def delete_invoice_line(merchant_id, invoice_id, line_id): + central_api.delete_invoice_line(request.environ['context'], line_id) + return render() + + @bp.delete('/merchants//invoices/') def delete_invoice(merchant_id, invoice_id): central_api.delete_invoice(request.environ['context'], invoice_id) @@ -525,3 +571,54 @@ def delete_subscription(merchant_id, subscription_id): 
request.environ['context'], subscription_id) return render() + + +# Usage +@bp.post('/merchants//subscriptions//usage') +def create_usage(merchant_id, subscription_id): + data = request_data(models.Usage) + + row = central_api.create_usage( + request.environ['context'], + subscription_id, + data) + + return render(models.Usage.from_db(row)) + + +@bp.get('/merchants//subscriptions//usage') +def list_usages(merchant_id, subscription_id): + rows = central_api.list_usages(request.environ['context']) + + return render([models.Usage.from_db(r) for r in rows]) + + +@bp.get('/merchants//subscriptions/subscription_id>/usage/' + '') +def get_usage(merchant_id, subscription_id, usage_id): + row = central_api.get_usage(request.environ['context'], + usage_id) + + return render(models.Invoice.from_db(row)) + + +@bp.put('/merchants//subscriptions//usage/' + '') +def update_usage(merchant_id, subscription_id, usage_id): + data = request_data(models.Usage) + + row = central_api.update_usage( + request.environ['context'], + usage_id, + data) + + return render(models.Usage.from_db(row)) + + +@bp.delete('/merchants//subscriptions//usage/' + '') +def delete_usage(merchant_id, subscription_id, usage_id): + central_api.delete_usage( + request.environ['context'], + usage_id) + return render() diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index ffd039a..b8719b5 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -260,6 +260,25 @@ def update_invoice(self, ctxt, id_, values): def delete_invoice(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_invoice', id_=id_)) + # Invoice lines + def create_invoice_line(self, ctxt, invoice_id, values): + return self.call(ctxt, self.make_msg('create_invoice_line', + invoice_id=invoice_id, values=values)) + + def list_invoice_lines(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_invoice_lines', + criterion=criterion)) + + def get_invoice_line(self, ctxt, 
id_): + return self.call(ctxt, self.make_msg('get_invoice_line', id_=id_)) + + def update_invoice_line(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_invoice_linet', id_=id_, + values=values)) + + def delete_invoice_line(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_invoice_line', id_=id_)) + # Subscriptions def create_subscription(self, ctxt, merchant_id, values): return self.call(ctxt, self.make_msg('create_subscription', @@ -279,5 +298,24 @@ def update_subscription(self, ctxt, id_, values): def delete_subscription(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_subscription', id_=id_)) + # Subscriptions + def create_usage(self, ctxt, subscription_id, values): + return self.call(ctxt, self.make_msg('create_usage', + subscription_id=subscription_id, values=values)) + + def list_usages(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_usages', + criterion=criterion)) + + def get_usage(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_usage', id_=id_)) + + def update_usage(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_usaget', id_=id_, + values=values)) + + def delete_usage(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_usage', id_=id_)) + central_api = CentralAPI() diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 8a06564..e9d9bc0 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -247,6 +247,22 @@ def update_invoice(self, ctxt, id_, values): def delete_invoice(self, ctxt, id_): return self.storage_conn.delete_invoice(ctxt, id_) + def create_invoice_line(self, ctxt, invoice_id, values): + return self.storage_conn.create_invoice_line_state( + ctxt, invoice_id, values) + + def list_invoice_lines(self, ctxt, **kw): + return self.storage_conn.list_invoice_lines(ctxt, **kw) + + def get_invoice_line(self, ctxt, id_): + return 
self.storage_conn.get_invoice_line(ctxt, id_) + + def update_invoice_line(self, ctxt, id_, values): + return self.storage_conn.update_invoice_line(ctxt, id_, values) + + def delete_invoice_line(self, ctxt, id_): + return self.storage_conn.delete_invoice_line(ctxt, id_) + def create_subscription(self, ctxt, customer_id, values): return self.storage_conn.create_subscription(ctxt, customer_id, values) @@ -261,3 +277,18 @@ def update_subscription(self, ctxt, id_, values): def delete_subscription(self, ctxt, id_): return self.storage_conn.delete_subscription(ctxt, id_) + + def create_usage(self, ctxt, subscription_id, values): + return self.storage_conn.create_usage(ctxt, subscription_id, values) + + def list_usages(self, ctxt, **kw): + return self.storage_conn.list_usages(ctxt, **kw) + + def get_usage(self, ctxt, id_): + return self.storage_conn.get_usage(ctxt, id_) + + def update_usage(self, ctxt, id_, values): + return self.storage_conn.update_usage(ctxt, id_, values) + + def delete_usage(self, ctxt, id_): + return self.storage_conn.delete_usage(ctxt, id_) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index c3af19b..1dacf49 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -635,6 +635,63 @@ def delete_invoice(self, ctxt, id_): """ self._delete(models.Invoice, id_) + # Invoices Items + def _invoice_line(self, row): + line = dict(row) + return line + + def create_invoice_items(self, ctxt, invoice_id, values): + """ + Add a new Invoice + + :param invoice_id: The Invoice + :param values: Values describing the new Invoice Line + """ + invoice = self._get(models.Invoice, invoice_id) + + line = models.InvoiceLine(**values) + line.invoice = invoice + + self._save(line) + return self._invoice_line(line) + + def list_invoice_lines(self, ctxt, **kw): + """ + List Invoice Lines + """ + rows = self._list(models.InvoiceLine, **kw) + return 
map(self._invoice_line, rows) + + def get_invoice_line(self, ctxt, id_): + """ + Get a Invoice Line + + :param id_: The Invoice Line ID + """ + row = self._get(models.InvoiceLine, id_) + return self._invoice_line(row) + + def update_invoice_line(self, ctxt, id_, values): + """ + Update a Invoice Line + + :param id_: The Invoice ID + :param values: Values to update with + """ + row = self._get(models.InvoiceLine, id_) + row.update(values) + + self._save(row) + return self._invoice_line(row) + + def delete_invoice_line(self, ctxt, id_): + """ + Delete a Invoice Line + + :param id_: Invoice Line ID + """ + self._delete(models.InvoiceLine, id_) + # Subscriptions def _subscription(self, row): subscription = dict(row) @@ -693,3 +750,59 @@ def delete_subscription(self, ctxt, id_): :param id_: Subscription ID """ self._delete(models.Subscription, id_) + + # Usages + def _usage(self, row): + return dict(row) + + def create_usage(self, ctxt, subscription_id, values): + """ + Add a new Usage + + :param subscription_id: The Subscription + :param values: Values describing the new Subscription + """ + subscription = self._get(models.Subscription, subscription_id) + + usage = models.Usage(**values) + usage.subscription = subscription + + self._save(usage) + return self._usage(usage) + + def list_usages(self, ctxt, **kw): + """ + List Usage + """ + rows = self._list(models.Usage, **kw) + return map(self._usage, rows) + + def get_usage(self, ctxt, id_): + """ + Get a Usage + + :param id_: The Usage ID + """ + row = self._get(models.Usage, id_) + return self._usage(row) + + def update_usage(self, ctxt, id_, values): + """ + Update a Usage + + :param id_: The Usage ID + :param values: Values to update with + """ + row = self._get(models.Usage, id_) + row.update(values) + + self._save(row) + return self._usage(row) + + def delete_usage(self, ctxt, id_): + """ + Delete a Usage + + :param id_: Usage ID + """ + self._delete(models.Usage, id_) From 9af18574bb4d9e1d0f65fb4ab5284a90bd43f75c 
Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 22 Mar 2013 13:29:50 +0000 Subject: [PATCH 045/182] Fix bugs --- billingstack/api/v1/models.py | 3 +++ billingstack/api/v1/resources.py | 23 +++++++++---------- billingstack/central/rpcapi.py | 16 +++++++------ billingstack/central/service.py | 14 +++++------ .../storage/impl_sqlalchemy/__init__.py | 12 ++++------ 5 files changed, 35 insertions(+), 33 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 59ae830..87b6499 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -115,6 +115,9 @@ class PaymentMethod(Base): identifier = text expires = text + customer_id = text + provider_method_id = text + properties = DictType(key_type=text, value_type=property_type) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 38041e7..f3efcea 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -102,15 +102,15 @@ def delete_language(language_id): # PGP / PGM @bp.get('/payment-gateway-providers') -def list_pg_providers(self): - rows = request.central_api.list_pg_provider(request.ctxt) +def list_pg_providers(): + rows = central_api.list_pg_provider(request.environ['context']) return render([models.PGProvider.from_db(r) for r in rows]) @bp.get('/payment-gateway-methods') -def list_pg_methods(self): - rows = request.central_api.list_pg_method(request.ctxt) +def list_pg_methods(): + rows = central_api.list_pg_methods(request.environ['context']) return render([models.PGMethod.from_db(r) for r in rows]) @@ -303,7 +303,7 @@ def create_payment_method(merchant_id, customer_id): row = central_api.create_payment_method( request.environ['context'], - merchant_id, + customer_id, data) return render(models.PaymentMethod.from_db(row)) @@ -528,21 +528,20 @@ def delete_invoice(merchant_id, invoice_id): # Subscription @bp.post('/merchants//subscriptions') def create_subscription(merchant_id): - data = 
request_data(models.Invoice) + data = request_data(models.Subscription) row = central_api.create_subscription( request.environ['context'], - merchant_id, data) - return render(models.Invoice.from_db(row)) + return render(models.Subscription.from_db(row)) @bp.get('/merchants//subscriptions') def list_subscriptions(merchant_id): rows = central_api.list_subscriptions(request.environ['context']) - return render([models.Invoice.from_db(r) for r in rows]) + return render([models.Subscription.from_db(r) for r in rows]) @bp.get('/merchants//subscriptions/') @@ -550,19 +549,19 @@ def get_subscription(merchant_id, subscription_id): row = central_api.get_subscription(request.environ['context'], subscription_id) - return render(models.Invoice.from_db(row)) + return render(models.Subscription.from_db(row)) @bp.put('/merchants//subscriptions/') def update_subscription(merchant_id, subscription_id): - data = request_data(models.Invoice) + data = request_data(models.Subscription) row = central_api.update_subscription( request.environ['context'], subscription_id, data) - return render(models.Invoice.from_db(row)) + return render(models.Subscription.from_db(row)) @bp.delete('/merchants//subscriptions/') diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index b8719b5..893cbaf 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -104,7 +104,10 @@ def list_pg_methods(self, ctxt, criterion=None): criterion=criterion)) def get_pg_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('list_pg_methods', id_=id_)) + return self.call(ctxt, self.make_msg('get_pg_method', id_=id_)) + + def delete_pg_method(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_pg_method', id_=id_)) # PGC def create_pg_config(self, ctxt, merchant_id, provider_id, values): @@ -127,10 +130,9 @@ def delete_pg_config(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_pg_config', id_=id_)) # PaymentMethod - def 
create_payment_method(self, ctxt, customer_id, pg_method_id, values): + def create_payment_method(self, ctxt, customer_id, values): return self.call(ctxt, self.make_msg('create_payment_method', - customer_id=customer_id, pg_method_id=pg_method_id, - values=values)) + customer_id=customer_id, values=values)) def list_payment_methods(self, ctxt, criterion=None): return self.call(ctxt, self.make_msg('list_payment_methods', @@ -254,7 +256,7 @@ def get_invoice(self, ctxt, id_): return self.call(ctxt, self.make_msg('get_invoice', id_=id_)) def update_invoice(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoicet', id_=id_, + return self.call(ctxt, self.make_msg('update_invoice', id_=id_, values=values)) def delete_invoice(self, ctxt, id_): @@ -280,9 +282,9 @@ def delete_invoice_line(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_invoice_line', id_=id_)) # Subscriptions - def create_subscription(self, ctxt, merchant_id, values): + def create_subscription(self, ctxt, values): return self.call(ctxt, self.make_msg('create_subscription', - merchant_id=merchant_id, values=values)) + values=values)) def list_subscriptions(self, ctxt, criterion=None): return self.call(ctxt, self.make_msg('list_subscriptions', diff --git a/billingstack/central/service.py b/billingstack/central/service.py index e9d9bc0..473bca0 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -45,13 +45,13 @@ def _wrapper(*args, **kw): return _wrapper def create_currency(self, ctxt, values): - return self.create_currency(ctxt, values) + return self.storage_conn.create_currency(ctxt, values) def list_currencies(self, ctxt, **kw): return self.storage_conn.list_currencies(ctxt, **kw) def get_currency(self, ctxt, id_): - return self.storage.get_currency(ctxt, id_) + return self.storage_conn.get_currency(ctxt, id_) def update_currency(self, ctxt, id_, values): return self.storage_conn.update_currency(ctxt, id_, values) @@ -84,7 +84,7 @@ def 
get_invoice_state(self, ctxt, id_): return self.storage_conn.get_invoice_state(ctxt, id_) def update_invoice_state(self, ctxt, id_, values): - return self.storage_conn.update_invoice_state(ctxt, id_) + return self.storage_conn.update_invoice_state(ctxt, id_, values) def delete_invoice_state(self, ctxt, id_): return self.storage_conn.delete_invoice_state(ctxt, id_) @@ -140,9 +140,9 @@ def update_pg_config(self, ctxt, id_, values): def delete_pg_config(self, ctxt, id_): return self.storage_conn.delete_pg_config(ctxt, id_) - def create_payment_method(self, ctxt, customer_id, pg_method_id, values): + def create_payment_method(self, ctxt, customer_id, values): return self.storage_conn.create_payment_method( - ctxt, customer_id, pg_method_id, values) + ctxt, customer_id, values) def list_payment_methods(self, ctxt, **kw): return self.storage_conn.list_payment_methods(ctxt, **kw) @@ -263,8 +263,8 @@ def update_invoice_line(self, ctxt, id_, values): def delete_invoice_line(self, ctxt, id_): return self.storage_conn.delete_invoice_line(ctxt, id_) - def create_subscription(self, ctxt, customer_id, values): - return self.storage_conn.create_subscription(ctxt, customer_id, values) + def create_subscription(self, ctxt, values): + return self.storage_conn.create_subscription(ctxt, values) def list_subscriptions(self, ctxt, **kw): return self.storage_conn.list_subscriptions(ctxt, **kw) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 1dacf49..90ff2bb 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -319,16 +319,17 @@ def delete_pg_config(self, ctxt, id_): self._delete(models.PGConfig, id_) # PaymentMethod - def create_payment_method(self, ctxt, customer_id, pg_method_id, values): + def create_payment_method(self, ctxt, customer_id, values): """ Configure a PaymentMethod like a CreditCard """ customer = self._get_id_or_name(models.Customer, 
customer_id) - pg_method = self._get_id_or_name(models.PGMethod, pg_method_id) + provider_method = self._get_id_or_name( + models.PGMethod, values['provider_method_id']) row = models.PaymentMethod(**values) row.customer = customer - row.provider_method = pg_method + row.provider_method = provider_method self._save(row) return self._dict(row, extra=['provider_method']) @@ -697,17 +698,14 @@ def _subscription(self, row): subscription = dict(row) return subscription - def create_subscription(self, ctxt, customer_id, values): + def create_subscription(self, ctxt, values): """ Add a new Subscription :param merchant_id: The Merchant :param values: Values describing the new Subscription """ - customer = self._get(models.Customer, customer_id) - subscription = models.Subscription(**values) - subscription.customer = customer self._save(subscription) return self._subscription(subscription) From 5627b83043cb8c0903d106b5cd69194d38204969 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 22 Mar 2013 13:31:09 +0000 Subject: [PATCH 046/182] Typos --- billingstack/central/rpcapi.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 893cbaf..cbfcb00 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -275,7 +275,7 @@ def get_invoice_line(self, ctxt, id_): return self.call(ctxt, self.make_msg('get_invoice_line', id_=id_)) def update_invoice_line(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice_linet', id_=id_, + return self.call(ctxt, self.make_msg('update_invoice_line', id_=id_, values=values)) def delete_invoice_line(self, ctxt, id_): @@ -294,7 +294,7 @@ def get_subscription(self, ctxt, id_): return self.call(ctxt, self.make_msg('get_subscription', id_=id_)) def update_subscription(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_subscriptiont', id_=id_, + return self.call(ctxt, 
self.make_msg('update_subscription', id_=id_, values=values)) def delete_subscription(self, ctxt, id_): @@ -313,7 +313,7 @@ def get_usage(self, ctxt, id_): return self.call(ctxt, self.make_msg('get_usage', id_=id_)) def update_usage(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_usaget', id_=id_, + return self.call(ctxt, self.make_msg('update_usage', id_=id_, values=values)) def delete_usage(self, ctxt, id_): From 170bb2b305bc3aa893d3d2a8bcbfa69d8551572f Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 22 Mar 2013 22:10:26 +0000 Subject: [PATCH 047/182] Remove the need for PGMethod id as a arg and fix test --- billingstack/tests/storage/__init__.py | 32 +++++++++++++++++--------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index 38c9e68..694c8c3 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -81,12 +81,12 @@ def create_customer(self, merchant_id, fixture=0, values={}, **kw): return fixture, self.storage_conn.create_customer( ctxt, merchant_id, fixture, **kw) - def create_payment_method(self, customer_id, provider_method_id, fixture=0, + def create_payment_method(self, customer_id, fixture=0, values={}, **kw): fixture = self.get_fixture('payment_method', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.storage_conn.create_payment_method( - ctxt, customer_id, provider_method_id, fixture, **kw) + ctxt, customer_id, fixture, **kw) def create_product(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('product', fixture, values) @@ -254,7 +254,8 @@ def test_create_payment_method(self): m_id = provider['methods'][0]['id'] _, customer = self.create_customer(self.merchant['id']) - fixture, data = self.create_payment_method(customer['id'], m_id) + fixture, data = self.create_payment_method( + customer['id'], values={'provider_method_id': 
m_id}) self.assertData(fixture, data) def test_get_payment_method(self): @@ -262,7 +263,8 @@ def test_get_payment_method(self): m_id = provider['methods'][0]['id'] _, customer = self.create_customer(self.merchant['id']) - _, expected = self.create_payment_method(customer['id'], m_id) + _, expected = self.create_payment_method( + customer['id'], values={'provider_method_id': m_id}) actual = self.storage_conn.get_payment_method(self.admin_ctxt, expected['id']) self.assertData(expected, actual) @@ -275,15 +277,18 @@ def test_list_payment_methods(self): # Add two Customers with some methods _, customer1 = self.create_customer(self.merchant['id']) - self.create_payment_method(customer1['id'], m_id) + self.create_payment_method( + customer1['id'], values={'provider_method_id': m_id}) rows = self.storage_conn.list_payment_methods( self.admin_ctxt, criterion={'customer_id': customer1['id']}) self.assertLen(1, rows) _, customer2 = self.create_customer(self.merchant['id']) - self.create_payment_method(customer2['id'], m_id) - self.create_payment_method(customer2['id'], m_id) + self.create_payment_method( + customer2['id'], values={'provider_method_id': m_id}) + self.create_payment_method( + customer2['id'], values={'provider_method_id': m_id}) rows = self.storage_conn.list_payment_methods( self.admin_ctxt, @@ -299,7 +304,8 @@ def test_update_payment_method(self): m_id = provider['methods'][0]['id'] _, customer = self.create_customer(self.merchant['id']) - fixture, data = self.create_payment_method(customer['id'], m_id) + fixture, data = self.create_payment_method( + customer['id'], values={'provider_method_id': m_id}) fixture['identifier'] = 1 updated = self.storage_conn.update_payment_method(self.admin_ctxt, @@ -313,10 +319,14 @@ def test_update_payment_method_missing(self): def test_delete_payment_method(self): _, provider = self.pg_provider_register() - fixture, data = self.create_pg_config(provider['id']) + m_id = provider['methods'][0]['id'] + _, customer = 
self.create_customer(self.merchant['id']) - self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.delete_payment_method, + fixture, data = self.create_payment_method( + customer['id'], values={'provider_method_id': m_id}) + + self.storage_conn.delete_payment_method(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_payment_method, self.admin_ctxt, data['id']) def test_delete_payment_method_missing(self): From c2e86b05bcb5912fe0c0d3b174fe83abc5a7638d Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 23 Mar 2013 19:14:55 +0000 Subject: [PATCH 048/182] Multiple fixes.. Remove global PGM. Fix some opt imports. Fix method name --- billingstack/api/v1/resources.py | 4 +- billingstack/manage/__init__.py | 4 ++ billingstack/manage/provider.py | 2 +- billingstack/payment_gateway/service.py | 4 +- .../storage/impl_sqlalchemy/__init__.py | 38 ++++--------------- .../storage/impl_sqlalchemy/models.py | 29 +++----------- billingstack/tests/storage/__init__.py | 38 +------------------ 7 files changed, 24 insertions(+), 95 deletions(-) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index f3efcea..3cf5f10 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -103,12 +103,12 @@ def delete_language(language_id): # PGP / PGM @bp.get('/payment-gateway-providers') def list_pg_providers(): - rows = central_api.list_pg_provider(request.environ['context']) + rows = central_api.list_pg_providers(request.environ['context']) return render([models.PGProvider.from_db(r) for r in rows]) -@bp.get('/payment-gateway-methods') +@bp.get('/payment-gateway-providers//methods') def list_pg_methods(): rows = central_api.list_pg_methods(request.environ['context']) diff --git a/billingstack/manage/__init__.py b/billingstack/manage/__init__.py index 5f57c92..06f4f6b 100644 --- a/billingstack/manage/__init__.py +++ b/billingstack/manage/__init__.py @@ -13,11 +13,15 
@@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +from oslo.config import cfg from cliff.app import App from cliff.commandmanager import CommandManager from billingstack.version import version_info as version +cfg.CONF.import_opt('state_path', 'billingstack.paths') + + class Shell(App): def __init__(self): super(Shell, self).__init__( diff --git a/billingstack/manage/provider.py b/billingstack/manage/provider.py index a3ae244..8251fb6 100644 --- a/billingstack/manage/provider.py +++ b/billingstack/manage/provider.py @@ -16,7 +16,7 @@ def execute(self, parsed_args): class ProvidersList(DatabaseCommand, ListCommand): def execute(self, parsed_args): context = get_admin_context() - data = self.conn.list_pg_provider(context) + data = self.conn.list_pg_providers(context) for p in data: keys = ['type', 'name'] diff --git a/billingstack/payment_gateway/service.py b/billingstack/payment_gateway/service.py index b7f7c08..9ac3c20 100644 --- a/billingstack/payment_gateway/service.py +++ b/billingstack/payment_gateway/service.py @@ -10,7 +10,9 @@ cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('host', 'billingstack.payment_gateway.rpcapi') +cfg.CONF.import_opt('pg_topic', 'billingstack.payment_gateway.rpcapi') +cfg.CONF.import_opt('state_path', 'billingstack.paths') + LOG = logging.getLogger(__name__) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 90ff2bb..223de9f 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -228,49 +228,27 @@ def _get_provider_methods(self, provider): """ methods = {} for m in provider.methods: - m_key = m.key() - key = '%s:%s' % (m.owner_id, m_key) if m.owner_id else m_key - methods[key] = m + methods[m.key()] = m return methods def _set_provider_methods(self, ctxt, 
provider, config_methods): """ Helper method for setting the Methods for a Provider """ - rows = self.list_pg_methods(ctxt, criterion={"owner_id": None}) - system_methods = self._kv_rows(rows, key=models.PGMethod.make_key) - existing = self._get_provider_methods(provider) for method in config_methods: - self._set_method(provider, method, existing, system_methods) + self._set_method(provider, method, existing) self._save(provider) - def _set_method(self, provider, method, existing, all_methods): - method_key = models.PGMethod.make_key(method) - key = '%s:%s' % (provider.id, method_key) - - if method.pop('owned', False): - if method_key in existing: - provider.methods.remove(existing[method_key]) + def _set_method(self, provider, method, existing): + key = models.PGMethod.make_key(method) - if key in existing: - existing[key].update(method) - else: - row = models.PGMethod(**method) - provider.methods.append(row) - provider.provider_methods.append(row) + if key in existing: + existing[key].update(method) else: - if key in existing: - provider.methods.remove(existing[key]) - - try: - all_methods[method_key].providers.append(provider) - except KeyError: - msg = 'Provider %s tried to associate to non-existing'\ - 'method %s' % (provider.name, method_key) - LOG.error(msg) - raise exceptions.ConfigurationError(msg) + row = models.PGMethod(**method) + provider.methods.append(row) # PGMethods def create_pg_method(self, ctxt, values): diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index 389b805..138faaa 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -11,7 +11,7 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
-from sqlalchemy import Column, Table, ForeignKey, UniqueConstraint +from sqlalchemy import Column, ForeignKey, UniqueConstraint from sqlalchemy import Integer, Float from sqlalchemy import DateTime, Unicode from sqlalchemy.orm import relationship, backref @@ -45,12 +45,6 @@ class Language(BASE): title = Column(Unicode(100), nullable=False) -pg_provider_methods = Table( - 'pg_provider_methods', BASE.metadata, - Column('provider_id', UUID, ForeignKey('pg_provider.id')), - Column('method_id', UUID, ForeignKey('pg_method.id'))) - - class PGProvider(BASE, BaseMixin): """ A Payment Gateway - The thing that processes a Payment Method @@ -67,18 +61,7 @@ class PGProvider(BASE, BaseMixin): methods = relationship( 'PGMethod', - backref='providers', - secondary=pg_provider_methods, - primaryjoin="PGProvider.id==pg_provider_methods.c.provider_id", - secondaryjoin="PGMethod.id==pg_provider_methods.c.method_id", - lazy='joined') - - provider_methods = relationship( - 'PGMethod', - backref='owner', - primaryjoin='PGProvider.id == PGMethod.owner_id', - foreign_keys='[PGMethod.owner_id]', - post_update=True, + backref='provider', lazy='joined') def method_map(self): @@ -105,12 +88,10 @@ class PGMethod(BASE, BaseMixin): # NOTE: This is so a PGMethod can be "owned" by a Provider, meaning that # other Providers should not be able to use it. 
- owner_id = Column(UUID, ForeignKey( - 'pg_provider_methods.provider_id', + provider_id = Column(UUID, ForeignKey( + 'pg_provider.id', ondelete='CASCADE', - onupdate='CASCADE', - use_alter=True, - name='owner_fk')) + onupdate='CASCADE')) @staticmethod def make_key(data): diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index 694c8c3..29a7f36 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -138,7 +138,7 @@ def test_pg_provider_register_different_methods(self): method2 = {'type': 'creditcard', 'name': 'amex'} self.storage_conn.create_pg_method(self.admin_ctxt, method2) - method3 = {'type': 'creditcard', 'name': 'visa', 'owned': 1} + method3 = {'type': 'creditcard', 'name': 'visa'} methods = [method1, method2, method3] provider = {'name': 'noop'} @@ -149,42 +149,6 @@ def test_pg_provider_register_different_methods(self): # TODO(ekarls): Make this more extensive? self.assertLen(3, provider['methods']) - def test_pg_provider_register_method_switch_methods(self): - provider_data = {'name': 'noop'} - - system_method = { - 'type': 'creditcard', - 'name': 'mastercard', - 'title': "random"} - self.storage_conn.create_pg_method(self.admin_ctxt, system_method) - - provider = self.storage_conn.pg_provider_register( - self.admin_ctxt, - provider_data, - [system_method]) - self.assertLen(1, provider['methods']) - self.assertData(system_method, provider['methods'][0]) - - provider_method = { - 'type': 'creditcard', - 'name': 'mastercard', - 'title': 'random2', - 'owned': 1} - - provider = self.storage_conn.pg_provider_register( - self.admin_ctxt, - provider_data, - [provider_method]) - self.assertLen(1, provider['methods']) - self.assertData(provider_method, provider['methods'][0]) - - provider = self.storage_conn.pg_provider_register( - self.admin_ctxt, - provider_data, - [system_method]) - self.assertLen(1, provider['methods']) - self.assertData(system_method, provider['methods'][0]) 
- def test_get_pg_provider(self): _, expected = self.pg_provider_register() actual = self.storage_conn.get_pg_provider(self.admin_ctxt, From edf8ff4b1e8dceb8e513935568b875c6ae3c85a8 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 23 Mar 2013 21:21:53 +0000 Subject: [PATCH 049/182] Don't use the self.merchant --- billingstack/tests/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index b46b1a2..a5f680b 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -189,11 +189,12 @@ def create_merchant(self, fixture=0, values={}, **kw): return fixture, self.central_service.create_merchant( ctxt, fixture, **kw) - def create_pg_config(self, provider_id, fixture=0, values={}, **kw): + def create_pg_config(self, merchant_id, provider_id, fixture=0, values={}, + **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.central_service.create_pg_config( - ctxt, self.merchant['id'], provider_id, fixture, **kw) + ctxt, merchant_id, provider_id, fixture, **kw) def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) From a6ad0f2053462bad90fe949ad205a28740457418 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 23 Mar 2013 21:23:08 +0000 Subject: [PATCH 050/182] Fixes for PaymentMethod --- billingstack/api/v1/models.py | 1 + billingstack/storage/impl_sqlalchemy/__init__.py | 4 ++-- billingstack/storage/impl_sqlalchemy/models.py | 6 +++++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 87b6499..5a6767b 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -117,6 +117,7 @@ class PaymentMethod(Base): customer_id = text provider_method_id = text + provider_config_id = text properties = DictType(key_type=text, 
value_type=property_type) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 223de9f..e46bddc 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -271,8 +271,8 @@ def delete_pg_method(self, ctxt, id_): # Payment Gateway Configuration def create_pg_config(self, ctxt, merchant_id, provider_id, values): - merchant = self._get_id_or_name(models.Merchant, merchant_id) - provider = self._get_id_or_name(models.PGProvider, provider_id) + merchant = self._get(models.Merchant, merchant_id) + provider = self._get(models.PGProvider, provider_id) row = models.PGConfig(**values) row.merchant = merchant diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index 138faaa..c6789ea 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -230,7 +230,11 @@ class PaymentMethod(BASE, BaseMixin): provider_method = relationship('PGMethod', backref='payment_methods') provider_method_id = Column(UUID, ForeignKey('pg_method.id', - onupdate='CASCADE')) + onupdate='CASCADE'), nullable=False) + + provider_config = relationship('PGConfig', backref='payment_methods') + provider_config_id = Column(UUID, ForeignKey('pg_config.id', + onupdate='CASCADE'), nullable=False) class InvoiceState(BASE): From ac3703d4bd7ad16413c871f4da7e41a32cc3c47c Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 23 Mar 2013 21:51:32 +0000 Subject: [PATCH 051/182] Use from_db --- billingstack/api/v1/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 5a6767b..73e0847 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -52,7 +52,7 @@ class PGMethod(DescribedBase): class PGProvider(DescribedBase): def __init__(self, **kw): - kw['methods'] = 
[PGMethod(**m) for m in kw.get('methods', {})] + kw['methods'] = [PGMethod.from_db(m) for m in kw.get('methods', {})] super(PGProvider, self).__init__(**kw) methods = [PGMethod] @@ -146,7 +146,7 @@ class Merchant(Account): class Customer(Account): def __init__(self, **kw): infos = kw.get('contact_info', {}) - kw['contact_info'] = [ContactInfo(**i) for i in infos] + kw['contact_info'] = [ContactInfo.from_db(i) for i in infos] super(Customer, self).__init__(**kw) merchant_id = text From e5f2b336d21724b5fd2b99578881f0448cf5ef3f Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 23 Mar 2013 22:23:11 +0000 Subject: [PATCH 052/182] Add license header --- billingstack/api/v1/models.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 73e0847..62e28ed 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
from wsme.types import text, DictType From adf903a529c329cc4785b0bbeb5628e8672d7fc3 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 23 Mar 2013 22:24:23 +0000 Subject: [PATCH 053/182] Fix PM tests --- billingstack/tests/storage/__init__.py | 81 ++++++++++++++++++-------- 1 file changed, 56 insertions(+), 25 deletions(-) diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index 29a7f36..8cab343 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -1,6 +1,6 @@ -# Copyright 2012 Managed I.T. +# -*- encoding: utf-8 -*- # -# Author: Kiall Mac Innes +# Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -13,8 +13,6 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
-# -# Copied: Moniker from billingstack.openstack.common import log as logging from billingstack.storage.impl_sqlalchemy import models from billingstack.tests.base import TestCase @@ -68,11 +66,12 @@ def create_merchant(self, fixture=0, values={}, **kw): return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) - def create_pg_config(self, provider_id, fixture=0, values={}, **kw): + def create_pg_config(self, merchant_id, provider_id, fixture=0, values={}, + **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.storage_conn.create_pg_config( - ctxt, self.merchant['id'], provider_id, fixture, **kw) + ctxt, merchant_id, provider_id, fixture, **kw) def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) @@ -172,12 +171,14 @@ def test_pg_provider_deregister_missing(self): # Payment Gateway Configuration def test_create_pg_config(self): _, provider = self.pg_provider_register() - fixture, data = self.create_pg_config(provider['id']) + fixture, data = self.create_pg_config( + self.merchant['id'], provider['id']) self.assertData(fixture, data) def test_get_pg_config(self): _, provider = self.pg_provider_register() - fixture, data = self.create_pg_config(provider['id']) + fixture, data = self.create_pg_config( + self.merchant['id'], provider['id']) def test_get_pg_config_missing(self): self.assertMissing(self.storage_conn.get_pg_config, @@ -185,7 +186,8 @@ def test_get_pg_config_missing(self): def test_update_pg_config(self): _, provider = self.pg_provider_register() - fixture, data = self.create_pg_config(provider['id']) + fixture, data = self.create_pg_config( + self.merchant['id'], provider['id']) fixture['properties'] = {"api": 1} updated = self.storage_conn.update_pg_config( @@ -195,14 +197,16 @@ def test_update_pg_config(self): def test_update_pg_config_missing(self): _, provider = self.pg_provider_register() - 
fixture, data = self.create_pg_config(provider['id']) + fixture, data = self.create_pg_config( + self.merchant['id'], provider['id']) self.assertMissing(self.storage_conn.update_pg_config, self.admin_ctxt, UUID, {}) def test_delete_pg_config(self): _, provider = self.pg_provider_register() - fixture, data = self.create_pg_config(provider['id']) + fixture, data = self.create_pg_config( + self.merchant['id'], provider['id']) self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) self.assertMissing(self.storage_conn.get_pg_config, @@ -214,35 +218,51 @@ def test_delete_pg_config_missing(self): # PaymentMethod def test_create_payment_method(self): + # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - m_id = provider['methods'][0]['id'] + _, config = self.create_pg_config(self.merchant['id'], provider['id']) _, customer = self.create_customer(self.merchant['id']) + # Setup PaymentMethod + values = { + 'provider_method_id': provider['methods'][0]['id'], + 'provider_config_id': config['id']} + fixture, data = self.create_payment_method( - customer['id'], values={'provider_method_id': m_id}) + customer['id'], values=values) self.assertData(fixture, data) def test_get_payment_method(self): + # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - m_id = provider['methods'][0]['id'] + _, config = self.create_pg_config(self.merchant['id'], provider['id']) _, customer = self.create_customer(self.merchant['id']) + # Setup PaymentMethod + values = { + 'provider_method_id': provider['methods'][0]['id'], + 'provider_config_id': config['id']} + _, expected = self.create_payment_method( - customer['id'], values={'provider_method_id': m_id}) + customer['id'], values=values) actual = self.storage_conn.get_payment_method(self.admin_ctxt, expected['id']) self.assertData(expected, actual) # TODO(ekarlso): Make this test more extensive? 
def test_list_payment_methods(self): - # Setup a PGP with it's sample methods + # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - m_id = provider['methods'][0]['id'] + _, config = self.create_pg_config(self.merchant['id'], provider['id']) + + values = { + 'provider_method_id': provider['methods'][0]['id'], + 'provider_config_id': config['id']} # Add two Customers with some methods _, customer1 = self.create_customer(self.merchant['id']) self.create_payment_method( - customer1['id'], values={'provider_method_id': m_id}) + customer1['id'], values=values) rows = self.storage_conn.list_payment_methods( self.admin_ctxt, criterion={'customer_id': customer1['id']}) @@ -250,10 +270,9 @@ def test_list_payment_methods(self): _, customer2 = self.create_customer(self.merchant['id']) self.create_payment_method( - customer2['id'], values={'provider_method_id': m_id}) + customer2['id'], values=values) self.create_payment_method( - customer2['id'], values={'provider_method_id': m_id}) - + customer2['id'], values=values) rows = self.storage_conn.list_payment_methods( self.admin_ctxt, criterion={'customer_id': customer2['id']}) @@ -264,12 +283,18 @@ def test_get_payment_method_missing(self): self.admin_ctxt, UUID) def test_update_payment_method(self): + # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - m_id = provider['methods'][0]['id'] + _, config = self.create_pg_config(self.merchant['id'], provider['id']) _, customer = self.create_customer(self.merchant['id']) + # Setup PaymentMethod + values = { + 'provider_method_id': provider['methods'][0]['id'], + 'provider_config_id': config['id']} + fixture, data = self.create_payment_method( - customer['id'], values={'provider_method_id': m_id}) + customer['id'], values=values) fixture['identifier'] = 1 updated = self.storage_conn.update_payment_method(self.admin_ctxt, @@ -282,12 +307,18 @@ def test_update_payment_method_missing(self): self.admin_ctxt, UUID, {}) def test_delete_payment_method(self): + # 
Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - m_id = provider['methods'][0]['id'] + _, config = self.create_pg_config(self.merchant['id'], provider['id']) _, customer = self.create_customer(self.merchant['id']) + # Setup PaymentMethod + values = { + 'provider_method_id': provider['methods'][0]['id'], + 'provider_config_id': config['id']} + fixture, data = self.create_payment_method( - customer['id'], values={'provider_method_id': m_id}) + customer['id'], values=values) self.storage_conn.delete_payment_method(self.admin_ctxt, data['id']) self.assertMissing(self.storage_conn.get_payment_method, From ad9179c419b3cea8357b7b6a4e04bb9bd2624f75 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 00:37:21 +0000 Subject: [PATCH 054/182] Introduce error serialization --- billingstack/api/errors.py | 80 +++++++++++++++++++++++ billingstack/api/v1/__init__.py | 4 ++ billingstack/exceptions.py | 49 ++++++++++---- billingstack/tests/api/base.py | 6 +- etc/billingstack/api-paste.ini.sample | 7 +- etc/billingstack/billingstack.conf.sample | 12 ++++ 6 files changed, 141 insertions(+), 17 deletions(-) create mode 100644 billingstack/api/errors.py diff --git a/billingstack/api/errors.py b/billingstack/api/errors.py new file mode 100644 index 0000000..5cd9d68 --- /dev/null +++ b/billingstack/api/errors.py @@ -0,0 +1,80 @@ +# Copyright 2012 Managed I.T. +# +# Author: Kiall Mac Innes +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +# Copied: Moniker +import flask +import webob.dec +from billingstack import exceptions +from billingstack import wsgi +from billingstack.openstack.common.rpc import common as rpc_common +from billingstack.openstack.common import log +from billingstack.openstack.common import jsonutils as json + +LOG = log.getLogger(__name__) + + +class FaultWrapperMiddleware(wsgi.Middleware): + @webob.dec.wsgify + def __call__(self, request): + try: + return request.get_response(self.application) + except exceptions.Base, e: + # Handle Moniker Exceptions + status = e.error_code if hasattr(e, 'error_code') else 500 + + # Start building up a response + response = { + 'code': status + } + + if hasattr(e, 'error_type'): + response['type'] = e.error_type + + if hasattr(e, 'errors'): + response['errors'] = e.errors + + response['message'] = e.get_message() + + return self._handle_exception(request, e, status, response) + except rpc_common.Timeout, e: + # Special case for RPC timeout's + response = { + 'code': 504, + 'type': 'timeout', + } + + return self._handle_exception(request, e, 504, response) + except Exception, e: + # Handle all other exception types + return self._handle_exception(request, e) + + def _handle_exception(self, request, e, status=500, response={}): + # Log the exception ASAP + LOG.exception(e) + + headers = [ + ('Content-Type', 'application/json'), + ] + + # Set a response code, if one is missing. 
+ if 'code' not in response: + response['code'] = status + + # TODO(kiall): Send a fault notification + + # Return the new response + return flask.Response(status=status, headers=headers, + response=json.dumps(response)) diff --git a/billingstack/api/v1/__init__.py b/billingstack/api/v1/__init__.py index 9f48d24..5550aba 100644 --- a/billingstack/api/v1/__init__.py +++ b/billingstack/api/v1/__init__.py @@ -33,6 +33,10 @@ def factory(global_config, **local_conf): app = flask.Flask('billingstack.api.v1') + app.config.update( + PROPAGATE_EXCEPTIONS=True + ) + app.register_blueprint(v1_bp) # TODO(kiall): Ideally, we want to make use of the Plugin class here. diff --git a/billingstack/exceptions.py b/billingstack/exceptions.py index 0699d31..c1fe2f1 100644 --- a/billingstack/exceptions.py +++ b/billingstack/exceptions.py @@ -13,10 +13,37 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
+import re class Base(Exception): - pass + error_code = 500 + message_tmpl = None + + def __init__(self, message='', *args, **kw): + self.message = message % kw if self.message_tmpl else message + + self.errors = kw.pop('errors', None) + super(Base, self).__init__(self.message) + + @property + def error_type(self): + name = "_".join(l.lower() for l in re.findall('[A-Z][^A-Z]*', + self.__class__.__name__)) + name = re.sub('_+remote$', '', name) + return name + + def __str__(self): + return self.message + + def get_message(self): + """ + Return the exception message or None + """ + if unicode(self): + return unicode(self) + else: + return None class NotImplemented(Base, NotImplementedError): @@ -27,21 +54,15 @@ class ConfigurationError(Base): pass -class InvalidObject(Base): - def __init__(self, *args, **kwargs): - self.errors = kwargs.pop('errors', None) - super(InvalidObject, self).__init__(*args, **kwargs) - - class BadRequest(Base): - pass + error_code = 400 -class Forbidden(Base): +class InvalidObject(BadRequest): pass -class InvalidSortKey(Base): +class InvalidSortKey(BadRequest): pass @@ -53,9 +74,13 @@ class InvalidOperator(Base): pass -class Duplicate(Base): +class Forbidden(Base): pass +class Duplicate(Base): + error_code = 409 + + class NotFound(Base): - pass + error_code = 404 diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index 9668f29..70929ee 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -1,8 +1,6 @@ # -*- encoding: utf-8 -*- # -# Copyright © 2012 New Dream Network, LLC (DreamHost) -# -# Author: Doug Hellmann +# Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -19,6 +17,7 @@ Base classes for API tests. 
""" from billingstack.api.v1 import factory +from billingstack.api.errors import FaultWrapperMiddleware from billingstack.api.auth import NoAuthContextMiddleware from billingstack.openstack.common import jsonutils as json from billingstack.openstack.common import log @@ -162,6 +161,7 @@ def setUp(self): self.setSamples() self.app = factory({}) + self.app.wsgi_app = FaultWrapperMiddleware(self.app.wsgi_app) self.app.wsgi_app = NoAuthContextMiddleware(self.app.wsgi_app) self.client = self.app.test_client() diff --git a/etc/billingstack/api-paste.ini.sample b/etc/billingstack/api-paste.ini.sample index b43faaf..bad0a13 100644 --- a/etc/billingstack/api-paste.ini.sample +++ b/etc/billingstack/api-paste.ini.sample @@ -8,12 +8,15 @@ paste.app_factory = billingstack.api.versions:factory [composite:bs_core_api_v1] use = call:billingstack.api.auth:pipeline_factory -noauth = noauthcontext bs_core_app_v1 -keystone = authtoken keystonecontext bs_core_app_v1 +noauth = noauthcontext faultwrapper bs_core_app_v1 +keystone = authtoken keystonecontext faultwrapper bs_core_app_v1 [app:bs_core_app_v1] paste.app_factory = billingstack.api.v1:factory +[filter:faultwrapper] +paste.filter_factory = billingstack.api.errors:FaultWrapperMiddleware.factory + [filter:noauthcontext] paste.filter_factory = billingstack.api.auth:NoAuthContextMiddleware.factory diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index 0446ca0..715dfe4 100644 --- a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -16,6 +16,8 @@ debug = True identity_driver = internal +allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack.common.exception + # Enabled API Version 1 extensions # #enabled_extensions_v1 = none @@ -26,6 +28,16 @@ identity_driver = internal # Port the bind the API server to #api_port = 9001 +[service:identity_api] +# Address to bind the API server +# api_host = 0.0.0.0 + +# Port the bind 
the API server to +api_port = 9092 + +admin_token = rand0m + + ####################### ## Storage Configuration ######################## From f6dc535438539b65489bb584be87c5dca7587372 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 09:19:48 +0000 Subject: [PATCH 055/182] Add CORS support and move Middleware --- billingstack/api/base.py | 17 ++++-- billingstack/api/cors.py | 60 +++++++++++++++++++++ billingstack/api/middleware/__init__.py | 0 billingstack/api/{ => middleware}/errors.py | 0 billingstack/api/v1/resources.py | 16 ++++++ etc/billingstack/api-paste.ini.sample | 2 +- etc/billingstack/billingstack.conf.sample | 3 ++ 7 files changed, 93 insertions(+), 5 deletions(-) create mode 100644 billingstack/api/cors.py create mode 100644 billingstack/api/middleware/__init__.py rename billingstack/api/{ => middleware}/errors.py (100%) diff --git a/billingstack/api/base.py b/billingstack/api/base.py index 1d50bf2..a3cff8b 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -2,21 +2,25 @@ import mimetypes import traceback - from flask import abort, request, Blueprint, Response -from billingstack.openstack.common.wsgi import JSONDictSerializer, \ - XMLDictSerializer, JSONDeserializer - from wsme.types import Base, Enum, UserType, text, Unset, wsproperty from werkzeug.datastructures import MIMEAccept +from oslo.config import cfg +from billingstack.api.cors import crossdomain from billingstack.openstack.common import log +from billingstack.openstack.common.wsgi import JSONDictSerializer, \ + XMLDictSerializer, JSONDeserializer LOG = log.getLogger(__name__) +cfg.CONF.register_opts([ + cfg.StrOpt('allowed_origin', default='*', help='Allowed CORS Origin')]) + + class Property(UserType): """ A Property that just passes the value around... 
@@ -167,14 +171,19 @@ def delete(self, rule, status_code=204): def _mroute(self, methods, rule, status_code=None): if type(methods) is str: methods = [methods] + return self.route(rule, methods=methods, status_code=status_code) def route(self, rule, **options): + """ + Helper function that sets up the route as well as adding CORS.. + """ status = options.pop('status_code', None) def decorator(func): endpoint = options.pop('endpoint', func.__name__) + @crossdomain(origin=cfg.CONF.allowed_origin) def handler(**kwargs): # extract response content type resp_type = request.accept_mimetypes diff --git a/billingstack/api/cors.py b/billingstack/api/cors.py new file mode 100644 index 0000000..7fccb93 --- /dev/null +++ b/billingstack/api/cors.py @@ -0,0 +1,60 @@ +# -*- encoding: utf-8 -*- +## +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +# Copied: http://flask.pocoo.org/snippets/56/ +from datetime import timedelta +from flask import make_response, request, current_app +from functools import update_wrapper + + +def crossdomain(origin=None, methods=None, headers=None, + max_age=21600, attach_to_all=True, + automatic_options=True): + if methods is not None: + methods = ', '.join(sorted(x.upper() for x in methods)) + if headers is not None and not isinstance(headers, basestring): + headers = ', '.join(x.upper() for x in headers) + if not isinstance(origin, basestring): + origin = ', '.join(origin) + if isinstance(max_age, timedelta): + max_age = max_age.total_seconds() + + def get_methods(): + if methods is not None: + return methods + + options_resp = current_app.make_default_options_response() + return options_resp.headers['allow'] + + def decorator(f): + def wrapped_function(*args, **kwargs): + if automatic_options and request.method == 'OPTIONS': + resp = current_app.make_default_options_response() + else: + resp = make_response(f(*args, **kwargs)) + if not attach_to_all and request.method != 'OPTIONS': + return resp + + h = resp.headers + + h['Access-Control-Allow-Origin'] = origin + h['Access-Control-Allow-Methods'] = get_methods() + h['Access-Control-Max-Age'] = str(max_age) + if headers is not None: + h['Access-Control-Allow-Headers'] = headers + return resp + + f.provide_automatic_options = False + return update_wrapper(wrapped_function, f) + return decorator diff --git a/billingstack/api/middleware/__init__.py b/billingstack/api/middleware/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/billingstack/api/errors.py b/billingstack/api/middleware/errors.py similarity index 100% rename from billingstack/api/errors.py rename to billingstack/api/middleware/errors.py diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 3cf5f10..799d524 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -1,3 +1,19 @@ +# -*- 
encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + from flask import request diff --git a/etc/billingstack/api-paste.ini.sample b/etc/billingstack/api-paste.ini.sample index bad0a13..a55b033 100644 --- a/etc/billingstack/api-paste.ini.sample +++ b/etc/billingstack/api-paste.ini.sample @@ -15,7 +15,7 @@ keystone = authtoken keystonecontext faultwrapper bs_core_app_v1 paste.app_factory = billingstack.api.v1:factory [filter:faultwrapper] -paste.filter_factory = billingstack.api.errors:FaultWrapperMiddleware.factory +paste.filter_factory = billingstack.api.middleware.errors:FaultWrapperMiddleware.factory [filter:noauthcontext] paste.filter_factory = billingstack.api.auth:NoAuthContextMiddleware.factory diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index 715dfe4..b15e6f8 100644 --- a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -21,6 +21,9 @@ allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack. # Enabled API Version 1 extensions # #enabled_extensions_v1 = none +# CORS origin +# allowed_origin = * + [service:api] # Address to bind the API server # api_host = 0.0.0.0 From 4d5854353fb240ad3b4d7e4862380d25cb7cc88d Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 13:35:03 +0000 Subject: [PATCH 056/182] Don't bother passing more then the merchant id in... 
--- billingstack/central/rpcapi.py | 5 +-- billingstack/central/service.py | 5 +-- .../storage/impl_sqlalchemy/__init__.py | 7 +-- billingstack/tests/base.py | 9 ++-- billingstack/tests/storage/__init__.py | 44 ++++++++++++++----- 5 files changed, 41 insertions(+), 29 deletions(-) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index cbfcb00..3eb0f40 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -110,10 +110,9 @@ def delete_pg_method(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_pg_method', id_=id_)) # PGC - def create_pg_config(self, ctxt, merchant_id, provider_id, values): + def create_pg_config(self, ctxt, merchant_id, values): return self.call(ctxt, self.make_msg('create_pg_config', - merchant_id=merchant_id, provider_id=provider_id, - values=values)) + merchant_id=merchant_id, values=values)) def list_pg_configs(self, ctxt, criterion=None): return self.call(ctxt, self.make_msg('list_pg_configs', diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 473bca0..9fda6a3 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -124,9 +124,8 @@ def update_pg_method(self, ctxt, id_, values): def delete_pg_method(self, ctxt, id_): return self.storage_conn.delete_pg_method(ctxt, id_) - def create_pg_config(self, ctxt, merchant_id, provider_id, values): - return self.storage_conn.create_pg_config(ctxt, merchant_id, - provider_id, values) + def create_pg_config(self, ctxt, merchant_id, values): + return self.storage_conn.create_pg_config(ctxt, merchant_id, values) def list_pg_configs(self, ctxt, **kw): return self.storage_conn.list_pg_configs(ctxt, **kw) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index e46bddc..bc4a614 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -270,13 +270,11 @@ def 
delete_pg_method(self, ctxt, id_): return self._delete(models.PGMethod, id_) # Payment Gateway Configuration - def create_pg_config(self, ctxt, merchant_id, provider_id, values): + def create_pg_config(self, ctxt, merchant_id, values): merchant = self._get(models.Merchant, merchant_id) - provider = self._get(models.PGProvider, provider_id) row = models.PGConfig(**values) row.merchant = merchant - row.provider = provider self._save(row) return dict(row) @@ -302,12 +300,9 @@ def create_payment_method(self, ctxt, customer_id, values): Configure a PaymentMethod like a CreditCard """ customer = self._get_id_or_name(models.Customer, customer_id) - provider_method = self._get_id_or_name( - models.PGMethod, values['provider_method_id']) row = models.PaymentMethod(**values) row.customer = customer - row.provider_method = provider_method self._save(row) return self._dict(row, extra=['provider_method']) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index a5f680b..116d1d6 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -189,12 +189,12 @@ def create_merchant(self, fixture=0, values={}, **kw): return fixture, self.central_service.create_merchant( ctxt, fixture, **kw) - def create_pg_config(self, merchant_id, provider_id, fixture=0, values={}, + def create_pg_config(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.central_service.create_pg_config( - ctxt, merchant_id, provider_id, fixture, **kw) + ctxt, merchant_id, fixture, **kw) def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) @@ -203,12 +203,11 @@ def create_customer(self, merchant_id, fixture=0, values={}, **kw): return fixture, self.central_service.create_customer( ctxt, merchant_id, fixture, **kw) - def create_payment_method(self, customer_id, provider_method_id, fixture=0, - 
values={}, **kw): + def create_payment_method(self, customer_id, fixture=0, values={}, **kw): fixture = self.get_fixture('payment_method', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.central_service.create_payment_method( - ctxt, customer_id, provider_method_id, fixture, **kw) + ctxt, customer_id, fixture, **kw) def user_add(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('user', fixture, values) diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index 8cab343..ebee9dc 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -66,12 +66,12 @@ def create_merchant(self, fixture=0, values={}, **kw): return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) - def create_pg_config(self, merchant_id, provider_id, fixture=0, values={}, + def create_pg_config(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.storage_conn.create_pg_config( - ctxt, merchant_id, provider_id, fixture, **kw) + ctxt, merchant_id, fixture, **kw) def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) @@ -171,14 +171,20 @@ def test_pg_provider_deregister_missing(self): # Payment Gateway Configuration def test_create_pg_config(self): _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} fixture, data = self.create_pg_config( - self.merchant['id'], provider['id']) + self.merchant['id'], values=values) + self.assertData(fixture, data) def test_get_pg_config(self): _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + fixture, data = self.create_pg_config( - self.merchant['id'], provider['id']) + self.merchant['id'], values=values) def test_get_pg_config_missing(self): 
self.assertMissing(self.storage_conn.get_pg_config, @@ -186,8 +192,11 @@ def test_get_pg_config_missing(self): def test_update_pg_config(self): _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + fixture, data = self.create_pg_config( - self.merchant['id'], provider['id']) + self.merchant['id'], values=values) fixture['properties'] = {"api": 1} updated = self.storage_conn.update_pg_config( @@ -197,16 +206,22 @@ def test_update_pg_config(self): def test_update_pg_config_missing(self): _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + fixture, data = self.create_pg_config( - self.merchant['id'], provider['id']) + self.merchant['id'], values=values) self.assertMissing(self.storage_conn.update_pg_config, self.admin_ctxt, UUID, {}) def test_delete_pg_config(self): _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + fixture, data = self.create_pg_config( - self.merchant['id'], provider['id']) + self.merchant['id'], values=values) self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) self.assertMissing(self.storage_conn.get_pg_config, @@ -220,7 +235,8 @@ def test_delete_pg_config_missing(self): def test_create_payment_method(self): # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - _, config = self.create_pg_config(self.merchant['id'], provider['id']) + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) _, customer = self.create_customer(self.merchant['id']) # Setup PaymentMethod @@ -235,7 +251,8 @@ def test_create_payment_method(self): def test_get_payment_method(self): # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - _, config = self.create_pg_config(self.merchant['id'], provider['id']) + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) _, customer = self.create_customer(self.merchant['id']) # Setup PaymentMethod @@ 
-253,7 +270,8 @@ def test_get_payment_method(self): def test_list_payment_methods(self): # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - _, config = self.create_pg_config(self.merchant['id'], provider['id']) + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) values = { 'provider_method_id': provider['methods'][0]['id'], @@ -285,7 +303,8 @@ def test_get_payment_method_missing(self): def test_update_payment_method(self): # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - _, config = self.create_pg_config(self.merchant['id'], provider['id']) + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) _, customer = self.create_customer(self.merchant['id']) # Setup PaymentMethod @@ -309,7 +328,8 @@ def test_update_payment_method_missing(self): def test_delete_payment_method(self): # Setup pgp / pgm / pgc _, provider = self.pg_provider_register() - _, config = self.create_pg_config(self.merchant['id'], provider['id']) + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) _, customer = self.create_customer(self.merchant['id']) # Setup PaymentMethod From 6de1343598dbcd1e939f4127d4dc54d4ab6d2579 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 14:11:59 +0000 Subject: [PATCH 057/182] LOG before the assert --- billingstack/tests/api/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index 70929ee..6829966 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -124,10 +124,10 @@ def put(self, path, data, headers=None, content_type="application/json", content_type=content_type, headers=headers) - self.assertEqual(response.status_code, status_code) - LOG.debug('PUT RESPONSE: %r' % response.data) + self.assertEqual(response.status_code, status_code) + 
self.load_content(response) return response From 1958e27d719026414f560effe6386ccc3a8b511a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 14:12:16 +0000 Subject: [PATCH 058/182] Fix missing parts of path --- billingstack/api/v1/resources.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 799d524..eff6f43 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -340,7 +340,8 @@ def get_payment_method(merchant_id, customer_id, pm_id): return render(models.PaymentMethod.from_db(row)) -@bp.put('/merchants//customers/') +@bp.put('/merchants//customers//payment-methods/' + '') def update_payment_method(merchant_id, customer_id, pm_id): data = request_data(models.PaymentMethod) From 74144dd606f5b216a70d4f6b6997e7b451bd1a94 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 14:16:58 +0000 Subject: [PATCH 059/182] PaymentMethod tests --- .../tests/api/v1/test_payment_method.py | 104 ++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 billingstack/tests/api/v1/test_payment_method.py diff --git a/billingstack/tests/api/v1/test_payment_method.py b/billingstack/tests/api/v1/test_payment_method.py new file mode 100644 index 0000000..27eb749 --- /dev/null +++ b/billingstack/tests/api/v1/test_payment_method.py @@ -0,0 +1,104 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +""" +Test Products +""" + +import logging + +from billingstack.tests.api.base import FunctionalTest + +LOG = logging.getLogger(__name__) + + +class TestPaymentMethod(FunctionalTest): + __test__ = True + path = "merchants/%s/customers/%s/payment-methods" + + def setUp(self): + super(TestPaymentMethod, self).setUp() + _, self.provider = self.pg_provider_register() + + _, self.customer = self.create_customer(self.merchant['id']) + _, self.pg_config = self.create_pg_config( + self.merchant['id'], values={'provider_id': self.provider['id']}) + + def test_create_payment_method(self): + fixture = self.get_fixture('payment_method') + fixture['provider_method_id'] = self.provider['methods'][0]['id'] + fixture['provider_config_id'] = self.pg_config['id'] + + url = self.path % (self.merchant['id'], self.customer['id']) + + resp = self.post(url, fixture) + + self.assertData(fixture, resp.json) + + def test_list_payment_methods(self): + values = { + 'provider_method_id': self.provider['methods'][0]['id'], + 'provider_config_id': self.pg_config['id'] + } + self.create_payment_method(self.customer['id'], values=values) + + url = self.path % (self.merchant['id'], self.customer['id']) + resp = self.get(url) + + self.assertLen(1, resp.json) + + def test_get_payment_method(self): + values = { + 'provider_method_id': self.provider['methods'][0]['id'], + 'provider_config_id': self.pg_config['id'] + } + _, method = self.create_payment_method( + self.customer['id'], values=values) + + url = self.item_path(self.merchant['id'], + self.customer['id'], method['id']) + + resp = self.get(url) + + self.assertData(resp.json, method) + + def test_update_payment_method(self): + values = { + 'provider_method_id': self.provider['methods'][0]['id'], + 'provider_config_id': self.pg_config['id'] + } + fixture, method = self.create_payment_method( + self.customer['id'], values=values) + + url = 
self.item_path(self.merchant['id'], + self.customer['id'], method['id']) + + expected = dict(fixture, name='test2') + resp = self.put(url, expected) + self.assertData(expected, resp.json) + + def test_delete_payment_method(self): + values = { + 'provider_method_id': self.provider['methods'][0]['id'], + 'provider_config_id': self.pg_config['id'] + } + _, method = self.create_payment_method( + self.customer['id'], values=values) + + url = self.item_path(self.merchant['id'], + self.customer['id'], method['id']) + self.delete(url) + + self.assertLen(0, self.central_service.list_products(self.admin_ctxt)) From d2825a84232d4b972e5bc5194c4a062522e8ca38 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 19:16:34 +0000 Subject: [PATCH 060/182] See comments at http://flask.pocoo.org/snippets/56/ --- billingstack/api/cors.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/billingstack/api/cors.py b/billingstack/api/cors.py index 7fccb93..de56694 100644 --- a/billingstack/api/cors.py +++ b/billingstack/api/cors.py @@ -39,6 +39,7 @@ def get_methods(): def decorator(f): def wrapped_function(*args, **kwargs): + if automatic_options and request.method == 'OPTIONS': resp = current_app.make_default_options_response() else: @@ -56,5 +57,6 @@ def wrapped_function(*args, **kwargs): return resp f.provide_automatic_options = False + f.required_methods = ['OPTIONS'] return update_wrapper(wrapped_function, f) return decorator From 0464e0b16bbbdb50832df4054f1fd1f8191be2a9 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 19:19:16 +0000 Subject: [PATCH 061/182] Add comments --- billingstack/api/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/billingstack/api/base.py b/billingstack/api/base.py index a3cff8b..bc4749f 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -183,6 +183,7 @@ def route(self, rule, **options): def decorator(func): endpoint = options.pop('endpoint', func.__name__) + # NOTE: Wrap the function with CORS 
support. @crossdomain(origin=cfg.CONF.allowed_origin) def handler(**kwargs): # extract response content type From 47c85f74df8e139653abc4daab71dd2c984ea10a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 19:42:03 +0000 Subject: [PATCH 062/182] Add more CORS header stuff --- billingstack/api/base.py | 16 ++++++++++++++-- billingstack/api/cors.py | 1 + 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/billingstack/api/base.py b/billingstack/api/base.py index bc4749f..e3bd6eb 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -18,7 +18,17 @@ cfg.CONF.register_opts([ - cfg.StrOpt('allowed_origin', default='*', help='Allowed CORS Origin')]) + cfg.StrOpt('cors_allowed_origin', default='*', help='Allowed CORS Origin'), + cfg.IntOpt('cors_max_age', default=3600)]) + + +CORS_ALLOW_HEADERS = [ + 'origin', + 'authorization', + 'accept', + 'content-type', + 'x-requested-with' +] class Property(UserType): @@ -184,7 +194,9 @@ def decorator(func): endpoint = options.pop('endpoint', func.__name__) # NOTE: Wrap the function with CORS support. 
- @crossdomain(origin=cfg.CONF.allowed_origin) + @crossdomain(origin=cfg.CONF.cors_allowed_origin, + max_age=cfg.CONF.cors_max_age, + headers=",".join(CORS_ALLOW_HEADERS)) def handler(**kwargs): # extract response content type resp_type = request.accept_mimetypes diff --git a/billingstack/api/cors.py b/billingstack/api/cors.py index de56694..fb9319d 100644 --- a/billingstack/api/cors.py +++ b/billingstack/api/cors.py @@ -50,6 +50,7 @@ def wrapped_function(*args, **kwargs): h = resp.headers h['Access-Control-Allow-Origin'] = origin + h['Access-Control-Allow-Credentials'] = 'true' h['Access-Control-Allow-Methods'] = get_methods() h['Access-Control-Max-Age'] = str(max_age) if headers is not None: From 2b47417b158a077ebfaa1e43d8dbd0df41892774 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 19:43:13 +0000 Subject: [PATCH 063/182] Fix more CORS options --- etc/billingstack/billingstack.conf.sample | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index b15e6f8..0b098bc 100644 --- a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -21,8 +21,9 @@ allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack. 
# Enabled API Version 1 extensions # #enabled_extensions_v1 = none -# CORS origin -# allowed_origin = * +# CORS settings +# cors_allowed_origin = * +# cors_max_age = 3600 [service:api] # Address to bind the API server From 7ae97046f8e56ad5f607aaa61db32f840c12c1b7 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 24 Mar 2013 22:21:13 +0000 Subject: [PATCH 064/182] Not changed after move --- billingstack/tests/api/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index 6829966..037b525 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -17,7 +17,7 @@ Base classes for API tests. """ from billingstack.api.v1 import factory -from billingstack.api.errors import FaultWrapperMiddleware +from billingstack.api.middleware.errors import FaultWrapperMiddleware from billingstack.api.auth import NoAuthContextMiddleware from billingstack.openstack.common import jsonutils as json from billingstack.openstack.common import log From 1721b057d92e81738676690b8fd2f2418b379a3b Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 25 Mar 2013 17:37:25 +0000 Subject: [PATCH 065/182] Change to JSON for pricing --- .../storage/impl_sqlalchemy/models.py | 25 +++---------------- 1 file changed, 3 insertions(+), 22 deletions(-) diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index c6789ea..ae7e6a8 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -14,7 +14,7 @@ from sqlalchemy import Column, ForeignKey, UniqueConstraint from sqlalchemy import Integer, Float from sqlalchemy import DateTime, Unicode -from sqlalchemy.orm import relationship, backref +from sqlalchemy.orm import relationship from sqlalchemy.ext.declarative import declarative_base, declared_attr from billingstack import utils @@ -292,21 +292,6 @@ class InvoiceLine(BASE, BaseMixin): 
onupdate='CASCADE'), nullable=False) -class Pricing(BASE, BaseMixin): - """ - Resembles a Price information in some way - """ - __tablename__ = 'product_pricing' - value_from = Column(Float) - value_to = Column(Float) - price = Column(Float, nullable=False) - - plan_item_id = Column(UUID, ForeignKey('plan_item.id', ondelete='CASCADE', - onupdate='CASCADE')) - product_id = Column(UUID, ForeignKey('product.id', ondelete='CASCADE', - onupdate='CASCADE')) - - class Plan(BASE, BaseMixin): """ A Product collection like a "Virtual Web Cluster" with 10 servers @@ -336,11 +321,7 @@ class PlanProperty(BASE, PropertyMixin): class PlanItem(BASE, BaseMixin): description = Column(Unicode(255)) - price_rules = relationship( - 'Pricing', - backref=backref('plan_items', uselist=False), - lazy='dynamic', cascade='delete, delete-orphan', - passive_deletes=True) + pricing = Column(JSON) plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), onupdate='CASCADE', nullable=False) @@ -358,7 +339,7 @@ class Product(BASE, BaseMixin): title = Column(Unicode(100)) description = Column(Unicode(255)) - price = relationship('Pricing', backref='product', uselist=False) + pricing = Column(JSON) merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), nullable=False) From b3ba622114497c47bc21b8cd898e84d264264fa3 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 26 Mar 2013 23:51:18 +0000 Subject: [PATCH 066/182] Log WSME as well.. 
--- bin/billingstack-api | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/billingstack-api b/bin/billingstack-api index 71c34c0..639349d 100644 --- a/bin/billingstack-api +++ b/bin/billingstack-api @@ -30,6 +30,7 @@ eventlet.monkey_patch() utils.read_config('billingstack', sys.argv) logging.setup('billingstack') +logging.setup('wsme') launcher = service.launch(api_service.Service(), cfg.CONF['service:api'].workers) From 754847ece13becb10fd823f6d90c3e56cba56621 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 27 Mar 2013 14:22:51 +0000 Subject: [PATCH 067/182] Multiple API fixes. * Remove _query_to_kwargs * Use WSME for deserialization and serialization. --- billingstack/api/base.py | 105 ++---- billingstack/api/{cors.py => utils.py} | 9 +- billingstack/api/v1/models.py | 12 + billingstack/api/v1/resources.py | 354 +++++++++--------- billingstack/central/rpcapi.py | 5 +- billingstack/central/service.py | 4 +- .../storage/impl_sqlalchemy/__init__.py | 5 +- 7 files changed, 235 insertions(+), 259 deletions(-) rename billingstack/api/{cors.py => utils.py} (91%) diff --git a/billingstack/api/base.py b/billingstack/api/base.py index e3bd6eb..b43215b 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -1,4 +1,4 @@ -import inspect +import functools import mimetypes import traceback @@ -8,7 +8,7 @@ from oslo.config import cfg -from billingstack.api.cors import crossdomain +from billingstack.api import utils from billingstack.openstack.common import log from billingstack.openstack.common.wsgi import JSONDictSerializer, \ XMLDictSerializer, JSONDeserializer @@ -91,48 +91,6 @@ def as_dict(self): } -def _query_to_kwargs(query, db_func): - # TODO(dhellmann): This function needs tests of its own. 
- valid_keys = inspect.getargspec(db_func)[0] - if 'self' in valid_keys: - valid_keys.remove('self') - translation = {'user_id': 'user', - 'project_id': 'project', - 'resource_id': 'resource'} - stamp = {} - trans = {} - metaquery = {} - for i in query: - if i.field == 'timestamp': - # FIXME(dhellmann): This logic is not consistent with the - # way the timestamps are treated inside the mongo driver - # (the end timestamp is always tested using $lt). We - # should just pass a single timestamp through to the - # storage layer with the operator and let the storage - # layer use that operator. - if i.op in ('lt', 'le'): - stamp['end_timestamp'] = i.value - elif i.op in ('gt', 'ge'): - stamp['start_timestamp'] = i.value - else: - LOG.warn('_query_to_kwargs ignoring %r unexpected op %r"' % - (i.field, i.op)) - else: - if i.op != 'eq': - LOG.warn('_query_to_kwargs ignoring %r unimplemented op %r' % - (i.field, i.op)) - elif i.field == 'search_offset': - stamp['search_offset'] = i.value - elif i.field.startswith('metadata.'): - metaquery[i.field] = i.value - else: - trans[translation.get(i.field, i.field)] = i.value - - kwargs = {} - if metaquery and 'metaquery' in valid_keys: - kwargs['metaquery'] = metaquery - - class ModelBase(Base): def as_dict(self): """ @@ -166,25 +124,37 @@ def from_db(cls, values): class Rest(Blueprint): - def get(self, rule, status_code=200): - return self._mroute('GET', rule, status_code) + """ + Helper to do stuff + """ + def get(self, rule, status_code=200, **kw): + return self._mroute('GET', rule, status_code, **kw) - def post(self, rule, status_code=202): - return self._mroute('POST', rule, status_code) + def post(self, rule, status_code=202, **kw): + return self._mroute('POST', rule, status_code, **kw) - def put(self, rule, status_code=202): - return self._mroute('PUT', rule, status_code) + def put(self, rule, status_code=202, **kw): + return self._mroute('PUT', rule, status_code, **kw) - def delete(self, rule, status_code=204): - return 
self._mroute('DELETE', rule, status_code) + def delete(self, rule, status_code=204, **kw): + return self._mroute('DELETE', rule, status_code, **kw) - def _mroute(self, methods, rule, status_code=None): + def _mroute(self, methods, rule, status_code=None, **kw): if type(methods) is str: methods = [methods] - return self.route(rule, methods=methods, status_code=status_code) + return self.route(rule, methods=methods, status_code=status_code, + **kw) - def route(self, rule, **options): + def guess_response_type(self, type_suffix=None): + """ + Get the MIME type based on keywords / request + """ + if type_suffix: + response_type = mimetypes.guess_type("res." + type_suffix)[0] + request.response_type = response_type + + def route(self, rule, sig_args=[], sig_kw={}, **options): """ Helper function that sets up the route as well as adding CORS.. """ @@ -193,19 +163,17 @@ def route(self, rule, **options): def decorator(func): endpoint = options.pop('endpoint', func.__name__) + if 'body' in options and 'body' not in sig_kw: + sig_kw['body'] = options['body'] + # NOTE: Wrap the function with CORS support. - @crossdomain(origin=cfg.CONF.cors_allowed_origin, - max_age=cfg.CONF.cors_max_age, - headers=",".join(CORS_ALLOW_HEADERS)) - def handler(**kwargs): + @utils.crossdomain(origin=cfg.CONF.cors_allowed_origin, + max_age=cfg.CONF.cors_max_age, + headers=",".join(CORS_ALLOW_HEADERS)) + @functools.wraps(func) + def handler(**kw): # extract response content type - resp_type = request.accept_mimetypes - type_suffix = kwargs.pop('resp_type', None) - if type_suffix: - suffix_mime = mimetypes.guess_type("res." 
+ type_suffix)[0] - if suffix_mime: - resp_type = MIMEAccept([(suffix_mime, 1)]) - request.resp_type = resp_type + self.guess_response_type(kw.pop('response_type', None)) # NOTE: Extract fields (column selection) fields = list(set(request.args.getlist('fields'))) @@ -215,16 +183,15 @@ def handler(**kwargs): if status: request.status_code = status - return func(**kwargs) + return func(**kw) #_rule = "/" + rule # NOTE: Add 2 set of rules, 1 with response content type and one wo self.add_url_rule(rule, endpoint, handler, **options) - rtype_rule = rule + '.' + rtype_rule = rule + '.' self.add_url_rule(rtype_rule, endpoint, handler, **options) return func - return decorator diff --git a/billingstack/api/cors.py b/billingstack/api/utils.py similarity index 91% rename from billingstack/api/cors.py rename to billingstack/api/utils.py index fb9319d..85346fe 100644 --- a/billingstack/api/cors.py +++ b/billingstack/api/utils.py @@ -15,7 +15,7 @@ # Copied: http://flask.pocoo.org/snippets/56/ from datetime import timedelta from flask import make_response, request, current_app -from functools import update_wrapper +import functools def crossdomain(origin=None, methods=None, headers=None, @@ -38,12 +38,11 @@ def get_methods(): return options_resp.headers['allow'] def decorator(f): - def wrapped_function(*args, **kwargs): - + def wrapped_function(*args, **kw): if automatic_options and request.method == 'OPTIONS': resp = current_app.make_default_options_response() else: - resp = make_response(f(*args, **kwargs)) + resp = make_response(f(*args, **kw)) if not attach_to_all and request.method != 'OPTIONS': return resp @@ -59,5 +58,5 @@ def wrapped_function(*args, **kwargs): f.provide_automatic_options = False f.required_methods = ['OPTIONS'] - return update_wrapper(wrapped_function, f) + return functools.update_wrapper(wrapped_function, f) return decorator diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 62e28ed..7311c75 100644 --- 
a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -100,6 +100,14 @@ class Product(DescribedBase): properties = DictType(key_type=text, value_type=property_type) +class InvoiceLine(Base): + description = text + price = float + quantity = float + sub_total = float + invoice_id = text + + class Invoice(Base): identifier = text sub_total = float @@ -118,6 +126,10 @@ class Subscription(Base): payment_method_id = text +class Usage(Base): + pass + + class PGConfig(Base): name = text title = text diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index eff6f43..34bf608 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -21,50 +21,49 @@ from billingstack.api.v1 import models from billingstack.central.rpcapi import central_api - -bp = Rest('v1', __name__) +from wsmeext.flask import signature -@bp.get('/') -def index(): - return render() +bp = Rest('v1', __name__) # Currencies @bp.post('/currencies') -def create_currency(): +@signature(models.Currency, body=models.Currency) +def create_currency(body): data = request_data(models.Currency) row = central_api.create_currency(request.environ['context'], data) - - return render(models.Currency.from_db(row)) + return models.Currency.from_db(row) + #return render(models.Currency.from_db(row)) @bp.get('/currencies') +@signature([models.Currency]) def list_currencies(): rows = central_api.list_currencies(request.environ['context']) - return render([models.Currency.from_db(r) for r in rows]) + return map(models.Currency.from_db, rows) @bp.get('/currencies/') +@signature(models.Currency, str) def get_currency(currency_id): row = central_api.get_currency(request.environ['context'], currency_id) - return render(models.Currency.from_db(row)) + return models.Currency.from_db(row) @bp.put('/currencies/') -def update_currency(currency_id): - data = request_data(models.Currency) - +@signature(models.Currency, str, body=models.Currency) +def 
update_currency(currency_id, body): row = central_api.update_currency( request.environ['context'], currency_id, - data) + body.to_db()) - return render(models.Currency.from_db(row)) + return models.Currency.from_db(row) @bp.delete('/currencies/') @@ -75,39 +74,40 @@ def delete_currency(currency_id): # Language @bp.post('/languages') -def create_language(): - data = request_data(models.Language) +@signature(models.Language, body=models.Language) +def create_language(body): + row = central_api.create_language(request.environ['context'], + body.to_db()) - row = central_api.create_language(request.environ['context'], data) - - return render(models.Language.from_db(row)) + return models.Language.from_db(row) @bp.get('/languages') +@signature([models.Language]) def list_languages(): rows = central_api.list_languages(request.environ['context']) - return render([models.Language.from_db(r) for r in rows]) + return map(models.Language.from_db, rows) @bp.get('/languages/') +@signature(models.Language, str) def get_language(language_id): row = central_api.get_language(request.environ['context'], language_id) - return render(models.Language.from_db(row)) + return models.Language.from_db(row) @bp.put('/languages/') -def update_language(language_id): - data = request_data(models.Language) - +@signature(models.Language, str, body=models.Language) +def update_language(language_id, body): row = central_api.update_language( request.environ['context'], language_id, - data) + body.to_db()) - return render(models.Language.from_db(row)) + return models.Language.from_db(row) @bp.delete('/languages/') @@ -118,54 +118,57 @@ def delete_language(language_id): # PGP / PGM @bp.get('/payment-gateway-providers') +@signature([models.PGProvider]) def list_pg_providers(): rows = central_api.list_pg_providers(request.environ['context']) - return render([models.PGProvider.from_db(r) for r in rows]) + return map(models.PGProvider.from_db, rows) @bp.get('/payment-gateway-providers//methods') -def 
list_pg_methods(): +@signature([models.PGMethod], str) +def list_pg_methods(pgp_id): rows = central_api.list_pg_methods(request.environ['context']) - return render([models.PGMethod.from_db(r) for r in rows]) + return map(models.PGMethod.from_db, rows) # invoice_states @bp.post('/invoice-states') -def create_invoice_state(): - data = request_data(models.InvoiceState) - - row = central_api.create_invoice_state(request.environ['context'], data) +@signature(models.InvoiceState, body=models.InvoiceState) +def create_invoice_state(body): + row = central_api.create_invoice_state( + request.environ['context'], body.to_db()) - return render(models.InvoiceState.from_db(row)) + return models.InvoiceState.from_db(row) @bp.get('/invoice-states') +@signature([models.InvoiceState]) def list_invoice_states(): rows = central_api.list_invoice_states(request.environ['context']) - return render([models.InvoiceState.from_db(r) for r in rows]) + return map(models.InvoiceState.from_db, rows) @bp.get('/invoice-states/') +@signature(models.InvoiceState, str) def get_invoice_state(state_id): row = central_api.get_invoice_state(request.environ['context'], state_id) - return render(models.InvoiceState.from_db(row)) + return models.InvoiceState.from_db(row) @bp.put('/invoice-states/') -def update_invoice_state(state_id): - data = request_data(models.InvoiceState) - +@signature(models.InvoiceState, str, body=models.InvoiceState) +def update_invoice_state(state_id, body): row = central_api.update_invoice_state( request.environ['context'], state_id, - data) + body.to_db()) - return render(models.InvoiceState.from_db(row)) + return models.InvoiceState.from_db(row) @bp.delete('/invoice-states/') @@ -178,39 +181,40 @@ def delete_invoice_state(state_id): # merchants @bp.post('/merchants') -def create_merchant(): - data = request_data(models.Merchant) +@signature(models.Merchant, body=models.Merchant) +def create_merchant(body): + row = central_api.create_merchant(request.environ['context'], + 
body.to_db()) - row = central_api.create_merchant(request.environ['context'], data) - - return render(models.Merchant.from_db(row)) + return models.Merchant.from_db(row) @bp.get('/merchants') +@signature([models.Merchant]) def list_merchants(): rows = central_api.list_merchants(request.environ['context']) - return render([models.Merchant.from_db(r) for r in rows]) + return map(models.Merchant.from_db, rows) @bp.get('/merchants/') +@signature(models.Merchant, str) def get_merchant(merchant_id): row = central_api.get_merchant(request.environ['context'], merchant_id) - return render(models.Merchant.from_db(row)) + return models.Merchant.from_db(row) @bp.put('/merchants/') -def update_merchant(merchant_id): - data = request_data(models.Merchant) - +@signature(models.Merchant, str, body=models.Merchant) +def update_merchant(merchant_id, body): row = central_api.update_merchant( request.environ['context'], merchant_id, - data) + body.to_db()) - return render(models.Merchant.from_db(row)) + return models.Merchant.from_db(row) @bp.delete('/merchants/') @@ -221,41 +225,41 @@ def delete_merchant(merchant_id): # Invoices @bp.post('/merchants//invoices') -def create_payment_gateway(merchant_id): - data = request_data(models.Invoice) - +@signature(models.PGConfig, str, body=models.PGConfig) +def create_payment_gateway(merchant_id, body): row = central_api.create_pg_config( request.environ['context'], merchant_id, - data) + body.to_db()) - return render(models.Invoice.from_db(row)) + return models.PGConfig.from_db(row) @bp.get('/merchants//payment-gateways') +@signature([models.PGConfig], str) def list_payment_gateways(merchant_id): rows = central_api.list_pg_config(request.environ['context']) - return render([models.Invoice.from_db(r) for r in rows]) + return map(models.PGConfig.from_db, rows) @bp.get('/merchants//payment-gateways/') +@signature(models.PGConfig, str, str) def get_payment_gateway(merchant_id, pg_config_id): row = 
central_api.get_pg_config(request.environ['context'], pg_config_id) - return render(models.Invoice.from_db(row)) + return models.PGConfig.from_db(row) @bp.put('/merchants//payment-gateways/') -def update_payment_gateway(merchant_id, pg_config_id): - data = request_data(models.Invoice) - +@signature(models.PGConfig, str, str, body=models.PGConfig) +def update_payment_gateway(merchant_id, pg_config_id, body): row = central_api.update_pg_config( request.environ['context'], pg_config_id, - data) + body.to_db()) - return render(models.Invoice.from_db(row)) + return models.PGConfig.from_db(row) @bp.delete('/merchants//payment-gateways/') @@ -268,42 +272,42 @@ def delete_pg_config(merchant_id, pg_config_id): # customers @bp.post('/merchants//customers') -def create_customer(merchant_id): - data = request_data(models.Customer) - +@signature(models.Customer, str, body=models.Customer) +def create_customer(merchant_id, body): row = central_api.create_customer( request.environ['context'], merchant_id, - data) + body.to_db()) - return render(models.Customer.from_db(row)) + return models.Customer.from_db(row) @bp.get('/merchants//customers') +@signature([models.Customer], str) def list_customers(merchant_id): rows = central_api.list_customers(request.environ['context']) - return render([models.Customer.from_db(r) for r in rows]) + return map(models.Customer.from_db, rows) @bp.get('/merchants//customers/') +@signature(models.Customer, str, str) def get_customer(merchant_id, customer_id): row = central_api.get_customer(request.environ['context'], customer_id) - return render(models.Customer.from_db(row)) + return models.Customer.from_db(row) @bp.put('/merchants//customers/') -def update_customer(merchant_id, customer_id): - data = request_data(models.Customer) - +@signature(models.Customer, str, str, body=models.Customer) +def update_customer(merchant_id, customer_id, body): row = central_api.update_customer( request.environ['context'], customer_id, - data) + body.to_db()) - 
return render(models.Customer.from_db(row)) + return models.Customer.from_db(row) @bp.delete('/merchants//customers/') @@ -314,41 +318,41 @@ def delete_customer(merchant_id, customer_id): # PaymentMethods @bp.post('/merchants//customers//payment-methods') -def create_payment_method(merchant_id, customer_id): - data = request_data(models.PaymentMethod) - +@signature(models.PaymentMethod, str, str, body=models.PaymentMethod) +def create_payment_method(merchant_id, customer_id, body): row = central_api.create_payment_method( request.environ['context'], customer_id, - data) + body.to_db()) - return render(models.PaymentMethod.from_db(row)) + return models.PaymentMethod.from_db(row) @bp.get('/merchants//customers//payment-methods') +@signature([models.PaymentMethod], str, str) def list_payment_methods(merchant_id, customer_id): rows = central_api.list_payment_methods(request.environ['context']) - return render([models.PaymentMethod.from_db(r) for r in rows]) + return map(models.PaymentMethod.from_db, rows) @bp.get('/merchants//customers//payment-methods/' '') +@signature(models.PaymentMethod, str, str, str) def get_payment_method(merchant_id, customer_id, pm_id): row = central_api.get_payment_method(request.environ['context'], pm_id) - return render(models.PaymentMethod.from_db(row)) + return models.PaymentMethod.from_db(row) @bp.put('/merchants//customers//payment-methods/' '') -def update_payment_method(merchant_id, customer_id, pm_id): - data = request_data(models.PaymentMethod) - +@signature(models.PaymentMethod, str, str, str, body=models.PaymentMethod) +def update_payment_method(merchant_id, customer_id, pm_id, body): row = central_api.update_payment_method(request.environ['context'], pm_id, - data) + body.to_db()) - return render(models.PaymentMethod.from_db(row)) + return models.PaymentMethod.from_db(row) @bp.delete('/merchants//customers//payment-methods/' @@ -360,42 +364,42 @@ def delete_payment_method(merchant_id, customer_id, pm_id): # Plans 
@bp.post('/merchants//plans') -def create_plan(merchant_id): - data = request_data(models.Plan) - +@signature(models.Plan, str, body=models.Plan) +def create_plan(merchant_id, body): row = central_api.create_plan( request.environ['context'], merchant_id, - data) + body.to_db()) - return render(models.Plan.from_db(row)) + return models.Plan.from_db(row) @bp.get('/merchants//plans') +@signature([models.Plan], str) def list_plans(merchant_id): rows = central_api.list_plans(request.environ['context']) - return render([models.Plan.from_db(r) for r in rows]) + return map(models.Plan.from_db, rows) @bp.get('/merchants//plans/') +@signature(models.Plan, str, str) def get_plan(merchant_id, plan_id): row = central_api.get_plan(request.environ['context'], plan_id) - return render(models.Plan.from_db(row)) + return models.Plan.from_db(row) @bp.put('/merchants//plans/') -def update_plan(merchant_id, plan_id): - data = request_data(models.Plan) - +@signature(models.Plan, str, str, body=models.Plan) +def update_plan(merchant_id, plan_id, body): row = central_api.update_plan( request.environ['context'], plan_id, - data) + body.to_db()) - return render(models.Plan.from_db(row)) + return models.Plan.from_db(row) @bp.delete('/merchants//plans/') @@ -406,42 +410,42 @@ def delete_plan(merchant_id, plan_id): # Products @bp.post('/merchants//products') -def create_product(merchant_id): - data = request_data(models.Product) - +@signature(models.Product, str, body=models.Product) +def create_product(merchant_id, body): row = central_api.create_product( request.environ['context'], merchant_id, - data) + body.to_db()) - return render(models.Product.from_db(row)) + return models.Product.from_db(row) @bp.get('/merchants//products') +@signature([models.Product], str) def list_products(merchant_id): rows = central_api.list_products(request.environ['context']) - return render([models.Product.from_db(r) for r in rows]) + return map(models.Product.from_db, rows) @bp.get('/merchants//products/') 
+@signature(models.Product, str, str) def get_product(merchant_id, product_id): row = central_api.get_product(request.environ['context'], product_id) - return render(models.Product.from_db(row)) + return models.Product.from_db(row) @bp.put('/merchants//products/') -def update_product(merchant_id, product_id): - data = request_data(models.Product) - +@signature(models.Product, str, str, body=models.Product) +def update_product(merchant_id, product_id, body): row = central_api.update_product( request.environ['context'], product_id, - data) + body.to_db()) - return render(models.Product.from_db(row)) + return models.Product.from_db(row) @bp.delete('/merchants//products/') @@ -452,82 +456,88 @@ def delete_product(merchant_id, product_id): # Invoices @bp.post('/merchants//invoices') -def create_invoice(merchant_id): - data = request_data(models.Invoice) - +@signature(models.Invoice, str, body=models.Invoice) +def create_invoice(merchant_id, body): row = central_api.create_invoice( request.environ['context'], merchant_id, - data) + body.to_db()) - return render(models.Invoice.from_db(row)) + return models.Invoice.from_db(row) @bp.get('/merchants//invoices') +@signature([models.InvoiceState], str) def list_invoices(merchant_id): rows = central_api.list_invoices(request.environ['context']) - return render([models.Invoice.from_db(r) for r in rows]) + return map(models.Invoice.from_db, rows) @bp.get('/merchants//invoices/') +@signature(models.Invoice, str, str) def get_invoice(merchant_id, invoice_id): row = central_api.get_invoice(request.environ['context'], invoice_id) - return render(models.Invoice.from_db(row)) + return models.Invoice.from_db(row) @bp.put('/merchants//invoices/') -def update_invoice(merchant_id, invoice_id): - data = request_data(models.Invoice) - +@signature(models.Invoice, str, str, body=models.Invoice) +def update_invoice(merchant_id, invoice_id, body): row = central_api.update_invoice( request.environ['context'], invoice_id, - data) + body.to_db()) - 
return render(models.Invoice.from_db(row)) + return models.Invoice.from_db(row) + + +@bp.delete('/merchants//invoices/') +def delete_invoice(merchant_id, invoice_id): + central_api.delete_invoice(request.environ['context'], invoice_id) + return render() # Products @bp.post('/merchants//invoices//lines') -def create_invoice_line(merchant_id, invoice_id): - data = request_data(models.Product) - +@signature(models.InvoiceLine, str, str, body=models.InvoiceLine) +def create_invoice_line(merchant_id, invoice_id, body): row = central_api.create_invoice_line( request.environ['context'], invoice_id, - data) + body.to_db()) - return render(models.Product.from_db(row)) + return models.Product.from_db(row) @bp.get('/merchants//invoices//lines') +@signature([models.InvoiceLine], str, str) def list_invoice_lines(merchant_id, invoice_id): rows = central_api.list_invoice_lines(request.environ['context']) - return render([models.Product.from_db(r) for r in rows]) + return map(models.Product.from_db, rows) @bp.get('/merchants//invoices//lines/') +@signature(models.InvoiceLine, str, str, str) def get_invoice_line(merchant_id, invoice_id, line_id): row = central_api.get_invoice_line(request.environ['context'], line_id) - return render(models.Product.from_db(row)) + return models.Product.from_db(row) @bp.put('/merchants//invoices//lines/') -def update_invoice_line(merchant_id, invoice_id, line_id): - data = request_data(models.Product) - +@signature(models.InvoiceLine, str, str, str, body=models.InvoiceLine) +def update_invoice_line(merchant_id, invoice_id, line_id, body): row = central_api.update_invoice_line( request.environ['context'], line_id, - data) + body.as_dict()) - return render(models.Product.from_db(row)) + return models.Product.from_db(row) @bp.delete('/merchants//invoices//lines/') @@ -536,40 +546,36 @@ def delete_invoice_line(merchant_id, invoice_id, line_id): return render() -@bp.delete('/merchants//invoices/') -def delete_invoice(merchant_id, invoice_id): - 
central_api.delete_invoice(request.environ['context'], invoice_id) - return render() - - # Subscription @bp.post('/merchants//subscriptions') -def create_subscription(merchant_id): - data = request_data(models.Subscription) - +@signature(models.Subscription, str, body=models.Subscription) +def create_subscription(merchant_id, body): row = central_api.create_subscription( request.environ['context'], - data) + body.to_db()) - return render(models.Subscription.from_db(row)) + return models.Subscription.from_db(row) @bp.get('/merchants//subscriptions') +@signature(models.Subscription, str) def list_subscriptions(merchant_id): rows = central_api.list_subscriptions(request.environ['context']) - return render([models.Subscription.from_db(r) for r in rows]) + return map(models.Subscription.from_db, rows) @bp.get('/merchants//subscriptions/') +@signature(models.Subscription, str, str) def get_subscription(merchant_id, subscription_id): row = central_api.get_subscription(request.environ['context'], subscription_id) - return render(models.Subscription.from_db(row)) + return models.Subscription.from_db(row) @bp.put('/merchants//subscriptions/') +@signature(models.Subscription, str, str, body=models.Subscription) def update_subscription(merchant_id, subscription_id): data = request_data(models.Subscription) @@ -578,7 +584,7 @@ def update_subscription(merchant_id, subscription_id): subscription_id, data) - return render(models.Subscription.from_db(row)) + return models.Subscription.from_db(row) @bp.delete('/merchants//subscriptions/') @@ -590,50 +596,46 @@ def delete_subscription(merchant_id, subscription_id): # Usage -@bp.post('/merchants//subscriptions//usage') -def create_usage(merchant_id, subscription_id): - data = request_data(models.Usage) - +@bp.post('/merchants//usage') +@signature(models.Usage, str, body=models.Usage) +def create_usage(merchant_id, body): row = central_api.create_usage( request.environ['context'], - subscription_id, - data) + body.to_db()) - return 
render(models.Usage.from_db(row)) + return models.Usage.from_db(row) -@bp.get('/merchants//subscriptions//usage') -def list_usages(merchant_id, subscription_id): +@bp.get('/merchants//usage') +@signature([models.Usage], str) +def list_usages(merchant_id): rows = central_api.list_usages(request.environ['context']) - return render([models.Usage.from_db(r) for r in rows]) + return map(models.Usage.from_db, rows) -@bp.get('/merchants//subscriptions/subscription_id>/usage/' - '') -def get_usage(merchant_id, subscription_id, usage_id): +@bp.get('/merchants//usage/') +@signature([models.Usage], str, str) +def get_usage(merchant_id, usage_id): row = central_api.get_usage(request.environ['context'], usage_id) - return render(models.Invoice.from_db(row)) + return models.Invoice.from_db(row) -@bp.put('/merchants//subscriptions//usage/' - '') -def update_usage(merchant_id, subscription_id, usage_id): - data = request_data(models.Usage) - +@bp.put('/merchants//usage/') +@signature(models.Usage, str, str, body=models.Usage) +def update_usage(merchant_id, usage_id, body): row = central_api.update_usage( request.environ['context'], usage_id, - data) + body.to_db()) - return render(models.Usage.from_db(row)) + return models.Usage.from_db(row) -@bp.delete('/merchants//subscriptions//usage/' - '') -def delete_usage(merchant_id, subscription_id, usage_id): +@bp.delete('/merchants//usage/') +def delete_usage(merchant_id, usage_id): central_api.delete_usage( request.environ['context'], usage_id) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 3eb0f40..d64c79c 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -300,9 +300,8 @@ def delete_subscription(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_subscription', id_=id_)) # Subscriptions - def create_usage(self, ctxt, subscription_id, values): - return self.call(ctxt, self.make_msg('create_usage', - subscription_id=subscription_id, values=values)) + def 
create_usage(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_usage', values=values)) def list_usages(self, ctxt, criterion=None): return self.call(ctxt, self.make_msg('list_usages', diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 9fda6a3..6e9f18d 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -277,8 +277,8 @@ def update_subscription(self, ctxt, id_, values): def delete_subscription(self, ctxt, id_): return self.storage_conn.delete_subscription(ctxt, id_) - def create_usage(self, ctxt, subscription_id, values): - return self.storage_conn.create_usage(ctxt, subscription_id, values) + def create_usage(self, ctxt, values): + return self.storage_conn.create_usage(ctxt, values) def list_usages(self, ctxt, **kw): return self.storage_conn.list_usages(ctxt, **kw) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index bc4a614..9ca0fa3 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -726,17 +726,14 @@ def delete_subscription(self, ctxt, id_): def _usage(self, row): return dict(row) - def create_usage(self, ctxt, subscription_id, values): + def create_usage(self, ctxt, values): """ Add a new Usage :param subscription_id: The Subscription :param values: Values describing the new Subscription """ - subscription = self._get(models.Subscription, subscription_id) - usage = models.Usage(**values) - usage.subscription = subscription self._save(usage) return self._usage(usage) From 26c372a2a273ddc05b5fc129d728e4db54edf227 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 27 Mar 2013 20:49:42 +0000 Subject: [PATCH 068/182] Always return the row --- billingstack/sqlalchemy/api.py | 1 + 1 file changed, 1 insertion(+) diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index dfc4e0f..60854bd 100644 --- 
a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -141,6 +141,7 @@ def _save(self, row, save=True): row.save(self.session) except exceptions.Duplicate: raise + return row def _list(self, cls=None, query=None, criterion=None): """ From 33914cf87de7f5ce08a1e4b5de531889922a9513 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 27 Mar 2013 20:58:14 +0000 Subject: [PATCH 069/182] Typo fixes and add support for planitems. --- billingstack/api/v1/models.py | 8 +++++++ billingstack/api/v1/resources.py | 24 +++++++++++++++++-- billingstack/central/service.py | 2 +- .../storage/impl_sqlalchemy/__init__.py | 13 ++++++++++ .../storage/impl_sqlalchemy/models.py | 2 -- 5 files changed, 44 insertions(+), 5 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 7311c75..d1538505 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -96,8 +96,16 @@ class Plan(DescribedBase): properties = DictType(key_type=text, value_type=property_type) +class PlanItem(Base): + plan_id = text + product_id = text + + pricing = DictType(key_type=text, value_type=property_type) + + class Product(DescribedBase): properties = DictType(key_type=text, value_type=property_type) + pricing = DictType(key_type=text, value_type=property_type) class InvoiceLine(Base): diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 34bf608..c83880b 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -238,7 +238,7 @@ def create_payment_gateway(merchant_id, body): @bp.get('/merchants//payment-gateways') @signature([models.PGConfig], str) def list_payment_gateways(merchant_id): - rows = central_api.list_pg_config(request.environ['context']) + rows = central_api.list_pg_configs(request.environ['context']) return map(models.PGConfig.from_db, rows) @@ -408,6 +408,26 @@ def delete_plan(merchant_id, plan_id): return render() +# Plan Item 
+@bp.put('/merchants//plans//items/') +@signature(models.PlanItem, str, str, str) +def add_plan_item(merchant_id, plan_id, product_id): + values = { + 'plan_id': plan_id, + 'product_id': product_id + } + + row = central_api.create_plan_item(request.environ['context'], values) + + return models.PlanItem.from_db(row) + + +@bp.put('/merchants//plans//items/') +def delete_plan_item(merchant_id, plan_id, product_id): + central_api.delete_plan_item(request.environ['context'], + plan_id, product_id) + + # Products @bp.post('/merchants//products') @signature(models.Product, str, body=models.Product) @@ -558,7 +578,7 @@ def create_subscription(merchant_id, body): @bp.get('/merchants//subscriptions') -@signature(models.Subscription, str) +@signature([models.Subscription], str) def list_subscriptions(merchant_id): rows = central_api.list_subscriptions(request.environ['context']) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 6e9f18d..ecc9b1c 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -201,7 +201,7 @@ def delete_plan(self, ctxt, id_): return self.storage_conn.delete_plan(ctxt, id_) def create_plan_item(self, ctxt, values): - return self.storage_conn.create_plan(ctxt, values) + return self.storage_conn.create_plan_item(ctxt, values) def update_plan_item(self, ctxt, id_, values): return self.storage_conn.update_plan_item(ctxt, id_, values) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 9ca0fa3..2f0cb0d 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -461,6 +461,8 @@ def delete_plan(self, ctxt, id_): # PlanItem def create_plan_item(self, ctxt, values, save=True): + import ipdb + ipdb.set_trace() ref = models.PlanItem() return self._update_plan_item(ref, values, save=save) @@ -482,6 +484,17 @@ def get_plan_item(self, ctxt, id_): def delete_plan_item(self, 
ctxt, id_): self._delete(models.PlanItem, id_) + def remove_plan_product(self, ctxt, plan_id, product_id): + """ + Remove a Product from a Plan by deleting the PlanItem. + + :param plan_id: The Plan's ID. + :param product_id: The Product's ID. + """ + query = self.session.query(models.PlanItem).\ + filter_by(plan_id=plan_id, product_id=product_id) + query.delete() + # Products def _product(self, row): product = dict(row) diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index ae7e6a8..cfebd3c 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -319,8 +319,6 @@ class PlanProperty(BASE, PropertyMixin): class PlanItem(BASE, BaseMixin): - description = Column(Unicode(255)) - pricing = Column(JSON) plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), From 0537e9503cbb8088b8f6f56e85a96c528b307cee Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 27 Mar 2013 22:19:00 +0000 Subject: [PATCH 070/182] Remove IPDB and ensure uniqueness of PlanItem --- billingstack/storage/impl_sqlalchemy/__init__.py | 2 -- billingstack/storage/impl_sqlalchemy/models.py | 2 ++ 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 2f0cb0d..264c8c6 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -461,8 +461,6 @@ def delete_plan(self, ctxt, id_): # PlanItem def create_plan_item(self, ctxt, values, save=True): - import ipdb - ipdb.set_trace() ref = models.PlanItem() return self._update_plan_item(ref, values, save=save) diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index cfebd3c..024a013 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -319,6 +319,8 
@@ class PlanProperty(BASE, PropertyMixin): class PlanItem(BASE, BaseMixin): + __table_args__ = (UniqueConstraint('plan_id', 'product_id', name='item'),) + pricing = Column(JSON) plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), From 3e794b3456bc95ce3f089c8ba06eb53722d42c80 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 28 Mar 2013 13:44:31 +0000 Subject: [PATCH 071/182] Add filtering alike that Ceilometer uses --- billingstack/api/v1/resources.py | 139 ++++++++++++++++++++----------- 1 file changed, 92 insertions(+), 47 deletions(-) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index c83880b..b26dd79 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -17,7 +17,7 @@ from flask import request -from billingstack.api.base import Rest, render, request_data +from billingstack.api.base import Rest, Query, render, request_data from billingstack.api.v1 import models from billingstack.central.rpcapi import central_api @@ -39,9 +39,12 @@ def create_currency(body): @bp.get('/currencies') -@signature([models.Currency]) -def list_currencies(): - rows = central_api.list_currencies(request.environ['context']) +@signature([models.Currency], [Query]) +def list_currencies(q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_currencies( + request.environ['context'], criterion=criterion) return map(models.Currency.from_db, rows) @@ -83,9 +86,12 @@ def create_language(body): @bp.get('/languages') -@signature([models.Language]) -def list_languages(): - rows = central_api.list_languages(request.environ['context']) +@signature([models.Language], [Query]) +def list_languages(q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_languages( + request.environ['context'], criterion=criterion) return map(models.Language.from_db, rows) @@ -118,17 +124,23 @@ def delete_language(language_id): # PGP / PGM @bp.get('/payment-gateway-providers') 
-@signature([models.PGProvider]) -def list_pg_providers(): - rows = central_api.list_pg_providers(request.environ['context']) +@signature([models.PGProvider], [Query]) +def list_pg_providers(q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_pg_providers( + request.environ['context'], criterion=criterion) return map(models.PGProvider.from_db, rows) @bp.get('/payment-gateway-providers//methods') -@signature([models.PGMethod], str) -def list_pg_methods(pgp_id): - rows = central_api.list_pg_methods(request.environ['context']) +@signature([models.PGMethod], str, [Query]) +def list_pg_methods(pgp_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_pg_methods( + request.environ['context'], criterion=criterion) return map(models.PGMethod.from_db, rows) @@ -144,15 +156,18 @@ def create_invoice_state(body): @bp.get('/invoice-states') -@signature([models.InvoiceState]) -def list_invoice_states(): - rows = central_api.list_invoice_states(request.environ['context']) +@signature([models.InvoiceState], [Query]) +def list_invoice_states(q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_invoice_states( + request.environ['context'], criterion=criterion) return map(models.InvoiceState.from_db, rows) @bp.get('/invoice-states/') -@signature(models.InvoiceState, str) +@signature(models.InvoiceState, str,) def get_invoice_state(state_id): row = central_api.get_invoice_state(request.environ['context'], state_id) @@ -190,9 +205,12 @@ def create_merchant(body): @bp.get('/merchants') -@signature([models.Merchant]) -def list_merchants(): - rows = central_api.list_merchants(request.environ['context']) +@signature([models.Merchant], [Query]) +def list_merchants(q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_merchants( + request.environ['context'], criterion=criterion) return map(models.Merchant.from_db, rows) @@ -236,9 +254,12 @@ def create_payment_gateway(merchant_id, body): 
@bp.get('/merchants//payment-gateways') -@signature([models.PGConfig], str) -def list_payment_gateways(merchant_id): - rows = central_api.list_pg_configs(request.environ['context']) +@signature([models.PGConfig], str, [Query]) +def list_payment_gateways(merchant_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_pg_configs( + request.environ['context'], criterion=criterion) return map(models.PGConfig.from_db, rows) @@ -283,9 +304,12 @@ def create_customer(merchant_id, body): @bp.get('/merchants//customers') -@signature([models.Customer], str) -def list_customers(merchant_id): - rows = central_api.list_customers(request.environ['context']) +@signature([models.Customer], str, [Query]) +def list_customers(merchant_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_customers( + request.environ['context'], criterion=criterion) return map(models.Customer.from_db, rows) @@ -329,9 +353,12 @@ def create_payment_method(merchant_id, customer_id, body): @bp.get('/merchants//customers//payment-methods') -@signature([models.PaymentMethod], str, str) -def list_payment_methods(merchant_id, customer_id): - rows = central_api.list_payment_methods(request.environ['context']) +@signature([models.PaymentMethod], str, str, [Query]) +def list_payment_methods(merchant_id, customer_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_payment_methods( + request.environ['context'], criterion=criterion) return map(models.PaymentMethod.from_db, rows) @@ -375,9 +402,12 @@ def create_plan(merchant_id, body): @bp.get('/merchants//plans') -@signature([models.Plan], str) -def list_plans(merchant_id): - rows = central_api.list_plans(request.environ['context']) +@signature([models.Plan], str, [Query]) +def list_plans(merchant_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_plans( + request.environ['context'], criterion=criterion) return map(models.Plan.from_db, rows) @@ -441,9 +471,12 @@ def 
create_product(merchant_id, body): @bp.get('/merchants//products') -@signature([models.Product], str) -def list_products(merchant_id): - rows = central_api.list_products(request.environ['context']) +@signature([models.Product], str, [Query]) +def list_products(merchant_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_products( + request.environ['context'], criterion=criterion) return map(models.Product.from_db, rows) @@ -487,9 +520,12 @@ def create_invoice(merchant_id, body): @bp.get('/merchants//invoices') -@signature([models.InvoiceState], str) -def list_invoices(merchant_id): - rows = central_api.list_invoices(request.environ['context']) +@signature([models.InvoiceState], str, [Query]) +def list_invoices(merchant_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_invoices( + request.environ['context'], criterion=criterion) return map(models.Invoice.from_db, rows) @@ -533,9 +569,12 @@ def create_invoice_line(merchant_id, invoice_id, body): @bp.get('/merchants//invoices//lines') -@signature([models.InvoiceLine], str, str) -def list_invoice_lines(merchant_id, invoice_id): - rows = central_api.list_invoice_lines(request.environ['context']) +@signature([models.InvoiceLine], str, str, [Query]) +def list_invoice_lines(merchant_id, invoice_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_invoice_lines( + request.environ['context'], criterion=criterion) return map(models.Product.from_db, rows) @@ -578,9 +617,12 @@ def create_subscription(merchant_id, body): @bp.get('/merchants//subscriptions') -@signature([models.Subscription], str) -def list_subscriptions(merchant_id): - rows = central_api.list_subscriptions(request.environ['context']) +@signature([models.Subscription], str, [Query]) +def list_subscriptions(merchant_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_subscriptions( + request.environ['context'], criterion=criterion) return 
map(models.Subscription.from_db, rows) @@ -627,9 +669,12 @@ def create_usage(merchant_id, body): @bp.get('/merchants//usage') -@signature([models.Usage], str) -def list_usages(merchant_id): - rows = central_api.list_usages(request.environ['context']) +@signature([models.Usage], str, [Query]) +def list_usages(merchant_id, q=[]): + criterion = [o.as_dict() for o in q] + + rows = central_api.list_usages( + request.environ['context'], criterion=criterion) return map(models.Usage.from_db, rows) From b3c8c0ece80b082cdd063d2fa046dbb15eb3fa6c Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 28 Mar 2013 14:42:47 +0000 Subject: [PATCH 072/182] Fixes --- billingstack/api/base.py | 86 ++------------------------------ billingstack/api/v1/resources.py | 42 +++++++--------- 2 files changed, 22 insertions(+), 106 deletions(-) diff --git a/billingstack/api/base.py b/billingstack/api/base.py index b43215b..ba6d577 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -1,17 +1,13 @@ import functools import mimetypes -import traceback -from flask import abort, request, Blueprint, Response +from flask import request, Blueprint from wsme.types import Base, Enum, UserType, text, Unset, wsproperty -from werkzeug.datastructures import MIMEAccept from oslo.config import cfg from billingstack.api import utils from billingstack.openstack.common import log -from billingstack.openstack.common.wsgi import JSONDictSerializer, \ - XMLDictSerializer, JSONDeserializer LOG = log.getLogger(__name__) @@ -180,8 +176,8 @@ def handler(**kw): fields.sort() request.fields_selector = fields - if status: - request.status_code = status + if hasattr(func, '_wsme_definition'): + func._wsme_definition.status_code = status return func(**kw) @@ -193,79 +189,3 @@ def handler(**kw): return func return decorator - - -RT_JSON = MIMEAccept([("application/json", 1)]) -RT_XML = MIMEAccept([("application/xml", 1)]) - - -def render(res=None, resp_type=None, status=None, **kwargs): - if not res: - 
res = {} - elif isinstance(res, ModelBase): - res = res.as_dict() - elif isinstance(res, list): - new_res = [] - for r in res: - new_res.append(r.as_dict()) - res = new_res - - if isinstance(res, dict): - res.update(kwargs) - elif kwargs: - # can't merge kwargs into the non-dict res - abort_and_log(500, "Non-dict and non-empty kwargs passed to render") - - status_code = getattr(request, 'status_code', None) - if status: - status_code = status - if not status_code: - status_code = 200 - - if not resp_type: - req_resp_type = getattr(request, 'resp_type', None) - resp_type = req_resp_type if req_resp_type else RT_JSON - - serializer = None - if "application/json" in resp_type: - resp_type = RT_JSON - serializer = JSONDictSerializer() - elif "application/xml" in resp_type: - resp_type = RT_XML - serializer = XMLDictSerializer() - else: - abort_and_log(400, "Content type '%s' isn't supported" % resp_type) - - body = serializer.serialize(res) - resp_type = str(resp_type) - return Response(response=body, status=status_code, mimetype=resp_type) - - -def request_data(model): - if not request.content_length > 0: - LOG.debug("Empty body provided in request") - return dict() - - deserializer = None - content_type = request.mimetype - - if not content_type or content_type in RT_JSON: - deserializer = JSONDeserializer() - elif content_type in RT_XML: - abort_and_log(400, "XML requests are not supported yet") - # deserializer = XMLDeserializer() - else: - abort_and_log(400, "Content type '%s' isn't supported" % content_type) - - data = deserializer.deserialize(request.data)['body'] - return model(**data).to_db() - - -def abort_and_log(status_code, descr, exc=None): - LOG.error("Request aborted with status code %s and message '%s'", - status_code, descr) - - if exc is not None: - LOG.error(traceback.format_exc()) - - abort(status_code, description=descr) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index b26dd79..d08a1cc 100644 --- 
a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -15,9 +15,9 @@ # under the License. from flask import request +from flask import Response - -from billingstack.api.base import Rest, Query, render, request_data +from billingstack.api.base import Rest, Query from billingstack.api.v1 import models from billingstack.central.rpcapi import central_api @@ -31,11 +31,9 @@ @bp.post('/currencies') @signature(models.Currency, body=models.Currency) def create_currency(body): - data = request_data(models.Currency) - - row = central_api.create_currency(request.environ['context'], data) + row = central_api.create_currency( + request.environ['context'], body.to_db()) return models.Currency.from_db(row) - #return render(models.Currency.from_db(row)) @bp.get('/currencies') @@ -72,7 +70,7 @@ def update_currency(currency_id, body): @bp.delete('/currencies/') def delete_currency(currency_id): central_api.delete_currency(request.environ['context'], currency_id) - return render() + return Response(status_code=204) # Language @@ -119,7 +117,7 @@ def update_language(language_id, body): @bp.delete('/languages/') def delete_language(language_id): central_api.delete_language(request.environ['context'], language_id) - return render() + return Response(status_code=204) # PGP / PGM @@ -191,7 +189,7 @@ def delete_invoice_state(state_id): central_api.delete_invoice_state( request.environ['context'], state_id) - return render() + return Response(status_code=204) # merchants @@ -238,7 +236,7 @@ def update_merchant(merchant_id, body): @bp.delete('/merchants/') def delete_merchant(merchant_id): central_api.delete_merchant(request.environ['context'], merchant_id) - return render() + return Response(status_code=204) # Invoices @@ -288,7 +286,7 @@ def delete_pg_config(merchant_id, pg_config_id): central_api.delete_pg_config( request.environ['context'], pg_config_id) - return render() + return Response(status_code=204) # customers @@ -337,7 +335,7 @@ def 
update_customer(merchant_id, customer_id, body): @bp.delete('/merchants//customers/') def delete_customer(merchant_id, customer_id): central_api.delete_customer(request.environ['context'], customer_id) - return render() + return Response(status_code=204) # PaymentMethods @@ -386,7 +384,7 @@ def update_payment_method(merchant_id, customer_id, pm_id, body): '') def delete_payment_method(merchant_id, customer_id, pm_id): central_api.delete_payment_method(request.environ['context'], pm_id) - return render() + return Response(status_code=204) # Plans @@ -435,7 +433,7 @@ def update_plan(merchant_id, plan_id, body): @bp.delete('/merchants//plans/') def delete_plan(merchant_id, plan_id): central_api.delete_plan(request.environ['context'], plan_id) - return render() + return Response(status_code=204) # Plan Item @@ -504,7 +502,7 @@ def update_product(merchant_id, product_id, body): @bp.delete('/merchants//products/') def delete_product(merchant_id, product_id): central_api.delete_product(request.environ['context'], product_id) - return render() + return Response(status_code=204) # Invoices @@ -553,7 +551,7 @@ def update_invoice(merchant_id, invoice_id, body): @bp.delete('/merchants//invoices/') def delete_invoice(merchant_id, invoice_id): central_api.delete_invoice(request.environ['context'], invoice_id) - return render() + return Response(status_code=204) # Products @@ -602,7 +600,7 @@ def update_invoice_line(merchant_id, invoice_id, line_id, body): @bp.delete('/merchants//invoices//lines/') def delete_invoice_line(merchant_id, invoice_id, line_id): central_api.delete_invoice_line(request.environ['context'], line_id) - return render() + return Response(status_code=204) # Subscription @@ -638,13 +636,11 @@ def get_subscription(merchant_id, subscription_id): @bp.put('/merchants//subscriptions/') @signature(models.Subscription, str, str, body=models.Subscription) -def update_subscription(merchant_id, subscription_id): - data = request_data(models.Subscription) - +def 
update_subscription(merchant_id, subscription_id, body): row = central_api.update_subscription( request.environ['context'], subscription_id, - data) + body.to_db()) return models.Subscription.from_db(row) @@ -654,7 +650,7 @@ def delete_subscription(merchant_id, subscription_id): central_api.delete_subscription( request.environ['context'], subscription_id) - return render() + return Response(status_code=204) # Usage @@ -704,4 +700,4 @@ def delete_usage(merchant_id, usage_id): central_api.delete_usage( request.environ['context'], usage_id) - return render() + return Response(status_code=204) From 0515f01bf61392c6673cef1f1d6c1d40c906c628 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 00:00:51 +0000 Subject: [PATCH 073/182] Re-amp filtering and split up Filterer from SQLA --- billingstack/api/v1/resources.py | 43 +++++++++++------ billingstack/exceptions.py | 4 +- billingstack/sqlalchemy/api.py | 82 +++++++------------------------- billingstack/storage/filterer.py | 76 +++++++++++++++++++++++++++++ 4 files changed, 123 insertions(+), 82 deletions(-) create mode 100644 billingstack/storage/filterer.py diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index d08a1cc..d58e2b6 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -27,6 +27,19 @@ bp = Rest('v1', __name__) +def _query_to_criterion(query, storage_func=None): + """ + Iterate over the query checking against the valid signatures (later). + + :param query: A list of queries. + :param storage_func: The name of the storage function to very against. 
+ """ + criterion = {} + for q in query: + criterion[q.field] = q.as_dict() + return criterion + + # Currencies @bp.post('/currencies') @signature(models.Currency, body=models.Currency) @@ -39,7 +52,7 @@ def create_currency(body): @bp.get('/currencies') @signature([models.Currency], [Query]) def list_currencies(q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_currencies( request.environ['context'], criterion=criterion) @@ -86,7 +99,7 @@ def create_language(body): @bp.get('/languages') @signature([models.Language], [Query]) def list_languages(q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_languages( request.environ['context'], criterion=criterion) @@ -124,7 +137,7 @@ def delete_language(language_id): @bp.get('/payment-gateway-providers') @signature([models.PGProvider], [Query]) def list_pg_providers(q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_pg_providers( request.environ['context'], criterion=criterion) @@ -135,7 +148,7 @@ def list_pg_providers(q=[]): @bp.get('/payment-gateway-providers//methods') @signature([models.PGMethod], str, [Query]) def list_pg_methods(pgp_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_pg_methods( request.environ['context'], criterion=criterion) @@ -156,7 +169,7 @@ def create_invoice_state(body): @bp.get('/invoice-states') @signature([models.InvoiceState], [Query]) def list_invoice_states(q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_invoice_states( request.environ['context'], criterion=criterion) @@ -205,7 +218,7 @@ def create_merchant(body): @bp.get('/merchants') @signature([models.Merchant], [Query]) def list_merchants(q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_merchants( 
request.environ['context'], criterion=criterion) @@ -254,7 +267,7 @@ def create_payment_gateway(merchant_id, body): @bp.get('/merchants//payment-gateways') @signature([models.PGConfig], str, [Query]) def list_payment_gateways(merchant_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_pg_configs( request.environ['context'], criterion=criterion) @@ -304,7 +317,7 @@ def create_customer(merchant_id, body): @bp.get('/merchants//customers') @signature([models.Customer], str, [Query]) def list_customers(merchant_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_customers( request.environ['context'], criterion=criterion) @@ -353,7 +366,7 @@ def create_payment_method(merchant_id, customer_id, body): @bp.get('/merchants//customers//payment-methods') @signature([models.PaymentMethod], str, str, [Query]) def list_payment_methods(merchant_id, customer_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_payment_methods( request.environ['context'], criterion=criterion) @@ -402,7 +415,7 @@ def create_plan(merchant_id, body): @bp.get('/merchants//plans') @signature([models.Plan], str, [Query]) def list_plans(merchant_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_plans( request.environ['context'], criterion=criterion) @@ -471,7 +484,7 @@ def create_product(merchant_id, body): @bp.get('/merchants//products') @signature([models.Product], str, [Query]) def list_products(merchant_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_products( request.environ['context'], criterion=criterion) @@ -520,7 +533,7 @@ def create_invoice(merchant_id, body): @bp.get('/merchants//invoices') @signature([models.InvoiceState], str, [Query]) def list_invoices(merchant_id, q=[]): - criterion = 
[o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_invoices( request.environ['context'], criterion=criterion) @@ -569,7 +582,7 @@ def create_invoice_line(merchant_id, invoice_id, body): @bp.get('/merchants//invoices//lines') @signature([models.InvoiceLine], str, str, [Query]) def list_invoice_lines(merchant_id, invoice_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_invoice_lines( request.environ['context'], criterion=criterion) @@ -617,7 +630,7 @@ def create_subscription(merchant_id, body): @bp.get('/merchants//subscriptions') @signature([models.Subscription], str, [Query]) def list_subscriptions(merchant_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_subscriptions( request.environ['context'], criterion=criterion) @@ -667,7 +680,7 @@ def create_usage(merchant_id, body): @bp.get('/merchants//usage') @signature([models.Usage], str, [Query]) def list_usages(merchant_id, q=[]): - criterion = [o.as_dict() for o in q] + criterion = _query_to_criterion(q) rows = central_api.list_usages( request.environ['context'], criterion=criterion) diff --git a/billingstack/exceptions.py b/billingstack/exceptions.py index c1fe2f1..e229b90 100644 --- a/billingstack/exceptions.py +++ b/billingstack/exceptions.py @@ -66,11 +66,11 @@ class InvalidSortKey(BadRequest): pass -class InvalidQueryField(Base): +class InvalidQueryField(BadRequest): pass -class InvalidOperator(Base): +class InvalidOperator(BadRequest): pass diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index 60854bd..e526b82 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -11,93 +11,45 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License -import operator from sqlalchemy.orm import exc + from billingstack import exceptions from billingstack.openstack.common import log from billingstack.sqlalchemy import model_base, session, utils +from billingstack.storage.filterer import BaseFilterer LOG = log.getLogger(__name__) -class Filterer(object): - """ - Helper to apply filters... - """ - std_op = [ - (('eq', '==', '='), operator.eq), - (('ne', '!='), operator.ne), - (('ge', '>='), operator.ge), - (('le', '<='), operator.le), - (('gt', '>'), operator.gt), - (('le', '<'), operator.lt) - ] - - def __init__(self, model, query, criterion): - self.model = model - self.query = query - - if isinstance(criterion, dict): - criterion = self.from_dict(criterion) - - self.criterion = criterion - - def from_dict(self, criterion): - """ - Transform a dict with key values to a filter compliant list of dicts. - - :param criterion: The criterion dict. - """ - data = [] - for key, value in criterion.items(): - c = { - 'field': key, - 'value': value, - 'op': 'eq' - } - data.append(c) - return data - - def get_op(self, op_key): - """ - Get the operator. - - :param op_key: The operator key as string. - """ - for op_keys, op in self.std_op: - if op_key in op_keys: - return op - - def apply_criteria(self): +class SQLAFilterer(BaseFilterer): + def apply_criteria(self, query, model): """ Apply the actual criterion in this filterer and return a query with filters applied. 
""" - query = self.query - LOG.debug('Applying Critera %s' % self.criterion) - for c in self.criterion: + for field, c in self.criterion.items(): # NOTE: Try to get the column try: - col = getattr(self.model, c['field']) + col_obj = getattr(model, field) except AttributeError: - msg = '%s is not a valid field to query by' % c['field'] + msg = '%s is not a valid field to query by' % field raise exceptions.InvalidQueryField(msg) # NOTE: Handle a special operator - std_op = self.get_op(c['op']) - if hasattr(self, c['op']): - getattr(self, c['op'])(c) + std_op = self.get_op(c.op) + if hasattr(self, c.op): + getattr(self, c.op)(c) elif std_op: - query = query.filter(std_op(col, c['value'])) - elif c['op'] in ('%', 'like'): - query = query.filter(col.like(c['value'])) - elif c['op'] in ('!%', 'nlike'): - query = query.filter(col.notlike(c['value'])) + query = query.filter(std_op(col_obj, c.value)) + elif c.op in ('%', 'like'): + query = query.filter(col_obj.like(c.value)) + elif c.op in ('!%', 'nlike'): + query = query.filter(col_obj.notlike(c.value)) else: msg = 'Invalid operator in criteria \'%s\'' % c raise exceptions.InvalidOperator(msg) @@ -159,8 +111,8 @@ def _list(self, cls=None, query=None, criterion=None): query = query or self.session.query(cls) if criterion: - filterer = Filterer(cls, query, criterion) - query = filterer.apply_criteria() + filterer = SQLAFilterer(criterion) + query = filterer.apply_criteria(query, cls) try: result = query.all() diff --git a/billingstack/storage/filterer.py b/billingstack/storage/filterer.py new file mode 100644 index 0000000..1176929 --- /dev/null +++ b/billingstack/storage/filterer.py @@ -0,0 +1,76 @@ +from billingstack import exceptions +from billingstack.openstack.common import log + +import operator + +LOG = log.getLogger(__name__) + + +class Criteria(object): + """ + An object to hold Criteria + """ + def __init__(self, field, op, value): + self.field = field + self.op = op + self.value = value + + @classmethod + def 
from_dict(cls, data): + return cls(**data) + + +class BaseFilterer(object): + """ + Object to help with Filtering. + + Typical use cases include turning a dict into useful storage backend query + filters. + """ + + std_op = [ + (('eq', '==', '='), operator.eq), + (('ne', '!='), operator.ne), + (('ge', '>='), operator.ge), + (('le', '<='), operator.le), + (('gt', '>'), operator.gt), + (('le', '<'), operator.lt) + ] + + def __init__(self, criterion, **kw): + #: Criterion to apply + self.criterion = self.load_criterion(criterion) + + def get_op(self, op_key): + """ + Get the operator. + + :param op_key: The operator key as string. + """ + for op_keys, op in self.std_op: + if op_key in op_keys: + return op + + def load_criterion(self, criterion): + """ + Transform a dict with key values to a filter compliant list of dicts. + + :param criterion: The criterion dict. + """ + if not isinstance(criterion, dict): + msg = 'Criterion needs to be a dict.' + LOG.debug(msg) + raise exceptions.InvalidObject(msg) + + data = {} + for key, value in criterion.items(): + # NOTE: Criteria that doesn't have a OP defaults to eq and handle + # dicts + if isinstance(value, basestring): + c = Criteria(key, 'eq', value) + elif isinstance(value, dict): + import ipdb + ipdb.set_trace() + c = Criteria.from_dict(value) + data[key] = c + return data From 08ea9022d404f1cf6cb36e23ef06a54e10d2bab9 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 00:02:43 +0000 Subject: [PATCH 074/182] Actually needs to set the query --- billingstack/sqlalchemy/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index e526b82..9a83512 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -43,7 +43,7 @@ def apply_criteria(self, query, model): # NOTE: Handle a special operator std_op = self.get_op(c.op) if hasattr(self, c.op): - getattr(self, c.op)(c) + query = getattr(self, c.op)(c) elif 
std_op: query = query.filter(std_op(col_obj, c.value)) elif c.op in ('%', 'like'): From 05505fc531ebf897cfdae685e65e8a1f327a9bd2 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 00:43:26 +0000 Subject: [PATCH 075/182] Add filtering docs --- doc/source/resources/api_filtering.rst | 104 +++++++++++++++++++++++++ doc/source/resources/index.rst | 1 + 2 files changed, 105 insertions(+) create mode 100644 doc/source/resources/api_filtering.rst diff --git a/doc/source/resources/api_filtering.rst b/doc/source/resources/api_filtering.rst new file mode 100644 index 0000000..f7c2f93 --- /dev/null +++ b/doc/source/resources/api_filtering.rst @@ -0,0 +1,104 @@ +.. + Copyright 2013 Endre Karlson + Copyright 2013 Luis Gervaso + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + +.. _filtering: + + +========================================== +Filtering in the API (Internally and REST) +========================================== + +.. index:: + double: api_filtering; brief + + +Filtering Operators ++++++++++++++++++++ + +.. note:: Some storage plugins may not support all operatirs. 
+ + +================= =========== +Name Operators +================= =========== +Equals eq, ==, == +Not Equals ne, != +Greater or equal le, >= +Less or equal le, <= +Greater than >, gt +Less than <, lt +Like like +Not Like nlike +================= =========== + + +Filtering in REST API ++++++++++++++++++++++ + +You can filter using "query" parameters in the URL which works very much like +doing it in other places. + +For example querying for Merchants with a name that starts with 'Cloud' you can do it like the below. + +.. code:: + + http://localhost:9091/v1/merchants?q.field=name&q.op=like&q.value=Cloud% + + +Results in a internal criteria of: + +.. code:: + + {'name': {'field': 'name', 'op': 'like', 'value': 'Cloud%'}} + + +You can also pass multi field / value queries (Same as above but also language) + +.. code:: + + http://localhost:9091/v1/merchants?q.field=lang&q.field=name&q.op=eq&q.op=like&q.value=nor&q.value=Cloud% + + +Results in a internal critera of: + +.. code:: + + { + 'name': { + 'field': 'name', 'op': 'like', 'value': 'Cloud%' + }, + 'language': { + 'field': 'language', 'op': 'eq', 'value': 'nor' + } + } + +The Params in the URL are parsed to something usable by each service that it's +sent to. + + +Filtering internally +++++++++++++++++++++ + +Filtering internally when for example doing a call directly on a api method +or towards a API method that is available over RPC you can pass Criterion dicts +like mentioned above in the "Results in internal criteria of....". + +Basically it boils down to something like: + +.. code:: + + {'fieldname': 'value'} + {'fieldname': {'op': 'eq', 'value': 'value'}} \ No newline at end of file diff --git a/doc/source/resources/index.rst b/doc/source/resources/index.rst index cc50083..e9bcc0a 100644 --- a/doc/source/resources/index.rst +++ b/doc/source/resources/index.rst @@ -22,4 +22,5 @@ Resources in Billingstack .. 
toctree:: :maxdepth: 2 + api_filtering subscriptions \ No newline at end of file From 4e0c3894bcd90c765c85315ed8b16d44dcb258e9 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 01:02:32 +0000 Subject: [PATCH 076/182] Remove ipdb --- billingstack/storage/filterer.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/billingstack/storage/filterer.py b/billingstack/storage/filterer.py index 1176929..0763648 100644 --- a/billingstack/storage/filterer.py +++ b/billingstack/storage/filterer.py @@ -69,8 +69,6 @@ def load_criterion(self, criterion): if isinstance(value, basestring): c = Criteria(key, 'eq', value) elif isinstance(value, dict): - import ipdb - ipdb.set_trace() c = Criteria.from_dict(value) data[key] = c return data From ffc3f37f0026d8759fbd3b237a61af64fb4d5d1a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 01:51:47 +0000 Subject: [PATCH 077/182] Fixup plan_items CRUD --- billingstack/api/v1/models.py | 13 ++-- billingstack/api/v1/resources.py | 67 +++++++++++-------- billingstack/central/rpcapi.py | 5 +- billingstack/central/service.py | 4 +- .../storage/impl_sqlalchemy/__init__.py | 14 ++-- 5 files changed, 61 insertions(+), 42 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index d1538505..75544c7 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -92,10 +92,6 @@ class ContactInfo(Base): website = text -class Plan(DescribedBase): - properties = DictType(key_type=text, value_type=property_type) - - class PlanItem(Base): plan_id = text product_id = text @@ -103,6 +99,15 @@ class PlanItem(Base): pricing = DictType(key_type=text, value_type=property_type) +class Plan(DescribedBase): + def __init__(self, **kw): + kw['items'] = map(PlanItem.from_db, kw.pop('items')) + super(Plan, self).__init__(**kw) + + items = [PlanItem] + properties = DictType(key_type=text, value_type=property_type) + + class Product(DescribedBase): properties = 
DictType(key_type=text, value_type=property_type) pricing = DictType(key_type=text, value_type=property_type) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index d58e2b6..70b355a 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -27,16 +27,24 @@ bp = Rest('v1', __name__) -def _query_to_criterion(query, storage_func=None): +def _query_to_criterion(query, storage_func=None, **kw): """ Iterate over the query checking against the valid signatures (later). :param query: A list of queries. :param storage_func: The name of the storage function to very against. """ + translation = { + 'customer_id': 'customer' + } + criterion = {} for q in query: - criterion[q.field] = q.as_dict() + key = translation.get(q.field, q.field) + criterion[key] = q.as_dict() + + criterion.update(kw) + return criterion @@ -83,7 +91,7 @@ def update_currency(currency_id, body): @bp.delete('/currencies/') def delete_currency(currency_id): central_api.delete_currency(request.environ['context'], currency_id) - return Response(status_code=204) + return Response(status=204) # Language @@ -130,7 +138,7 @@ def update_language(language_id, body): @bp.delete('/languages/') def delete_language(language_id): central_api.delete_language(request.environ['context'], language_id) - return Response(status_code=204) + return Response(status=204) # PGP / PGM @@ -145,10 +153,10 @@ def list_pg_providers(q=[]): return map(models.PGProvider.from_db, rows) -@bp.get('/payment-gateway-providers//methods') +@bp.get('/payment-gateway-providers//methods') @signature([models.PGMethod], str, [Query]) -def list_pg_methods(pgp_id, q=[]): - criterion = _query_to_criterion(q) +def list_pg_methods(provider_id, q=[]): + criterion = _query_to_criterion(q, provider_id=provider_id) rows = central_api.list_pg_methods( request.environ['context'], criterion=criterion) @@ -202,7 +210,7 @@ def delete_invoice_state(state_id): central_api.delete_invoice_state( 
request.environ['context'], state_id) - return Response(status_code=204) + return Response(status=204) # merchants @@ -249,7 +257,7 @@ def update_merchant(merchant_id, body): @bp.delete('/merchants/') def delete_merchant(merchant_id): central_api.delete_merchant(request.environ['context'], merchant_id) - return Response(status_code=204) + return Response(status=204) # Invoices @@ -267,7 +275,7 @@ def create_payment_gateway(merchant_id, body): @bp.get('/merchants//payment-gateways') @signature([models.PGConfig], str, [Query]) def list_payment_gateways(merchant_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id) rows = central_api.list_pg_configs( request.environ['context'], criterion=criterion) @@ -299,7 +307,7 @@ def delete_pg_config(merchant_id, pg_config_id): central_api.delete_pg_config( request.environ['context'], pg_config_id) - return Response(status_code=204) + return Response(status=204) # customers @@ -317,7 +325,7 @@ def create_customer(merchant_id, body): @bp.get('/merchants//customers') @signature([models.Customer], str, [Query]) def list_customers(merchant_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id) rows = central_api.list_customers( request.environ['context'], criterion=criterion) @@ -348,7 +356,7 @@ def update_customer(merchant_id, customer_id, body): @bp.delete('/merchants//customers/') def delete_customer(merchant_id, customer_id): central_api.delete_customer(request.environ['context'], customer_id) - return Response(status_code=204) + return Response(status=204) # PaymentMethods @@ -366,7 +374,8 @@ def create_payment_method(merchant_id, customer_id, body): @bp.get('/merchants//customers//payment-methods') @signature([models.PaymentMethod], str, str, [Query]) def list_payment_methods(merchant_id, customer_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id, + 
customer=customer_id) rows = central_api.list_payment_methods( request.environ['context'], criterion=criterion) @@ -397,7 +406,7 @@ def update_payment_method(merchant_id, customer_id, pm_id, body): '') def delete_payment_method(merchant_id, customer_id, pm_id): central_api.delete_payment_method(request.environ['context'], pm_id) - return Response(status_code=204) + return Response(status=204) # Plans @@ -415,7 +424,7 @@ def create_plan(merchant_id, body): @bp.get('/merchants//plans') @signature([models.Plan], str, [Query]) def list_plans(merchant_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id) rows = central_api.list_plans( request.environ['context'], criterion=criterion) @@ -446,7 +455,7 @@ def update_plan(merchant_id, plan_id, body): @bp.delete('/merchants//plans/') def delete_plan(merchant_id, plan_id): central_api.delete_plan(request.environ['context'], plan_id) - return Response(status_code=204) + return Response(status=204) # Plan Item @@ -463,10 +472,11 @@ def add_plan_item(merchant_id, plan_id, product_id): return models.PlanItem.from_db(row) -@bp.put('/merchants//plans//items/') +@bp.delete('/merchants//plans//items/') def delete_plan_item(merchant_id, plan_id, product_id): central_api.delete_plan_item(request.environ['context'], plan_id, product_id) + return Response(status=204) # Products @@ -484,7 +494,7 @@ def create_product(merchant_id, body): @bp.get('/merchants//products') @signature([models.Product], str, [Query]) def list_products(merchant_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id) rows = central_api.list_products( request.environ['context'], criterion=criterion) @@ -515,7 +525,7 @@ def update_product(merchant_id, product_id, body): @bp.delete('/merchants//products/') def delete_product(merchant_id, product_id): central_api.delete_product(request.environ['context'], product_id) - return Response(status_code=204) + return 
Response(status=204) # Invoices @@ -533,7 +543,7 @@ def create_invoice(merchant_id, body): @bp.get('/merchants//invoices') @signature([models.InvoiceState], str, [Query]) def list_invoices(merchant_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id) rows = central_api.list_invoices( request.environ['context'], criterion=criterion) @@ -564,7 +574,7 @@ def update_invoice(merchant_id, invoice_id, body): @bp.delete('/merchants//invoices/') def delete_invoice(merchant_id, invoice_id): central_api.delete_invoice(request.environ['context'], invoice_id) - return Response(status_code=204) + return Response(status=204) # Products @@ -582,7 +592,8 @@ def create_invoice_line(merchant_id, invoice_id, body): @bp.get('/merchants//invoices//lines') @signature([models.InvoiceLine], str, str, [Query]) def list_invoice_lines(merchant_id, invoice_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id, + invoice_id=invoice_id) rows = central_api.list_invoice_lines( request.environ['context'], criterion=criterion) @@ -613,7 +624,7 @@ def update_invoice_line(merchant_id, invoice_id, line_id, body): @bp.delete('/merchants//invoices//lines/') def delete_invoice_line(merchant_id, invoice_id, line_id): central_api.delete_invoice_line(request.environ['context'], line_id) - return Response(status_code=204) + return Response(status=204) # Subscription @@ -630,7 +641,7 @@ def create_subscription(merchant_id, body): @bp.get('/merchants//subscriptions') @signature([models.Subscription], str, [Query]) def list_subscriptions(merchant_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id) rows = central_api.list_subscriptions( request.environ['context'], criterion=criterion) @@ -663,7 +674,7 @@ def delete_subscription(merchant_id, subscription_id): central_api.delete_subscription( request.environ['context'], subscription_id) - return 
Response(status_code=204) + return Response(status=204) # Usage @@ -680,7 +691,7 @@ def create_usage(merchant_id, body): @bp.get('/merchants//usage') @signature([models.Usage], str, [Query]) def list_usages(merchant_id, q=[]): - criterion = _query_to_criterion(q) + criterion = _query_to_criterion(q, merchant_id=merchant_id) rows = central_api.list_usages( request.environ['context'], criterion=criterion) @@ -713,4 +724,4 @@ def delete_usage(merchant_id, usage_id): central_api.delete_usage( request.environ['context'], usage_id) - return Response(status_code=204) + return Response(status=204) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index d64c79c..8978e51 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -220,8 +220,9 @@ def update_plan_item(self, ctxt, id_, values): return self.call(ctxt, self.make_msg('update_plan_item', id_=id_, values=values)) - def delete_plan_item(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_plan_item', id_=id_)) + def delete_plan_item(self, ctxt, plan_id, product_id): + return self.call(ctxt, self.make_msg('delete_plan_item', + plan_id=plan_id, product_id=product_id)) # Products def create_product(self, ctxt, merchant_id, values): diff --git a/billingstack/central/service.py b/billingstack/central/service.py index ecc9b1c..2cc9e05 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -212,8 +212,8 @@ def list_plan_items(self, ctxt, **kw): def get_plan_item(self, ctxt, id_): return self.storage_conn.get_plan_item(ctxt, id_) - def delete_plan_item(self, ctxt, id_): - return self.storage_conn.delete_plan_item(ctxt, id_) + def delete_plan_item(self, ctxt, plan_id, product_id): + return self.storage_conn.delete_plan_item(ctxt, plan_id, product_id) def create_product(self, ctxt, merchant_id, values): return self.storage_conn.create_product(ctxt, merchant_id, values) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py 
b/billingstack/storage/impl_sqlalchemy/__init__.py index 264c8c6..b8023bd 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -388,7 +388,7 @@ def _plan(self, row): plan['properties'] = self._kv_rows(row.properties, func=lambda i: i['value']) - plan['plan_items'] = map(dict, row.plan_items) if row.plan_items\ + plan['items'] = map(dict, row.plan_items) if row.plan_items\ else [] return plan @@ -479,10 +479,7 @@ def get_plan_item(self, ctxt, id_): row = self._get(models.PlanItem, id_) return dict(row) - def delete_plan_item(self, ctxt, id_): - self._delete(models.PlanItem, id_) - - def remove_plan_product(self, ctxt, plan_id, product_id): + def delete_plan_item(self, ctxt, plan_id, product_id): """ Remove a Product from a Plan by deleting the PlanItem. @@ -491,7 +488,12 @@ def remove_plan_product(self, ctxt, plan_id, product_id): """ query = self.session.query(models.PlanItem).\ filter_by(plan_id=plan_id, product_id=product_id) - query.delete() + + count = query.delete() + if count == 0: + msg = 'Couldn\'t match plan_id %s or product_id %s' % ( + plan_id, product_id) + raise exceptions.NotFound(msg) # Products def _product(self, row): From 1d6381e44bfc5d623901b9bda4bc69d1181e4a98 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 02:00:40 +0000 Subject: [PATCH 078/182] Pass empty list --- billingstack/api/v1/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 75544c7..3d6ec62 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -101,7 +101,7 @@ class PlanItem(Base): class Plan(DescribedBase): def __init__(self, **kw): - kw['items'] = map(PlanItem.from_db, kw.pop('items')) + kw['items'] = map(PlanItem.from_db, kw.pop('items', [])) super(Plan, self).__init__(**kw) items = [PlanItem] From 477a0d318e43c93beab1f9892ec91aeb8194843c Mon Sep 17 00:00:00 2001 From: Endre 
Karlson Date: Fri, 29 Mar 2013 02:33:58 +0000 Subject: [PATCH 079/182] Fix translate of customer id and add joins for filtering on merchant_id in some places --- billingstack/api/v1/resources.py | 4 +-- .../storage/impl_sqlalchemy/__init__.py | 30 ++++++++++++++++--- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 70b355a..8670ca7 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -35,7 +35,7 @@ def _query_to_criterion(query, storage_func=None, **kw): :param storage_func: The name of the storage function to very against. """ translation = { - 'customer_id': 'customer' + 'customer': 'customer_id' } criterion = {} @@ -375,7 +375,7 @@ def create_payment_method(merchant_id, customer_id, body): @signature([models.PaymentMethod], str, str, [Query]) def list_payment_methods(merchant_id, customer_id, q=[]): criterion = _query_to_criterion(q, merchant_id=merchant_id, - customer=customer_id) + customer_id=customer_id) rows = central_api.list_payment_methods( request.environ['context'], criterion=criterion) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index b8023bd..ae4dfe2 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -31,6 +31,16 @@ cfg.CONF.register_opts(SQLOPTS, group='storage:sqlalchemy') +def filter_merchant_by_join(query, cls, criterion): + if criterion and 'merchant_id' in criterion: + merchant_id = criterion.pop('merchant_id') + if not hasattr(cls, 'merchant_id'): + raise RuntimeError('No merchant_id attribute on %s' % cls) + + query = query.join(cls).filter(cls.merchant_id == merchant_id) + return query + + class SQLAlchemyStorage(base.StorageEngine): __plugin_name__ = 'sqlalchemy' @@ -307,8 +317,14 @@ def create_payment_method(self, ctxt, customer_id, values): self._save(row) return self._dict(row, 
extra=['provider_method']) - def list_payment_methods(self, ctxt, **kw): - rows = self._list(models.PaymentMethod, **kw) + def list_payment_methods(self, ctxt, criterion=None, **kw): + query = self.session.query(models.PaymentMethod) + + query = filter_merchant_by_join(query, models.Customer, criterion) + + rows = self._list(query=query, cls=models.PaymentMethod, + criterion=criterion, **kw) + return [self._dict(row, extra=['provider_method']) for row in rows] def get_payment_method(self, ctxt, id_, **kw): @@ -696,13 +712,19 @@ def create_subscription(self, ctxt, values): self._save(subscription) return self._subscription(subscription) - def list_subscriptions(self, ctxt, **kw): + def list_subscriptions(self, ctxt, criterion=None, **kw): """ List Subscriptions :param merchant_id: The Merchant to list it for """ - rows = self._list(models.Subscription, **kw) + query = self.session.query(models.Subscription) + + query = filter_merchant_by_join(query, models.Customer, criterion) + + rows = self._list(query=query, cls=models.Subscription, + criterion=criterion, **kw) + return map(self._subscription, rows) def get_subscription(self, ctxt, id_): From ca5e3bbd4e9087cc5b00b5625122965e53ef95c7 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 21:38:19 +0000 Subject: [PATCH 080/182] For debugging --- billingstack/storage/filterer.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/billingstack/storage/filterer.py b/billingstack/storage/filterer.py index 0763648..aecf248 100644 --- a/billingstack/storage/filterer.py +++ b/billingstack/storage/filterer.py @@ -19,6 +19,10 @@ def __init__(self, field, op, value): def from_dict(cls, data): return cls(**data) + def __str__(self): + return u'Field: %s, Operation: %s, Value: %s' % ( + self.field, self.op, self.value) + class BaseFilterer(object): """ From 81f03a2217ea68b592f2efa897c56e1edbf21aed Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 21:38:48 +0000 Subject: [PATCH 081/182] Wrong 
indent --- billingstack/sqlalchemy/api.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index 9a83512..b18059f 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -30,8 +30,6 @@ def apply_criteria(self, query, model): Apply the actual criterion in this filterer and return a query with filters applied. """ - LOG.debug('Applying Critera %s' % self.criterion) - for field, c in self.criterion.items(): # NOTE: Try to get the column try: @@ -54,7 +52,7 @@ def apply_criteria(self, query, model): msg = 'Invalid operator in criteria \'%s\'' % c raise exceptions.InvalidOperator(msg) - return query + return query class HelpersMixin(object): From cf144d438a8c951b91d99e8d67d6ec00ec8567ee Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 21:39:14 +0000 Subject: [PATCH 082/182] Fix up serialization of entities. --- billingstack/api/v1/models.py | 4 +-- .../storage/impl_sqlalchemy/__init__.py | 26 ++++++++++++------- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 3d6ec62..3ec6f36 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -96,7 +96,7 @@ class PlanItem(Base): plan_id = text product_id = text - pricing = DictType(key_type=text, value_type=property_type) + pricing = [DictType(key_type=text, value_type=property_type)] class Plan(DescribedBase): @@ -110,7 +110,7 @@ def __init__(self, **kw): class Product(DescribedBase): properties = DictType(key_type=text, value_type=property_type) - pricing = DictType(key_type=text, value_type=property_type) + pricing = [DictType(key_type=text, value_type=property_type)] class InvoiceLine(Base): diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index ae4dfe2..e30aaf2 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ 
b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -398,13 +398,24 @@ def update_customer(self, ctxt, id_, values): def delete_customer(self, ctxt, id_): return self._delete(models.Customer, id_) + def _entity(self, row): + """ + Helper to serialize a entity like a Product or a Plan + + :param row: The Row. + """ + entity = dict(row) + if hasattr(entity, 'properties'): + entity['properties'] = self._kv_rows( + row.properties, func=lambda i: i['value']) + if hasattr(row, 'pricing'): + entity['pricing'] = row.pricing or [] + return entity + # Plan def _plan(self, row): - plan = dict(row) - - plan['properties'] = self._kv_rows(row.properties, - func=lambda i: i['value']) - plan['items'] = map(dict, row.plan_items) if row.plan_items\ + plan = self._entity(row) + plan['items'] = map(self._entity, row.plan_items) if row.plan_items\ else [] return plan @@ -513,10 +524,7 @@ def delete_plan_item(self, ctxt, plan_id, product_id): # Products def _product(self, row): - product = dict(row) - - product['properties'] = self._kv_rows(row.properties, - func=lambda i: i['value']) + product = self._entity(row) return product def create_product(self, ctxt, merchant_id, values): From 8054b74dde636817a697d057d045d270e9e811c4 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 22:47:04 +0000 Subject: [PATCH 083/182] Make _get use Filterer --- billingstack/sqlalchemy/api.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index b18059f..b123b71 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -130,13 +130,13 @@ def _filter_id(self, cls, identifier, by_name): :param by_name: By name. 
""" if hasattr(cls, 'id') and utils.is_valid_id(identifier): - return cls.id == identifier + return {'id': identifier} elif hasattr(cls, 'name') and by_name: - return cls.name == identifier + return {'name': identifier} else: raise exceptions.NotFound('No criterias matched') - def _get(self, cls, identifier, by_name=False): + def _get(self, cls, identifier=None, criterion=None, by_name=False): """ Get an instance of a Model matching ID @@ -144,9 +144,18 @@ def _get(self, cls, identifier, by_name=False): :param identifier: The ID to get :param by_name: Search by name as well as ID """ - id_filter = self._filter_id(cls, identifier, by_name) + criterion_ = {} - query = self.session.query(cls).filter(id_filter) + if identifier: + criterion_.update(self._filter_id(cls, identifier, by_name)) + + if isinstance(criterion, dict): + criterion_.update(criterion) + + query = self.session.query(cls) + + filterer = SQLAFilterer(criterion_) + query = filterer.apply_criteria(query, cls) try: obj = query.one() From e15e7a97aa6016e6d95ee5d93ebee272455ddf0a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 22:47:22 +0000 Subject: [PATCH 084/182] Add PATCH --- billingstack/api/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/billingstack/api/base.py b/billingstack/api/base.py index ba6d577..9f8d036 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -129,6 +129,9 @@ def get(self, rule, status_code=200, **kw): def post(self, rule, status_code=202, **kw): return self._mroute('POST', rule, status_code, **kw) + def patch(self, rule, status_code=202, **kw): + return self._mroute('PATCH', rule, status_code, **kw) + def put(self, rule, status_code=202, **kw): return self._mroute('PUT', rule, status_code, **kw) From b0091a35cb558fbe0162b2ebe6ba2b87b96b9f4a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 29 Mar 2013 23:44:45 +0000 Subject: [PATCH 085/182] Plan item fixes --- billingstack/api/v1/models.py | 7 ++-- 
billingstack/central/rpcapi.py | 10 +++-- billingstack/central/service.py | 11 +++--- .../storage/impl_sqlalchemy/__init__.py | 37 ++++++++----------- billingstack/tests/storage/__init__.py | 13 +------ 5 files changed, 33 insertions(+), 45 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 3ec6f36..c3361b3 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -67,7 +67,7 @@ class PGMethod(DescribedBase): class PGProvider(DescribedBase): def __init__(self, **kw): - kw['methods'] = [PGMethod.from_db(m) for m in kw.get('methods', {})] + kw['methods'] = [PGMethod.from_db(m) for m in kw.get('methods', [])] super(PGProvider, self).__init__(**kw) methods = [PGMethod] @@ -92,7 +92,7 @@ class ContactInfo(Base): website = text -class PlanItem(Base): +class PlanItem(ModelBase): plan_id = text product_id = text @@ -101,7 +101,8 @@ class PlanItem(Base): class Plan(DescribedBase): def __init__(self, **kw): - kw['items'] = map(PlanItem.from_db, kw.pop('items', [])) + if 'items' in kw: + kw['items'] = map(PlanItem.from_db, kw.pop('items')) super(Plan, self).__init__(**kw) items = [PlanItem] diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 8978e51..829089b 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -213,11 +213,13 @@ def list_plan_items(self, ctxt, criterion=None): return self.call(ctxt, self.make_msg('list_plan_items', criterion=criterion)) - def get_plan_item(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_plan_item', id_=id_)) + def get_plan_item(self, ctxt, plan_id, product_id): + return self.call(ctxt, self.make_msg('get_plan_item', + plan_id=plan_id, product_id=product_id)) - def update_plan_item(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_plan_item', id_=id_, + def update_plan_item(self, ctxt, plan_id, product_id, values): + return self.call(ctxt, self.make_msg('update_plan_item', + 
plan_id=plan_id, product_id=product_id, values=values)) def delete_plan_item(self, ctxt, plan_id, product_id): diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 2cc9e05..43f4a9b 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -203,14 +203,15 @@ def delete_plan(self, ctxt, id_): def create_plan_item(self, ctxt, values): return self.storage_conn.create_plan_item(ctxt, values) - def update_plan_item(self, ctxt, id_, values): - return self.storage_conn.update_plan_item(ctxt, id_, values) - def list_plan_items(self, ctxt, **kw): return self.storage_conn.list_plan_items(ctxt, **kw) - def get_plan_item(self, ctxt, id_): - return self.storage_conn.get_plan_item(ctxt, id_) + def get_plan_item(self, ctxt, plan_id, product_id): + return self.storage_conn.get_plan_item(ctxt, plan_id, product_id) + + def update_plan_item(self, ctxt, plan_id, product_id, values): + return self.storage_conn.update_plan_item( + ctxt, plan_id, product_id, values) def delete_plan_item(self, ctxt, plan_id, product_id): return self.storage_conn.delete_plan_item(ctxt, plan_id, product_id) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index e30aaf2..03b85f8 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -405,7 +405,7 @@ def _entity(self, row): :param row: The Row. 
""" entity = dict(row) - if hasattr(entity, 'properties'): + if hasattr(row, 'properties'): entity['properties'] = self._kv_rows( row.properties, func=lambda i: i['value']) if hasattr(row, 'pricing'): @@ -428,7 +428,6 @@ def create_plan(self, ctxt, merchant_id, values): """ merchant = self._get(models.Merchant, merchant_id) - items = values.pop('plan_items', []) properties = values.pop('properties', {}) plan = models.Plan(**values) @@ -436,10 +435,6 @@ def create_plan(self, ctxt, merchant_id, values): plan.merchant = merchant self.set_properties(plan, properties) - for i in items: - item_row = self.create_plan_item(ctxt, i, save=False) - plan.plan_items.append(item_row) - self._save(plan) return self._plan(plan) @@ -486,25 +481,25 @@ def delete_plan(self, ctxt, id_): """ self._delete(models.Plan, id_) - # PlanItem - def create_plan_item(self, ctxt, values, save=True): - ref = models.PlanItem() - return self._update_plan_item(ref, values, save=save) - - def update_plan_item(self, ctxt, item, values, save=True): - return self._update_plan_item(item, values, save=save) - - def _update_plan_item(self, item, values, save=True): - row = self._get_row(item, models.PlanItem) - row.update(values) - return self._save(row, save=save) + # PlanItemw + def create_plan_item(self, ctxt, values): + row = models.PlanItem(**values) + self._save(row) + return self._entity(row) def list_plan_items(self, ctxt, **kw): return self._list(models.PlanItem, **kw) - def get_plan_item(self, ctxt, id_): - row = self._get(models.PlanItem, id_) - return dict(row) + def get_plan_item(self, ctxt, plan_id, product_id, criterion={}): + criterion.update({'plan_id': plan_id, 'product_id': product_id}) + row = self._get(models.PlanItem, criterion=criterion) + return self._entity(row) + + def update_plan_item(self, ctxt, plan_id, product_id, values): + criterion = {'plan_id': plan_id, 'product_id': product_id} + row = self._get(models.PlanItem, criterion=criterion) + row.update(values) + return 
self._entity(row) def delete_plan_item(self, ctxt, plan_id, product_id): """ diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index ebee9dc..8f81e5e 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -472,18 +472,7 @@ def test_delete_product_missing(self): self.admin_ctxt, UUID) # Plan - def test_create_plan_with_items(self): - _, p1 = self.create_product(self.merchant['id']) - _, p2 = self.create_product(self.merchant['id']) - - values = { - 'plan_items': [{'product_id': p1['id']}, {'product_id': p2['id']}] - } - - fixture, data = self.create_plan(self.merchant['id'], values=values) - self.assertData(fixture, data) - - def test_create_plan_without_items(self): + def test_create_plan(self): fixture, data = self.create_plan(self.merchant['id']) self.assertData(fixture, data) From 6f10f3d074592d077e9a1dbe9f9994181232778c Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 30 Mar 2013 18:15:29 +0000 Subject: [PATCH 086/182] Add name, title and descritpion to PI --- billingstack/api/v1/models.py | 4 ++++ billingstack/storage/impl_sqlalchemy/__init__.py | 10 +++++++++- billingstack/storage/impl_sqlalchemy/models.py | 7 +++++-- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index c3361b3..1cfaf81 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -93,6 +93,10 @@ class ContactInfo(Base): class PlanItem(ModelBase): + name = text + title = text + description = text + plan_id = text product_id = text diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 03b85f8..0fcd22b 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -415,7 +415,7 @@ def _entity(self, row): # Plan def _plan(self, row): plan = self._entity(row) - plan['items'] = 
map(self._entity, row.plan_items) if row.plan_items\ + plan['items'] = map(self._plan_item, row.plan_items) if row.plan_items\ else [] return plan @@ -482,6 +482,13 @@ def delete_plan(self, ctxt, id_): self._delete(models.Plan, id_) # PlanItemw + def _plan_item(self, row): + entity = self._entity(row) + entity['name'] = row.product.name + entity['title'] = row.title or row.product.title + entity['description'] = row.description or row.product.description + return entity + def create_plan_item(self, ctxt, values): row = models.PlanItem(**values) self._save(row) @@ -499,6 +506,7 @@ def update_plan_item(self, ctxt, plan_id, product_id, values): criterion = {'plan_id': plan_id, 'product_id': product_id} row = self._get(models.PlanItem, criterion=criterion) row.update(values) + self._save(row) return self._entity(row) def delete_plan_item(self, ctxt, plan_id, product_id): diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index 024a013..246fb36 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -321,14 +321,17 @@ class PlanProperty(BASE, PropertyMixin): class PlanItem(BASE, BaseMixin): __table_args__ = (UniqueConstraint('plan_id', 'product_id', name='item'),) + title = Column(Unicode(100)) + description = Column(Unicode(255)) + pricing = Column(JSON) plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), - onupdate='CASCADE', nullable=False) + onupdate='CASCADE', primary_key=True) product = relationship('Product', backref='plan_items', uselist=False) product_id = Column(UUID, ForeignKey('product.id', onupdate='CASCADE'), - nullable=False) + primary_key=True) class Product(BASE, BaseMixin): From f1b587ce99a07124f9349d6a0bbc1c084e7bd3c8 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 30 Mar 2013 18:15:51 +0000 Subject: [PATCH 087/182] Patch / update plan item --- billingstack/api/v1/resources.py | 9 +++++++++ 1 file changed, 9 
insertions(+) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 8670ca7..57b76c0 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -472,6 +472,15 @@ def add_plan_item(merchant_id, plan_id, product_id): return models.PlanItem.from_db(row) +@bp.patch('/merchants//plans//items/') +@signature(models.PlanItem, str, str, str, body=models.PlanItem) +def update_plan_item(merchant_id, plan_id, product_id, body): + row = central_api.update_plan_item( + request.environ['context'], plan_id, product_id, body.to_db()) + + return models.PlanItem.from_db(row) + + @bp.delete('/merchants//plans//items/') def delete_plan_item(merchant_id, plan_id, product_id): central_api.delete_plan_item(request.environ['context'], From 958a7c481ffb799023be5dcffc5d8f6769179413 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 31 Mar 2013 15:38:49 +0000 Subject: [PATCH 088/182] Change method names --- billingstack/tests/payment_gateway/base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/billingstack/tests/payment_gateway/base.py b/billingstack/tests/payment_gateway/base.py index f0b16fa..14b516b 100644 --- a/billingstack/tests/payment_gateway/base.py +++ b/billingstack/tests/payment_gateway/base.py @@ -24,9 +24,9 @@ def setUp(self): def test_create_account(self): expected = self.pgp.create_account(self.customer) - def test_list_account(self): + def test_list_accounts(self): expected = self.pgp.create_account(self.customer) - actual = self.pgp.list_account() + actual = self.pgp.list_accounts() def test_get_account(self): expected = self.pgp.create_account(self.customer) @@ -50,7 +50,7 @@ def pm_create(self): def test_create_payment_method(self): fixture, pm = self.pm_create() - def test_list_payment_method(self): + def test_list_payment_methods(self): fixture, pm = self.pm_create() assert len(self.pgp.list_payment_method(self.customer['id'])) == 1 From 168ecf03377c63d441fa3ab52a96c21187915a3c 
Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 31 Mar 2013 18:23:16 +0000 Subject: [PATCH 089/182] Update common --- billingstack/openstack/common/context.py | 8 +-- .../openstack/common/eventlet_backdoor.py | 2 +- billingstack/openstack/common/exception.py | 2 +- billingstack/openstack/common/excutils.py | 2 +- billingstack/openstack/common/gettextutils.py | 2 +- billingstack/openstack/common/importutils.py | 2 +- billingstack/openstack/common/iniparser.py | 2 +- billingstack/openstack/common/jsonutils.py | 6 --- billingstack/openstack/common/local.py | 2 +- billingstack/openstack/common/lockutils.py | 1 - billingstack/openstack/common/log.py | 46 ++++++++++++----- .../openstack/common/network_utils.py | 2 +- .../openstack/common/notifier/__init__.py | 2 +- billingstack/openstack/common/notifier/api.py | 3 +- .../openstack/common/notifier/log_notifier.py | 2 +- .../common/notifier/no_op_notifier.py | 2 +- .../common/notifier/rabbit_notifier.py | 31 +++++++++--- .../openstack/common/notifier/rpc_notifier.py | 2 +- .../common/notifier/rpc_notifier2.py | 2 +- .../common/notifier/test_notifier.py | 2 +- billingstack/openstack/common/processutils.py | 2 +- billingstack/openstack/common/rpc/amqp.py | 11 ++-- billingstack/openstack/common/rpc/common.py | 33 +++++++----- .../openstack/common/rpc/dispatcher.py | 6 +-- .../openstack/common/rpc/impl_fake.py | 2 +- .../openstack/common/rpc/impl_kombu.py | 6 +-- .../openstack/common/rpc/impl_qpid.py | 12 ++--- billingstack/openstack/common/rpc/impl_zmq.py | 50 ++++++++++++------- .../openstack/common/rpc/matchmaker.py | 4 +- billingstack/openstack/common/rpc/proxy.py | 22 +++++--- billingstack/openstack/common/setup.py | 12 ++++- billingstack/openstack/common/timeutils.py | 18 ++++--- billingstack/openstack/common/version.py | 2 +- bin/billingstack-rpc-zmq-receiver | 2 +- 34 files changed, 191 insertions(+), 114 deletions(-) diff --git a/billingstack/openstack/common/context.py 
b/billingstack/openstack/common/context.py index dd7dd04..e9cfd73 100644 --- a/billingstack/openstack/common/context.py +++ b/billingstack/openstack/common/context.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -37,9 +37,9 @@ class RequestContext(object): accesses the system, as well as additional request information. """ - def __init__(self, auth_tok=None, user=None, tenant=None, is_admin=False, + def __init__(self, auth_token=None, user=None, tenant=None, is_admin=False, read_only=False, show_deleted=False, request_id=None): - self.auth_tok = auth_tok + self.auth_token = auth_token self.user = user self.tenant = tenant self.is_admin = is_admin @@ -55,7 +55,7 @@ def to_dict(self): 'is_admin': self.is_admin, 'read_only': self.read_only, 'show_deleted': self.show_deleted, - 'auth_token': self.auth_tok, + 'auth_token': self.auth_token, 'request_id': self.request_id} diff --git a/billingstack/openstack/common/eventlet_backdoor.py b/billingstack/openstack/common/eventlet_backdoor.py index 8b81ebf..c0ad460 100644 --- a/billingstack/openstack/common/eventlet_backdoor.py +++ b/billingstack/openstack/common/eventlet_backdoor.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright (c) 2012 Openstack, LLC. +# Copyright (c) 2012 OpenStack Foundation. # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # diff --git a/billingstack/openstack/common/exception.py b/billingstack/openstack/common/exception.py index 96a7bd0..a2fdb66 100644 --- a/billingstack/openstack/common/exception.py +++ b/billingstack/openstack/common/exception.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/excutils.py b/billingstack/openstack/common/excutils.py index 0c9e1b4..4d00903 100644 --- a/billingstack/openstack/common/excutils.py +++ b/billingstack/openstack/common/excutils.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # Copyright 2012, Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/gettextutils.py b/billingstack/openstack/common/gettextutils.py index ea6f085..fbaecf9 100644 --- a/billingstack/openstack/common/gettextutils.py +++ b/billingstack/openstack/common/gettextutils.py @@ -26,7 +26,7 @@ import gettext -t = gettext.translation('openstack-common', 'locale', fallback=True) +t = gettext.translation('billingstack', 'locale', fallback=True) def _(msg): diff --git a/billingstack/openstack/common/importutils.py b/billingstack/openstack/common/importutils.py index 9dec764..3bd277f 100644 --- a/billingstack/openstack/common/importutils.py +++ b/billingstack/openstack/common/importutils.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/iniparser.py b/billingstack/openstack/common/iniparser.py index 9bf399f..2412844 100644 --- a/billingstack/openstack/common/iniparser.py +++ b/billingstack/openstack/common/iniparser.py @@ -54,7 +54,7 @@ def _split_key_value(self, line): value = value.strip() if ((value and value[0] == value[-1]) and - (value[0] == "\"" or value[0] == "'")): + (value[0] == "\"" or value[0] == "'")): value = value[1:-1] return key.strip(), [value] diff --git a/billingstack/openstack/common/jsonutils.py b/billingstack/openstack/common/jsonutils.py index d457b6c..cadcb80 100644 --- a/billingstack/openstack/common/jsonutils.py +++ b/billingstack/openstack/common/jsonutils.py @@ -38,14 +38,10 @@ import inspect import itertools import json -import logging import xmlrpclib -from billingstack.openstack.common.gettextutils import _ from billingstack.openstack.common import timeutils -LOG = logging.getLogger(__name__) - def to_primitive(value, convert_instances=False, convert_datetime=True, level=0, max_depth=3): @@ -85,8 +81,6 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, return 'mock' if level > max_depth: - LOG.error(_('Max serialization depth exceeded on object: %d %s'), - level, value) return '?' # The try block may not be necessary after the class check above, diff --git a/billingstack/openstack/common/local.py b/billingstack/openstack/common/local.py index 8bdc837..f1bfc82 100644 --- a/billingstack/openstack/common/local.py +++ b/billingstack/openstack/common/local.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/lockutils.py b/billingstack/openstack/common/lockutils.py index 9ac18a5..6fe0610 100644 --- a/billingstack/openstack/common/lockutils.py +++ b/billingstack/openstack/common/lockutils.py @@ -207,7 +207,6 @@ def inner(*args, **kwargs): local_lock_path = tempfile.mkdtemp() if not os.path.exists(local_lock_path): - cleanup_dir = True fileutils.ensure_tree(local_lock_path) # NOTE(mikal): the lock name cannot contain directory diff --git a/billingstack/openstack/common/log.py b/billingstack/openstack/common/log.py index e010517..1203ffd 100644 --- a/billingstack/openstack/common/log.py +++ b/billingstack/openstack/common/log.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. @@ -29,6 +29,7 @@ """ +import ConfigParser import cStringIO import inspect import itertools @@ -87,11 +88,11 @@ metavar='PATH', deprecated_name='logfile', help='(Optional) Name of log file to output to. 
' - 'If not set, logging will go to stdout.'), + 'If no default is set, logging will go to stdout.'), cfg.StrOpt('log-dir', deprecated_name='logdir', - help='(Optional) The directory to keep log files in ' - '(will be prepended to --log-file)'), + help='(Optional) The base directory used for relative ' + '--log-file paths'), cfg.BoolOpt('use-syslog', default=False, help='Use syslog for logging.'), @@ -111,9 +112,9 @@ log_opts = [ cfg.StrOpt('logging_context_format_string', - default='%(asctime)s.%(msecs)03d %(levelname)s %(name)s ' - '[%(request_id)s %(user)s %(tenant)s] %(instance)s' - '%(message)s', + default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' + '%(name)s [%(request_id)s %(user)s %(tenant)s] ' + '%(instance)s%(message)s', help='format string to use for log messages with context'), cfg.StrOpt('logging_default_format_string', default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' @@ -323,12 +324,32 @@ def logging_excepthook(type, value, tb): return logging_excepthook +class LogConfigError(Exception): + + message = _('Error loading logging config %(log_config)s: %(err_msg)s') + + def __init__(self, log_config, err_msg): + self.log_config = log_config + self.err_msg = err_msg + + def __str__(self): + return self.message % dict(log_config=self.log_config, + err_msg=self.err_msg) + + +def _load_log_config(log_config): + try: + logging.config.fileConfig(log_config) + except ConfigParser.Error, exc: + raise LogConfigError(log_config, str(exc)) + + def setup(product_name): """Setup logging.""" if CONF.log_config: - logging.config.fileConfig(CONF.log_config) + _load_log_config(CONF.log_config) else: - _setup_logging_from_conf(product_name) + _setup_logging_from_conf() sys.excepthook = _create_logging_excepthook(product_name) @@ -362,8 +383,8 @@ def _find_facility_from_conf(): return facility -def _setup_logging_from_conf(product_name): - log_root = getLogger(product_name).logger +def _setup_logging_from_conf(): + log_root = getLogger(None).logger 
for handler in log_root.handlers: log_root.removeHandler(handler) @@ -401,7 +422,8 @@ def _setup_logging_from_conf(product_name): if CONF.log_format: handler.setFormatter(logging.Formatter(fmt=CONF.log_format, datefmt=datefmt)) - handler.setFormatter(LegacyFormatter(datefmt=datefmt)) + else: + handler.setFormatter(LegacyFormatter(datefmt=datefmt)) if CONF.debug: log_root.setLevel(logging.DEBUG) diff --git a/billingstack/openstack/common/network_utils.py b/billingstack/openstack/common/network_utils.py index 69f6732..5224e01 100644 --- a/billingstack/openstack/common/network_utils.py +++ b/billingstack/openstack/common/network_utils.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2012 OpenStack LLC. +# Copyright 2012 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/notifier/__init__.py b/billingstack/openstack/common/notifier/__init__.py index 482d54e..45c3b46 100644 --- a/billingstack/openstack/common/notifier/__init__.py +++ b/billingstack/openstack/common/notifier/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/notifier/api.py b/billingstack/openstack/common/notifier/api.py index cd9c259..c39ae48 100644 --- a/billingstack/openstack/common/notifier/api.py +++ b/billingstack/openstack/common/notifier/api.py @@ -1,4 +1,4 @@ -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -30,7 +30,6 @@ notifier_opts = [ cfg.MultiStrOpt('notification_driver', default=[], - deprecated_name='list_notifier_drivers', help='Driver or drivers to handle sending notifications'), cfg.StrOpt('default_notification_level', default='INFO', diff --git a/billingstack/openstack/common/notifier/log_notifier.py b/billingstack/openstack/common/notifier/log_notifier.py index cbf5734..a0fcaf9 100644 --- a/billingstack/openstack/common/notifier/log_notifier.py +++ b/billingstack/openstack/common/notifier/log_notifier.py @@ -1,4 +1,4 @@ -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/notifier/no_op_notifier.py b/billingstack/openstack/common/notifier/no_op_notifier.py index ee1ddbd..bc7a56c 100644 --- a/billingstack/openstack/common/notifier/no_op_notifier.py +++ b/billingstack/openstack/common/notifier/no_op_notifier.py @@ -1,4 +1,4 @@ -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/notifier/rabbit_notifier.py b/billingstack/openstack/common/notifier/rabbit_notifier.py index 1d43922..99bdd7b 100644 --- a/billingstack/openstack/common/notifier/rabbit_notifier.py +++ b/billingstack/openstack/common/notifier/rabbit_notifier.py @@ -1,4 +1,4 @@ -# Copyright 2012 Red Hat, Inc. +# Copyright 2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -14,16 +14,33 @@ # under the License. 
+from billingstack.openstack.common import cfg +from billingstack.openstack.common import context as req_context from billingstack.openstack.common.gettextutils import _ from billingstack.openstack.common import log as logging -from billingstack.openstack.common.notifier import rpc_notifier +from billingstack.openstack.common import rpc LOG = logging.getLogger(__name__) +notification_topic_opt = cfg.ListOpt( + 'notification_topics', default=['notifications', ], + help='AMQP topic used for openstack notifications') + +CONF = cfg.CONF +CONF.register_opt(notification_topic_opt) -def notify(context, message): - """Deprecated in Grizzly. Please use rpc_notifier instead.""" - LOG.deprecated(_("The rabbit_notifier is now deprecated." - " Please use rpc_notifier instead.")) - rpc_notifier.notify(context, message) +def notify(context, message): + """Sends a notification to the RabbitMQ""" + if not context: + context = req_context.get_admin_context() + priority = message.get('priority', + CONF.default_notification_level) + priority = priority.lower() + for topic in CONF.notification_topics: + topic = '%s.%s' % (topic, priority) + try: + rpc.notify(context, topic, message) + except Exception, e: + LOG.exception(_("Could not send notification to %(topic)s. " + "Payload=%(message)s"), locals()) diff --git a/billingstack/openstack/common/notifier/rpc_notifier.py b/billingstack/openstack/common/notifier/rpc_notifier.py index fd96184..ac626e3 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier.py +++ b/billingstack/openstack/common/notifier/rpc_notifier.py @@ -1,4 +1,4 @@ -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/notifier/rpc_notifier2.py b/billingstack/openstack/common/notifier/rpc_notifier2.py index a959f74..7261c70 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier2.py +++ b/billingstack/openstack/common/notifier/rpc_notifier2.py @@ -1,4 +1,4 @@ -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/notifier/test_notifier.py b/billingstack/openstack/common/notifier/test_notifier.py index 5e34880..96c1746 100644 --- a/billingstack/openstack/common/notifier/test_notifier.py +++ b/billingstack/openstack/common/notifier/test_notifier.py @@ -1,4 +1,4 @@ -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/processutils.py b/billingstack/openstack/common/processutils.py index 899d112..d1ef569 100644 --- a/billingstack/openstack/common/processutils.py +++ b/billingstack/openstack/common/processutils.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/openstack/common/rpc/amqp.py b/billingstack/openstack/common/rpc/amqp.py index 8dd6f7f..a032211 100644 --- a/billingstack/openstack/common/rpc/amqp.py +++ b/billingstack/openstack/common/rpc/amqp.py @@ -446,9 +446,11 @@ def _process_data(self, ctxt, version, method, args): connection_pool=self.connection_pool, log_failure=False) except Exception: - LOG.exception(_('Exception during message handling')) - ctxt.reply(None, sys.exc_info(), - connection_pool=self.connection_pool) + # sys.exc_info() is deleted by LOG.exception(). + exc_info = sys.exc_info() + LOG.error(_('Exception during message handling'), + exc_info=exc_info) + ctxt.reply(None, exc_info, connection_pool=self.connection_pool) class MulticallProxyWaiter(object): @@ -496,7 +498,6 @@ def __iter__(self): data = self._dataqueue.get(timeout=self._timeout) result = self._process_data(data) except queue.Empty: - LOG.exception(_('Timed out waiting for RPC response.')) self.done() raise rpc_common.Timeout() except Exception: @@ -663,7 +664,7 @@ def notify(conf, context, topic, msg, connection_pool, envelope): pack_context(msg, context) with ConnectionContext(conf, connection_pool) as conn: if envelope: - msg = rpc_common.serialize_msg(msg, force_envelope=True) + msg = rpc_common.serialize_msg(msg) conn.notify_send(topic, msg) diff --git a/billingstack/openstack/common/rpc/common.py b/billingstack/openstack/common/rpc/common.py index f15d641..e5011ab 100644 --- a/billingstack/openstack/common/rpc/common.py +++ b/billingstack/openstack/common/rpc/common.py @@ -70,10 +70,6 @@ _MESSAGE_KEY = 'oslo.message' -# TODO(russellb) Turn this on after Grizzly. 
-_SEND_RPC_ENVELOPE = False - - class RPCException(Exception): message = _("An unknown RPC related exception occurred.") @@ -122,7 +118,25 @@ class Timeout(RPCException): This exception is raised if the rpc_response_timeout is reached while waiting for a response from the remote side. """ - message = _("Timeout while waiting on RPC response.") + message = _('Timeout while waiting on RPC response - ' + 'topic: "%(topic)s", RPC method: "%(method)s" ' + 'info: "%(info)s"') + + def __init__(self, info=None, topic=None, method=None): + """ + :param info: Extra info to convey to the user + :param topic: The topic that the rpc call was sent to + :param rpc_method_name: The name of the rpc method being + called + """ + self.info = info + self.topic = topic + self.method = method + super(Timeout, self).__init__( + None, + info=info or _(''), + topic=topic or _(''), + method=method or _('')) class DuplicateMessageError(RPCException): @@ -237,9 +251,7 @@ def consume_in_thread(self): raise NotImplementedError() def consume_in_thread_group(self, thread_group): - """ - Spawn a thread to handle incoming messages in the supplied - ThreadGroup. + """Spawn a thread to handle incoming messages in the supplied ThreadGroup. Spawn a thread that will be responsible for handling all incoming messages for consumers that were set up on this connection. @@ -456,10 +468,7 @@ def version_is_compatible(imp_version, version): return True -def serialize_msg(raw_msg, force_envelope=False): - if not _SEND_RPC_ENVELOPE and not force_envelope: - return raw_msg - +def serialize_msg(raw_msg): # NOTE(russellb) See the docstring for _RPC_ENVELOPE_VERSION for more # information about this format. 
msg = {_VERSION_KEY: _RPC_ENVELOPE_VERSION, diff --git a/billingstack/openstack/common/rpc/dispatcher.py b/billingstack/openstack/common/rpc/dispatcher.py index b22de6a..251c9fb 100644 --- a/billingstack/openstack/common/rpc/dispatcher.py +++ b/billingstack/openstack/common/rpc/dispatcher.py @@ -126,13 +126,11 @@ def dispatch(self, ctxt, version, method, **kwargs): rpc_api_version = '1.0' is_compatible = rpc_common.version_is_compatible(rpc_api_version, version) - had_compatible = had_compatible or is_compatible - func = getattr(proxyobj, method) - if not func: + if not hasattr(proxyobj, method): continue if is_compatible: - return func(ctxt, **kwargs) + return getattr(proxyobj, method)(ctxt, **kwargs) if had_compatible: raise AttributeError("No such RPC function '%s'" % method) diff --git a/billingstack/openstack/common/rpc/impl_fake.py b/billingstack/openstack/common/rpc/impl_fake.py index 044ab3f..8d66284 100644 --- a/billingstack/openstack/common/rpc/impl_fake.py +++ b/billingstack/openstack/common/rpc/impl_fake.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC +# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain diff --git a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py index 0bd8df5..b3c2024 100644 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ b/billingstack/openstack/common/rpc/impl_kombu.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC +# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain @@ -624,8 +624,8 @@ def iterconsume(self, limit=None, timeout=None): def _error_callback(exc): if isinstance(exc, socket.timeout): - LOG.exception(_('Timed out waiting for RPC response: %s') % - str(exc)) + LOG.debug(_('Timed out waiting for RPC response: %s') % + str(exc)) raise rpc_common.Timeout() else: LOG.exception(_('Failed to consume message from queue: %s') % diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py index 9e58da0..356886a 100644 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ b/billingstack/openstack/common/rpc/impl_qpid.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC +# Copyright 2011 OpenStack Foundation # Copyright 2011 - 2012, Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -40,8 +40,8 @@ cfg.StrOpt('qpid_hostname', default='localhost', help='Qpid broker hostname'), - cfg.StrOpt('qpid_port', - default='5672', + cfg.IntOpt('qpid_port', + default=5672, help='Qpid broker port'), cfg.ListOpt('qpid_hosts', default=['$qpid_hostname:$qpid_port'], @@ -320,7 +320,7 @@ def connection_create(self, broker): # Reconnection is done by self.reconnect() self.connection.reconnect = False self.connection.heartbeat = self.conf.qpid_heartbeat - self.connection.protocol = self.conf.qpid_protocol + self.connection.transport = self.conf.qpid_protocol self.connection.tcp_nodelay = self.conf.qpid_tcp_nodelay def _register_consumer(self, consumer): @@ -415,8 +415,8 @@ def iterconsume(self, limit=None, timeout=None): def _error_callback(exc): if isinstance(exc, qpid_exceptions.Empty): - LOG.exception(_('Timed out waiting for RPC response: %s') % - str(exc)) + LOG.debug(_('Timed out waiting for RPC response: %s') % + str(exc)) raise rpc_common.Timeout() else: LOG.exception(_('Failed to consume message from queue: %s') % diff --git a/billingstack/openstack/common/rpc/impl_zmq.py 
b/billingstack/openstack/common/rpc/impl_zmq.py index bf7fdc7..e27f6ec 100644 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ b/billingstack/openstack/common/rpc/impl_zmq.py @@ -16,6 +16,7 @@ import os import pprint +import re import socket import sys import types @@ -25,6 +26,7 @@ import greenlet from oslo.config import cfg +from billingstack.openstack.common import excutils from billingstack.openstack.common.gettextutils import _ from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils @@ -91,8 +93,8 @@ def _serialize(data): try: return jsonutils.dumps(data, ensure_ascii=True) except TypeError: - LOG.error(_("JSON serialization failed.")) - raise + with excutils.save_and_reraise_exception(): + LOG.error(_("JSON serialization failed.")) def _deserialize(data): @@ -219,7 +221,7 @@ def __init__(self, addr, socket_type=None, bind=False): def cast(self, msg_id, topic, data, envelope=False): msg_id = msg_id or 0 - if not (envelope or rpc_common._SEND_RPC_ENVELOPE): + if not envelope: self.outq.send(map(bytes, (msg_id, topic, 'cast', _serialize(data)))) return @@ -293,11 +295,16 @@ def _get_response(self, ctx, proxy, topic, data): def reply(self, ctx, proxy, msg_id=None, context=None, topic=None, msg=None): """Reply to a casted call.""" - # Our real method is curried into msg['args'] + # NOTE(ewindisch): context kwarg exists for Grizzly compat. + # this may be able to be removed earlier than + # 'I' if ConsumerBase.process were refactored. 
+ if type(msg) is list: + payload = msg[-1] + else: + payload = msg - child_ctx = RpcContext.unmarshal(msg[0]) response = ConsumerBase.normalize_reply( - self._get_response(child_ctx, proxy, topic, msg[1]), + self._get_response(ctx, proxy, topic, payload), ctx.replies) LOG.debug(_("Sending reply")) @@ -437,6 +444,8 @@ class ZmqProxy(ZmqBaseReactor): def __init__(self, conf): super(ZmqProxy, self).__init__(conf) + pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\')) + self.badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep))) self.topic_proxy = {} @@ -462,6 +471,13 @@ def publisher(waiter): LOG.info(_("Creating proxy for topic: %s"), topic) try: + # The topic is received over the network, + # don't trust this input. + if self.badchars.search(topic) is not None: + emsg = _("Topic contained dangerous characters.") + LOG.warn(emsg) + raise RPCException(emsg) + out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" % (ipc_dir, topic), sock_type, bind=True) @@ -518,9 +534,9 @@ def consume_in_thread(self): ipc_dir, run_as_root=True) utils.execute('chmod', '750', ipc_dir, run_as_root=True) except utils.ProcessExecutionError: - LOG.error(_("Could not create IPC directory %s") % - (ipc_dir, )) - raise + with excutils.save_and_reraise_exception(): + LOG.error(_("Could not create IPC directory %s") % + (ipc_dir, )) try: self.register(consumption_proxy, @@ -528,9 +544,9 @@ def consume_in_thread(self): zmq.PULL, out_bind=True) except zmq.ZMQError: - LOG.error(_("Could not create ZeroMQ receiver daemon. " - "Socket may already be in use.")) - raise + with excutils.save_and_reraise_exception(): + LOG.error(_("Could not create ZeroMQ receiver daemon. 
" + "Socket may already be in use.")) super(ZmqProxy, self).consume_in_thread() @@ -592,9 +608,6 @@ def consume(self, sock): self.pool.spawn_n(self.process, proxy, ctx, request) - def consume_in_thread_group(self, thread_group): - self.reactor.consume_in_thread_group(thread_group) - class Connection(rpc_common.Connection): """Manages connections and threads.""" @@ -647,6 +660,9 @@ def consume_in_thread(self): _get_matchmaker().start_heartbeat() self.reactor.consume_in_thread() + def consume_in_thread_group(self, thread_group): + self.reactor.consume_in_thread_group(thread_group) + def _cast(addr, context, topic, msg, timeout=None, envelope=False, _msg_id=None): @@ -684,8 +700,8 @@ def _call(addr, context, topic, msg, timeout=None, 'method': '-reply', 'args': { 'msg_id': msg_id, - 'context': mcontext, 'topic': reply_topic, + # TODO(ewindisch): safe to remove mcontext in I. 'msg': [mcontext, msg] } } @@ -760,7 +776,7 @@ def _multi_send(method, context, topic, msg, timeout=None, LOG.warn(_("No matchmaker results. Not casting.")) # While not strictly a timeout, callers know how to handle # this exception and a timeout isn't too big a lie. - raise rpc_common.Timeout, "No match from matchmaker." 
+ raise rpc_common.Timeout(_("No match from matchmaker.")) # This supports brokerless fanout (addresses > 1) for queue in queues: diff --git a/billingstack/openstack/common/rpc/matchmaker.py b/billingstack/openstack/common/rpc/matchmaker.py index 65da043..d6ee67e 100644 --- a/billingstack/openstack/common/rpc/matchmaker.py +++ b/billingstack/openstack/common/rpc/matchmaker.py @@ -35,10 +35,10 @@ default='/etc/nova/matchmaker_ring.json', help='Matchmaker ring file (JSON)'), cfg.IntOpt('matchmaker_heartbeat_freq', - default='300', + default=300, help='Heartbeat frequency'), cfg.IntOpt('matchmaker_heartbeat_ttl', - default='600', + default=600, help='Heartbeat time-to-live.'), ] diff --git a/billingstack/openstack/common/rpc/proxy.py b/billingstack/openstack/common/rpc/proxy.py index 0eaa968..c1a6a02 100644 --- a/billingstack/openstack/common/rpc/proxy.py +++ b/billingstack/openstack/common/rpc/proxy.py @@ -68,16 +68,21 @@ def call(self, context, msg, topic=None, version=None, timeout=None): :param context: The request context :param msg: The message to send, including the method and args. :param topic: Override the topic for this message. + :param version: (Optional) Override the requested API version in this + message. :param timeout: (Optional) A timeout to use when waiting for the response. If no timeout is specified, a default timeout will be used that is usually sufficient. - :param version: (Optional) Override the requested API version in this - message. :returns: The return value from the remote method. """ self._set_version(msg, version) - return rpc.call(context, self._get_topic(topic), msg, timeout) + real_topic = self._get_topic(topic) + try: + return rpc.call(context, real_topic, msg, timeout) + except rpc.common.Timeout as exc: + raise rpc.common.Timeout( + exc.info, real_topic, msg.get('method')) def multicall(self, context, msg, topic=None, version=None, timeout=None): """rpc.multicall() a remote method. 
@@ -85,17 +90,22 @@ def multicall(self, context, msg, topic=None, version=None, timeout=None): :param context: The request context :param msg: The message to send, including the method and args. :param topic: Override the topic for this message. + :param version: (Optional) Override the requested API version in this + message. :param timeout: (Optional) A timeout to use when waiting for the response. If no timeout is specified, a default timeout will be used that is usually sufficient. - :param version: (Optional) Override the requested API version in this - message. :returns: An iterator that lets you process each of the returned values from the remote method as they arrive. """ self._set_version(msg, version) - return rpc.multicall(context, self._get_topic(topic), msg, timeout) + real_topic = self._get_topic(topic) + try: + return rpc.multicall(context, real_topic, msg, timeout) + except rpc.common.Timeout as exc: + raise rpc.common.Timeout( + exc.info, real_topic, msg.get('method')) def cast(self, context, msg, topic=None, version=None): """rpc.cast() a remote method. diff --git a/billingstack/openstack/common/setup.py b/billingstack/openstack/common/setup.py index 80a0ece..dec74fd 100644 --- a/billingstack/openstack/common/setup.py +++ b/billingstack/openstack/common/setup.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # Copyright 2012-2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. 
# @@ -149,7 +149,7 @@ def write_git_changelog(): git_dir = _get_git_directory() if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'): if git_dir: - git_log_cmd = 'git --git-dir=%s log --stat' % git_dir + git_log_cmd = 'git --git-dir=%s log' % git_dir changelog = _run_shell_command(git_log_cmd) mailmap = _parse_git_mailmap(git_dir) with open(new_changelog, "w") as changelog_file: @@ -171,6 +171,14 @@ def generate_authors(): " log --format='%aN <%aE>' | sort -u | " "egrep -v '" + jenkins_email + "'") changelog = _run_shell_command(git_log_cmd) + signed_cmd = ("git log --git-dir=" + git_dir + + " | grep -i Co-authored-by: | sort -u") + signed_entries = _run_shell_command(signed_cmd) + if signed_entries: + new_entries = "\n".join( + [signed.split(":", 1)[1].strip() + for signed in signed_entries.split("\n") if signed]) + changelog = "\n".join((changelog, new_entries)) mailmap = _parse_git_mailmap(git_dir) with open(new_authors, 'w') as new_authors_fh: new_authors_fh.write(canonicalize_emails(changelog, mailmap)) diff --git a/billingstack/openstack/common/timeutils.py b/billingstack/openstack/common/timeutils.py index e2c2740..6094365 100644 --- a/billingstack/openstack/common/timeutils.py +++ b/billingstack/openstack/common/timeutils.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC. +# Copyright 2011 OpenStack Foundation. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -25,18 +25,22 @@ import iso8601 -TIME_FORMAT = "%Y-%m-%dT%H:%M:%S" -PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" +# ISO 8601 extended time format with microseconds +_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' +_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' +PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND -def isotime(at=None): +def isotime(at=None, subsecond=False): """Stringify time in ISO 8601 format""" if not at: at = utcnow() - str = at.strftime(TIME_FORMAT) + st = at.strftime(_ISO8601_TIME_FORMAT + if not subsecond + else _ISO8601_TIME_FORMAT_SUBSECOND) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - str += ('Z' if tz == 'UTC' else tz) - return str + st += ('Z' if tz == 'UTC' else tz) + return st def parse_isotime(timestr): diff --git a/billingstack/openstack/common/version.py b/billingstack/openstack/common/version.py index 007829c..a593ae3 100644 --- a/billingstack/openstack/common/version.py +++ b/billingstack/openstack/common/version.py @@ -1,5 +1,5 @@ -# Copyright 2012 OpenStack LLC +# Copyright 2012 OpenStack Foundation # Copyright 2012-2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/bin/billingstack-rpc-zmq-receiver b/bin/billingstack-rpc-zmq-receiver index 543af5e..77f9fde 100755 --- a/bin/billingstack-rpc-zmq-receiver +++ b/bin/billingstack-rpc-zmq-receiver @@ -1,7 +1,7 @@ #!/usr/bin/env python # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2011 OpenStack LLC +# Copyright 2011 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain From d0aedbb77a7c95bb9ae96286d386a33f0645688c Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 31 Mar 2013 18:25:50 +0000 Subject: [PATCH 090/182] Forgot to add this --- billingstack/openstack/common/utils.py | 140 +++++++++++++++++++++++++ 1 file changed, 140 insertions(+) create mode 100644 billingstack/openstack/common/utils.py diff --git a/billingstack/openstack/common/utils.py b/billingstack/openstack/common/utils.py new file mode 100644 index 0000000..6de5cbe --- /dev/null +++ b/billingstack/openstack/common/utils.py @@ -0,0 +1,140 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack LLC. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +System-level utilities and helper functions. +""" + +import logging +import random +import shlex + +from eventlet import greenthread +from eventlet.green import subprocess + +from billingstack.openstack.common import exception +from billingstack.openstack.common.gettextutils import _ + + +LOG = logging.getLogger(__name__) + + +def int_from_bool_as_string(subject): + """ + Interpret a string as a boolean and return either 1 or 0. + + Any string value in: + ('True', 'true', 'On', 'on', '1') + is interpreted as a boolean True. + + Useful for JSON-decoded stuff and config file parsing + """ + return bool_from_string(subject) and 1 or 0 + + +def bool_from_string(subject): + """ + Interpret a string as a boolean. 
+ + Any string value in: + ('True', 'true', 'On', 'on', 'Yes', 'yes', '1') + is interpreted as a boolean True. + + Useful for JSON-decoded stuff and config file parsing + """ + if isinstance(subject, bool): + return subject + if isinstance(subject, basestring): + if subject.strip().lower() in ('true', 'on', 'yes', '1'): + return True + return False + + +def execute(*cmd, **kwargs): + """ + Helper method to execute command with optional retry. + + :cmd Passed to subprocess.Popen. + :process_input Send to opened process. + :check_exit_code Defaults to 0. Raise exception.ProcessExecutionError + unless program exits with this code. + :delay_on_retry True | False. Defaults to True. If set to True, wait a + short amount of time before retrying. + :attempts How many times to retry cmd. + :run_as_root True | False. Defaults to False. If set to True, + the command is prefixed by the command specified + in the root_helper kwarg. + :root_helper command to prefix all cmd's with + + :raises exception.Error on receiving unknown arguments + :raises exception.ProcessExecutionError + """ + + process_input = kwargs.pop('process_input', None) + check_exit_code = kwargs.pop('check_exit_code', 0) + delay_on_retry = kwargs.pop('delay_on_retry', True) + attempts = kwargs.pop('attempts', 1) + run_as_root = kwargs.pop('run_as_root', False) + root_helper = kwargs.pop('root_helper', '') + if len(kwargs): + raise exception.Error(_('Got unknown keyword args ' + 'to utils.execute: %r') % kwargs) + if run_as_root: + cmd = shlex.split(root_helper) + list(cmd) + cmd = map(str, cmd) + + while attempts > 0: + attempts -= 1 + try: + LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd)) + _PIPE = subprocess.PIPE # pylint: disable=E1101 + obj = subprocess.Popen(cmd, + stdin=_PIPE, + stdout=_PIPE, + stderr=_PIPE, + close_fds=True) + result = None + if process_input is not None: + result = obj.communicate(process_input) + else: + result = obj.communicate() + obj.stdin.close() # pylint: 
disable=E1101 + _returncode = obj.returncode # pylint: disable=E1101 + if _returncode: + LOG.debug(_('Result was %s') % _returncode) + if (isinstance(check_exit_code, int) and + not isinstance(check_exit_code, bool) and + _returncode != check_exit_code): + (stdout, stderr) = result + raise exception.ProcessExecutionError( + exit_code=_returncode, + stdout=stdout, + stderr=stderr, + cmd=' '.join(cmd)) + return result + except exception.ProcessExecutionError: + if not attempts: + raise + else: + LOG.debug(_('%r failed. Retrying.'), cmd) + if delay_on_retry: + greenthread.sleep(random.randint(20, 200) / 100.0) + finally: + # NOTE(termie): this appears to be necessary to let the subprocess + # call clean something up in between calls, without + # it two execute calls in a row hangs the second one + greenthread.sleep(0) From 3f19f8e0ab88c04fe3712713399e41a9a62e0f14 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 31 Mar 2013 20:50:56 +0000 Subject: [PATCH 091/182] For now let it be nullable --- billingstack/storage/impl_sqlalchemy/models.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index 246fb36..175dbfe 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -390,8 +390,7 @@ class Subscription(BASE, BaseMixin): payment_method = relationship('PaymentMethod', backref='subscriptions') payment_method_id = Column(UUID, ForeignKey('payment_method.id', - ondelete='CASCADE', onupdate='CASCADE'), - nullable=False) + ondelete='CASCADE', onupdate='CASCADE')) class Usage(BASE, BaseMixin): From 28e636433635b0a5bc966f13638d8e12d955200f Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 4 Apr 2013 09:35:53 +0000 Subject: [PATCH 092/182] Simplify payment architecture --- billingstack/api/v1/models.py | 9 +--- billingstack/api/v1/resources.py | 13 +----- billingstack/payment_gateway/__init__.py 
| 4 +- billingstack/sqlalchemy/model_base.py | 3 +- .../storage/impl_sqlalchemy/__init__.py | 41 ++++++------------- .../storage/impl_sqlalchemy/models.py | 5 --- .../tests/api/v1/test_payment_method.py | 5 --- billingstack/tests/base.py | 15 ++----- billingstack/tests/storage/__init__.py | 25 +++-------- tools/load_samples.py | 3 -- 10 files changed, 29 insertions(+), 94 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 1cfaf81..1edb269 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -61,16 +61,12 @@ class InvoiceState(DescribedBase): pass -class PGMethod(DescribedBase): - type = text - - class PGProvider(DescribedBase): def __init__(self, **kw): - kw['methods'] = [PGMethod.from_db(m) for m in kw.get('methods', [])] + #kw['methods'] = [PGMethod.from_db(m) for m in kw.get('methods', [])] super(PGProvider, self).__init__(**kw) - methods = [PGMethod] + methods = [DictType(key_type=text, value_type=property_type)] properties = DictType(key_type=text, value_type=property_type) @@ -161,7 +157,6 @@ class PaymentMethod(Base): expires = text customer_id = text - provider_method_id = text provider_config_id = text properties = DictType(key_type=text, value_type=property_type) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 57b76c0..83d9e07 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -153,17 +153,6 @@ def list_pg_providers(q=[]): return map(models.PGProvider.from_db, rows) -@bp.get('/payment-gateway-providers//methods') -@signature([models.PGMethod], str, [Query]) -def list_pg_methods(provider_id, q=[]): - criterion = _query_to_criterion(q, provider_id=provider_id) - - rows = central_api.list_pg_methods( - request.environ['context'], criterion=criterion) - - return map(models.PGMethod.from_db, rows) - - # invoice_states @bp.post('/invoice-states') @signature(models.InvoiceState, body=models.InvoiceState) @@ -261,7 
+250,7 @@ def delete_merchant(merchant_id): # Invoices -@bp.post('/merchants//invoices') +@bp.post('/merchants//payment-gateways') @signature(models.PGConfig, str, body=models.PGConfig) def create_payment_gateway(merchant_id, body): row = central_api.create_pg_config( diff --git a/billingstack/payment_gateway/__init__.py b/billingstack/payment_gateway/__init__.py index 509687b..6325db4 100644 --- a/billingstack/payment_gateway/__init__.py +++ b/billingstack/payment_gateway/__init__.py @@ -22,9 +22,9 @@ def _register(ep, context, conn): msg = "PaymentGatewayProvider %s doesn't provide any methods - Skipped" LOG.warn(msg, provider.get_plugin_name()) return - + values['methods'] = methods try: - conn.pg_provider_register(context, values, methods=methods) + conn.pg_provider_register(context, values) except exceptions.ConfigurationError: return diff --git a/billingstack/sqlalchemy/model_base.py b/billingstack/sqlalchemy/model_base.py index bbd928f..46f339f 100644 --- a/billingstack/sqlalchemy/model_base.py +++ b/billingstack/sqlalchemy/model_base.py @@ -67,7 +67,8 @@ def __getitem__(self, key): return getattr(self, key) def __iter__(self): - columns = dict(object_mapper(self).columns).keys() + columns = [i.name for i in iter(object_mapper(self).columns) + if not i.name.startswith('_')] # NOTE(russellb): Allow models to specify other keys that can be looked # up, beyond the actual db columns. An example would be the 'name' # property for an Instance. 
diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 0fcd22b..225ec0e 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -199,10 +199,13 @@ def delete_contact_info(self, ctxt, id_): self._delete(models.ContactInfo, id_) # Payment Gateway Providers - def pg_provider_register(self, ctxt, values, methods=[]): + def pg_provider_register(self, ctxt, values): """ Register a Provider and it's Methods """ + values = values.copy() + methods = values.pop('methods', []) + query = self.session.query(models.PGProvider)\ .filter_by(name=values['name']) @@ -242,14 +245,10 @@ def _get_provider_methods(self, provider): return methods def _set_provider_methods(self, ctxt, provider, config_methods): - """ - Helper method for setting the Methods for a Provider - """ + """Helper method for setting the Methods for a Provider""" existing = self._get_provider_methods(provider) - for method in config_methods: self._set_method(provider, method, existing) - self._save(provider) def _set_method(self, provider, method, existing): key = models.PGMethod.make_key(method) @@ -260,25 +259,6 @@ def _set_method(self, provider, method, existing): row = models.PGMethod(**method) provider.methods.append(row) - # PGMethods - def create_pg_method(self, ctxt, values): - row = models.PGMethod(**values) - self._save(row) - return dict(row) - - def list_pg_methods(self, ctxt, **kw): - return self._list(models.PGMethod, **kw) - - def get_pg_method(self, ctxt, id_): - return self._get(models.PGMethod, id_) - - def update_pg_method(self, ctxt, id_, values): - row = self._update(models.PGMethod, id_, values) - return dict(row) - - def delete_pg_method(self, ctxt, id_): - return self._delete(models.PGMethod, id_) - # Payment Gateway Configuration def create_pg_config(self, ctxt, merchant_id, values): merchant = self._get(models.Merchant, merchant_id) @@ -315,7 +295,7 @@ def 
create_payment_method(self, ctxt, customer_id, values): row.customer = customer self._save(row) - return self._dict(row, extra=['provider_method']) + return self._dict(row) def list_payment_methods(self, ctxt, criterion=None, **kw): query = self.session.query(models.PaymentMethod) @@ -325,15 +305,15 @@ def list_payment_methods(self, ctxt, criterion=None, **kw): rows = self._list(query=query, cls=models.PaymentMethod, criterion=criterion, **kw) - return [self._dict(row, extra=['provider_method']) for row in rows] + return [self._dict(row) for row in rows] def get_payment_method(self, ctxt, id_, **kw): row = self._get_id_or_name(models.PaymentMethod, id_) - return self._dict(row, extra=['provider_method']) + return self._dict(row) def update_payment_method(self, ctxt, id_, values): row = self._update(models.PaymentMethod, id_, values) - return self._dict(row, extra=['provider_method']) + return self._dict(row) def delete_payment_method(self, ctxt, id_): self._delete(models.PaymentMethod, id_) @@ -537,6 +517,8 @@ def create_product(self, ctxt, merchant_id, values): :param merchant_id: The Merchant :param values: Values describing the new Product """ + values = values.copy() + merchant = self._get(models.Merchant, merchant_id) properties = values.pop('properties', {}) @@ -574,6 +556,7 @@ def update_product(self, ctxt, id_, values): :param id_: The Product ID :param values: Values to update with """ + values = values.copy() properties = values.pop('properties', {}) row = self._get(models.Product, id_) diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index 175dbfe..3b4c9dd 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -227,11 +227,6 @@ class PaymentMethod(BASE, BaseMixin): customer_id = Column(UUID, ForeignKey('customer.id', onupdate='CASCADE'), nullable=False) - provider_method = relationship('PGMethod', - backref='payment_methods') - 
provider_method_id = Column(UUID, ForeignKey('pg_method.id', - onupdate='CASCADE'), nullable=False) - provider_config = relationship('PGConfig', backref='payment_methods') provider_config_id = Column(UUID, ForeignKey('pg_config.id', onupdate='CASCADE'), nullable=False) diff --git a/billingstack/tests/api/v1/test_payment_method.py b/billingstack/tests/api/v1/test_payment_method.py index 27eb749..17e4a87 100644 --- a/billingstack/tests/api/v1/test_payment_method.py +++ b/billingstack/tests/api/v1/test_payment_method.py @@ -38,7 +38,6 @@ def setUp(self): def test_create_payment_method(self): fixture = self.get_fixture('payment_method') - fixture['provider_method_id'] = self.provider['methods'][0]['id'] fixture['provider_config_id'] = self.pg_config['id'] url = self.path % (self.merchant['id'], self.customer['id']) @@ -49,7 +48,6 @@ def test_create_payment_method(self): def test_list_payment_methods(self): values = { - 'provider_method_id': self.provider['methods'][0]['id'], 'provider_config_id': self.pg_config['id'] } self.create_payment_method(self.customer['id'], values=values) @@ -61,7 +59,6 @@ def test_list_payment_methods(self): def test_get_payment_method(self): values = { - 'provider_method_id': self.provider['methods'][0]['id'], 'provider_config_id': self.pg_config['id'] } _, method = self.create_payment_method( @@ -76,7 +73,6 @@ def test_get_payment_method(self): def test_update_payment_method(self): values = { - 'provider_method_id': self.provider['methods'][0]['id'], 'provider_config_id': self.pg_config['id'] } fixture, method = self.create_payment_method( @@ -91,7 +87,6 @@ def test_update_payment_method(self): def test_delete_payment_method(self): values = { - 'provider_method_id': self.provider['methods'][0]['id'], 'provider_config_id': self.pg_config['id'] } _, method = self.create_payment_method( diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index 116d1d6..a54f6b9 100644 --- a/billingstack/tests/base.py +++ 
b/billingstack/tests/base.py @@ -133,7 +133,6 @@ def get_api_service(self): return api_service.Service() def setSamples(self): - _, self.pg_method = self.create_pg_method() _, self.currency = self.create_currency() _, self.language = self.create_language() _, self.merchant = self.create_merchant() @@ -156,22 +155,16 @@ def create_invoice_state(self, fixture=0, values={}, **kw): return fixture, self.central_service.create_invoice_state( ctxt, fixture, **kw) - def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): - methods = [self.get_fixture('pg_method')] or methods + def pg_provider_register(self, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_provider', fixture, values) + if 'methods' not in fixture: + fixture['methods'] = [self.get_fixture('pg_method')] ctxt = kw.pop('context', self.admin_ctxt) - data = self.central_service.pg_provider_register(ctxt, fixture, - methods=methods, **kw) + data = self.central_service.pg_provider_register(ctxt, fixture, **kw) - fixture['methods'] = methods return fixture, data - def create_pg_method(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('pg_method') - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_pg_method(ctxt, fixture) - def _account_defaults(self, values): # NOTE: Do defaults if not 'currency_name' in values: diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index 8f81e5e..c369093 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -44,20 +44,17 @@ def create_currency(self, fixture=0, values={}, **kw): def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): methods = [self.get_fixture('pg_method')] or methods + if not 'methods' in values: + values['methods'] = methods + fixture = self.get_fixture('pg_provider', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) data = self.storage_conn.pg_provider_register( - ctxt, 
fixture, methods=methods, **kw) + ctxt, fixture.copy(), **kw) - fixture['methods'] = methods return fixture, data - def create_pg_method(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('pg_method') - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_pg_method(ctxt, fixture) - def create_merchant(self, fixture=0, values={}, **kw): fixture = self.get_fixture('merchant', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) @@ -132,18 +129,13 @@ def test_pg_provider_register(self): def test_pg_provider_register_different_methods(self): # Add a Global method method1 = {'type': 'creditcard', 'name': 'mastercard'} - self.storage_conn.create_pg_method(self.admin_ctxt, method1) - method2 = {'type': 'creditcard', 'name': 'amex'} - self.storage_conn.create_pg_method(self.admin_ctxt, method2) - method3 = {'type': 'creditcard', 'name': 'visa'} - methods = [method1, method2, method3] - provider = {'name': 'noop'} + provider = {'name': 'noop', 'methods': [method1, method2, method3]} provider = self.storage_conn.pg_provider_register( - self.admin_ctxt, provider, methods) + self.admin_ctxt, provider) # TODO(ekarls): Make this more extensive? 
self.assertLen(3, provider['methods']) @@ -241,7 +233,6 @@ def test_create_payment_method(self): # Setup PaymentMethod values = { - 'provider_method_id': provider['methods'][0]['id'], 'provider_config_id': config['id']} fixture, data = self.create_payment_method( @@ -257,7 +248,6 @@ def test_get_payment_method(self): # Setup PaymentMethod values = { - 'provider_method_id': provider['methods'][0]['id'], 'provider_config_id': config['id']} _, expected = self.create_payment_method( @@ -274,7 +264,6 @@ def test_list_payment_methods(self): self.merchant['id'], values={'provider_id': provider['id']}) values = { - 'provider_method_id': provider['methods'][0]['id'], 'provider_config_id': config['id']} # Add two Customers with some methods @@ -309,7 +298,6 @@ def test_update_payment_method(self): # Setup PaymentMethod values = { - 'provider_method_id': provider['methods'][0]['id'], 'provider_config_id': config['id']} fixture, data = self.create_payment_method( @@ -334,7 +322,6 @@ def test_delete_payment_method(self): # Setup PaymentMethod values = { - 'provider_method_id': provider['methods'][0]['id'], 'provider_config_id': config['id']} fixture, data = self.create_payment_method( diff --git a/tools/load_samples.py b/tools/load_samples.py index 9da6098..2716300 100644 --- a/tools/load_samples.py +++ b/tools/load_samples.py @@ -44,9 +44,6 @@ def get_fixture(name, fixture=0, values={}): for l in samples['language']: languages[l['name']] = conn.create_language(ctxt, l) - for method in samples['pg_method']: - conn.create_pg_method(ctxt, method) - country_data = { "currency_name": currencies['nok']['name'], "language_name": languages['nor']['name']} From 9e4d12f0b82329ee7e7034834577a80686a49fcb Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 4 Apr 2013 10:04:49 +0000 Subject: [PATCH 093/182] Add provider_id --- billingstack/api/v1/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index 
1edb269..c45d7ca 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -147,6 +147,7 @@ class Usage(Base): class PGConfig(Base): name = text title = text + provider_id = text properties = DictType(key_type=text, value_type=property_type) From d95a21e7567868afce3eb46802c1b203a408c0d3 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 4 Apr 2013 10:54:28 +0000 Subject: [PATCH 094/182] Support default pg --- billingstack/api/v1/models.py | 30 ++++++++++++++----- .../storage/impl_sqlalchemy/__init__.py | 6 ++++ .../storage/impl_sqlalchemy/models.py | 11 ++++++- 3 files changed, 38 insertions(+), 9 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index c45d7ca..cb7fb8d 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -164,31 +164,45 @@ class PaymentMethod(Base): class Account(Base): + _keys = ['currency', 'language'] + currency = text language = text name = text + +class Merchant(Account): + default_gateway = text + def to_db(self): values = self.as_dict() - change_suffixes(values, ['currency', 'language'], shorten=False) + change_suffixes(values, self._keys, shorten=False) + values['default_gateway_id'] = values.pop('default_gateway') return values @classmethod def from_db(cls, values): - change_suffixes(values, ['currency', 'language']) + change_suffixes(values, cls._keys) + values['default_gateway'] = values.pop('default_gateway_id') return cls(**values) -class Merchant(Account): - pass - - class Customer(Account): + merchant_id = text + contact_info = [ContactInfo] + def __init__(self, **kw): infos = kw.get('contact_info', {}) kw['contact_info'] = [ContactInfo.from_db(i) for i in infos] super(Customer, self).__init__(**kw) - merchant_id = text - contact_info = [ContactInfo] + def to_db(self): + values = self.as_dict() + change_suffixes(values, self._keys, shorten=False) + return values + + @classmethod + def from_db(cls, values): + change_suffixes(values, 
cls._keys) + return cls(**values) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 225ec0e..5e82120 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -291,6 +291,12 @@ def create_payment_method(self, ctxt, customer_id, values): """ customer = self._get_id_or_name(models.Customer, customer_id) + # NOTE: Attempt to see if there's a default gateway if none is + # specified + if not values.get('provider_config_id') and \ + customer.merchant.default_gateway: + values['provider_config_id'] = customer.merchant.default_gateway_id + row = models.PaymentMethod(**values) row.customer = customer diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index 3b4c9dd..a260e20 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -145,11 +145,20 @@ class Merchant(BASE, BaseMixin): title = Column(Unicode(60)) customers = relationship('Customer', backref='merchant') - payment_gateways = relationship('PGConfig', backref='merchant') + payment_gateways = relationship( + 'PGConfig', backref='merchant', + primaryjoin='merchant.c.id==pg_config.c.merchant_id') plans = relationship('Plan', backref='merchant') products = relationship('Product', backref='merchant') + default_gateway = relationship( + 'PGConfig', uselist=False, + primaryjoin='merchant.c.id==pg_config.c.merchant_id') + default_gateway_id = Column(UUID, ForeignKey('pg_config.id', + use_alter=True, name='default_gateway'), + nullable=True) + currency = relationship('Currency', uselist=False, backref='merchants') currency_name = Column(Unicode(10), ForeignKey('currency.name'), nullable=False) From 77b4483ce5bf60547280981c62b4727f2d32f1f5 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 4 Apr 2013 18:00:24 +0000 Subject: [PATCH 095/182] PEP --- tools/resync_billingstack.py | 
3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/resync_billingstack.py b/tools/resync_billingstack.py index a545af9..dec1718 100644 --- a/tools/resync_billingstack.py +++ b/tools/resync_billingstack.py @@ -16,7 +16,8 @@ cfg.CONF.import_opt('storage_driver', 'billingstack.central', group='service:central') -cfg.CONF.import_opt('database_connection', 'billingstack.storage.impl_sqlalchemy', +cfg.CONF.import_opt('database_connection', + 'billingstack.storage.impl_sqlalchemy', group='storage:sqlalchemy') From ea603d7db2da1f894527da6cc9489566b86028c3 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 4 Apr 2013 18:05:20 +0000 Subject: [PATCH 096/182] Import opt and PEP --- tools/load_samples.py | 2 ++ tools/resync_billingstack.py | 5 ++++- tools/resync_identity.py | 5 ++++- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/tools/load_samples.py b/tools/load_samples.py index 2716300..9e1b02d 100644 --- a/tools/load_samples.py +++ b/tools/load_samples.py @@ -13,6 +13,8 @@ cfg.CONF.import_opt('storage_driver', 'billingstack.central', group='service:central') +cfg.CONF.import_opt('state_path', 'billingstack.paths') + cfg.CONF.import_opt( 'database_connection', 'billingstack.storage.impl_sqlalchemy', diff --git a/tools/resync_billingstack.py b/tools/resync_billingstack.py index dec1718..dcaadf7 100644 --- a/tools/resync_billingstack.py +++ b/tools/resync_billingstack.py @@ -16,11 +16,14 @@ cfg.CONF.import_opt('storage_driver', 'billingstack.central', group='service:central') + +cfg.CONF.import_opt('state_path', 'billingstack.paths') + + cfg.CONF.import_opt('database_connection', 'billingstack.storage.impl_sqlalchemy', group='storage:sqlalchemy') - if __name__ == '__main__': service.prepare_service(sys.argv) conn = get_connection() diff --git a/tools/resync_identity.py b/tools/resync_identity.py index a7a304a..dc64885 100644 --- a/tools/resync_identity.py +++ b/tools/resync_identity.py @@ -16,7 +16,10 @@ 
cfg.CONF.import_opt('storage_driver', 'billingstack.identity.api', group='service:identity_api') -cfg.CONF.import_opt('database_connection', 'billingstack.identity.impl_sqlalchemy', +cfg.CONF.import_opt('state_path', 'billingstack.paths') + +cfg.CONF.import_opt('database_connection', + 'billingstack.identity.impl_sqlalchemy', group='identity:sqlalchemy') From 2683691c3c3ee17d3cd7da20c8b3ed87f41c5af8 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 5 Apr 2013 18:12:16 +0000 Subject: [PATCH 097/182] Remove ABC --- billingstack/storage/base.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/billingstack/storage/base.py b/billingstack/storage/base.py index 8f24b25..fc64945 100644 --- a/billingstack/storage/base.py +++ b/billingstack/storage/base.py @@ -15,7 +15,6 @@ # under the License. # # Copied: Moniker -import abc from billingstack.plugin import Plugin @@ -25,25 +24,17 @@ class StorageEngine(Plugin): __plugin_ns__ = 'billingstack.storage' __plugin_type__ = 'storage' - @abc.abstractmethod def get_connection(self): """ Return a Connection instance based on the configuration settings. """ + raise NotImplementedError class Connection(object): """ A Connection """ - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def __init__(self): - """ - Constructor... 
- """ - def ping(self, context): """ Ping the Storage connection """ return { From 91442fc2667a48221c1cca1229b491051d1b965f Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 5 Apr 2013 18:16:58 +0000 Subject: [PATCH 098/182] Put usages into its own service --- billingstack/api/v1/resources.py | 25 +++--- billingstack/rating/__init__.py | 12 +++ billingstack/rating/rpcapi.py | 40 +++++++++ billingstack/rating/service.py | 47 ++++++++++ billingstack/rating/storage/__init__.py | 31 +++++++ .../rating/storage/impl_sqlalchemy.py | 85 +++++++++++++++++++ .../storage/impl_sqlalchemy/__init__.py | 53 ------------ .../storage/impl_sqlalchemy/models.py | 29 ------- bin/billingstack-rater | 33 +++++++ etc/billingstack/billingstack.conf.sample | 36 ++++++-- setup.py | 6 +- tools/resync_rating.py | 32 +++++++ 12 files changed, 327 insertions(+), 102 deletions(-) create mode 100644 billingstack/rating/__init__.py create mode 100644 billingstack/rating/rpcapi.py create mode 100644 billingstack/rating/service.py create mode 100644 billingstack/rating/storage/__init__.py create mode 100644 billingstack/rating/storage/impl_sqlalchemy.py create mode 100644 bin/billingstack-rater create mode 100644 tools/resync_rating.py diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 83d9e07..4064ef9 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -20,6 +20,7 @@ from billingstack.api.base import Rest, Query from billingstack.api.v1 import models from billingstack.central.rpcapi import central_api +from billingstack.rating.rpcapi import rating_api from wsmeext.flask import signature @@ -676,40 +677,40 @@ def delete_subscription(merchant_id, subscription_id): # Usage -@bp.post('/merchants//usage') +@bp.post('/merchants//usages') @signature(models.Usage, str, body=models.Usage) def create_usage(merchant_id, body): - row = central_api.create_usage( + row = rating_api.create_usage( request.environ['context'], 
body.to_db()) return models.Usage.from_db(row) -@bp.get('/merchants//usage') +@bp.get('/merchants//usages') @signature([models.Usage], str, [Query]) def list_usages(merchant_id, q=[]): criterion = _query_to_criterion(q, merchant_id=merchant_id) - rows = central_api.list_usages( + rows = rating_api.list_usages( request.environ['context'], criterion=criterion) return map(models.Usage.from_db, rows) -@bp.get('/merchants//usage/') +@bp.get('/merchants//usages/') @signature([models.Usage], str, str) def get_usage(merchant_id, usage_id): - row = central_api.get_usage(request.environ['context'], - usage_id) + row = rating_api.get_usage(request.environ['context'], + usage_id) - return models.Invoice.from_db(row) + return models.Usage.from_db(row) -@bp.put('/merchants//usage/') +@bp.put('/merchants//usages/') @signature(models.Usage, str, str, body=models.Usage) def update_usage(merchant_id, usage_id, body): - row = central_api.update_usage( + row = rating_api.update_usage( request.environ['context'], usage_id, body.to_db()) @@ -717,9 +718,9 @@ def update_usage(merchant_id, usage_id, body): return models.Usage.from_db(row) -@bp.delete('/merchants//usage/') +@bp.delete('/merchants//usages/') def delete_usage(merchant_id, usage_id): - central_api.delete_usage( + rating_api.delete_usage( request.environ['context'], usage_id) return Response(status=204) diff --git a/billingstack/rating/__init__.py b/billingstack/rating/__init__.py new file mode 100644 index 0000000..a8fbd58 --- /dev/null +++ b/billingstack/rating/__init__.py @@ -0,0 +1,12 @@ +from oslo.config import cfg + +cfg.CONF.register_group(cfg.OptGroup( + name='service:rater', title="Configuration for Rating/Rater Service" +)) + +cfg.CONF.register_opts([ + cfg.IntOpt('workers', default=None, + help='Number of worker processes to spawn'), + cfg.StrOpt('storage-driver', default='sqlalchemy', + help='The storage driver to use'), +], group='service:rating') diff --git a/billingstack/rating/rpcapi.py 
b/billingstack/rating/rpcapi.py new file mode 100644 index 0000000..10f3f06 --- /dev/null +++ b/billingstack/rating/rpcapi.py @@ -0,0 +1,40 @@ +from oslo.config import cfg + +from billingstack.openstack.common.rpc import proxy + +rpcapi_opts = [ + cfg.StrOpt('rating_topic', default='rating', + help='the topic rating nodes listen on') +] + +cfg.CONF.register_opts(rpcapi_opts) + + +class RatingAPI(proxy.RpcProxy): + BASE_RPC_VERSION = '1.0' + + def __init__(self): + super(RatingAPI, self).__init__( + topic=cfg.CONF.rating_topic, + default_version=self.BASE_RPC_VERSION) + + # Subscriptions + def create_usage(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_usage', values=values)) + + def list_usages(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_usages', + criterion=criterion)) + + def get_usage(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_usage', id_=id_)) + + def update_usage(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_usage', id_=id_, + values=values)) + + def delete_usage(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_usage', id_=id_)) + + +rating_api = RatingAPI() diff --git a/billingstack/rating/service.py b/billingstack/rating/service.py new file mode 100644 index 0000000..8f8302a --- /dev/null +++ b/billingstack/rating/service.py @@ -0,0 +1,47 @@ +from oslo.config import cfg +from billingstack.openstack.common import log as logging +from billingstack.openstack.common.rpc import service as rpc_service +from billingstack.rating import storage + + +cfg.CONF.import_opt('rating_topic', 'billingstack.rating.rpcapi') +cfg.CONF.import_opt('host', 'billingstack.netconf') +cfg.CONF.import_opt('state_path', 'billingstack.paths') + +LOG = logging.getLogger(__name__) + + +class Service(rpc_service.Service): + """ + The Usage / Rater / Rating service for BillingStack. 
+ + This is a service that will receive events typically from a Mediator like + like Medjatur or the DUDE from Dreamhost that pushes data to the API which + casts to this service. + """ + def __init__(self, *args, **kwargs): + kwargs.update( + host=cfg.CONF.host, + topic=cfg.CONF.rating_topic, + ) + + super(Service, self).__init__(*args, **kwargs) + + def start(self): + self.storage_conn = storage.get_connection() + super(Service, self).start() + + def create_usage(self, ctxt, values): + return self.storage_conn.create_usage(ctxt, values) + + def list_usages(self, ctxt, **kw): + return self.storage_conn.list_usages(ctxt, **kw) + + def get_usage(self, ctxt, id_): + return self.storage_conn.get_usage(ctxt, id_) + + def update_usage(self, ctxt, id_, values): + return self.storage_conn.update_usage(ctxt, id_, values) + + def delete_usage(self, ctxt, id_): + return self.storage_conn.delete_usage(ctxt, id_) diff --git a/billingstack/rating/storage/__init__.py b/billingstack/rating/storage/__init__.py new file mode 100644 index 0000000..8f074be --- /dev/null +++ b/billingstack/rating/storage/__init__.py @@ -0,0 +1,31 @@ +from oslo.config import cfg +from billingstack.storage import base + + +class StorageEngine(base.StorageEngine): + """Base class for the Rating storage""" + __plugin_ns__ = 'billingstack.rating.storage' + + +class Connection(base.Connection): + """Define the base API for Rating storage""" + def create_usage(self, ctxt, values): + raise NotImplementedError + + def list_usages(self, ctxt, **kw): + raise NotImplementedError + + def get_usage(self, ctxt, id_): + raise NotImplementedError + + def update_usage(self, ctxt, id_, values): + raise NotImplementedError + + def delete_usage(self, ctxt, id_): + raise NotImplementedError + + +def get_connection(): + name = cfg.CONF['service:rating'].storage_driver + plugin = StorageEngine.get_plugin(name, invoke_on_load=True) + return plugin.get_connection() diff --git a/billingstack/rating/storage/impl_sqlalchemy.py 
b/billingstack/rating/storage/impl_sqlalchemy.py new file mode 100644 index 0000000..83e8392 --- /dev/null +++ b/billingstack/rating/storage/impl_sqlalchemy.py @@ -0,0 +1,85 @@ +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +A Usage plugin using sqlalchemy... +""" +from oslo.config import cfg +from sqlalchemy import Column +from sqlalchemy import Unicode, Float, DateTime +from sqlalchemy.ext.declarative import declarative_base + +from billingstack.openstack.common import log as logging +from billingstack.rating.storage import Connection, StorageEngine +from billingstack.sqlalchemy.types import UUID +from billingstack.sqlalchemy import api, model_base, session + + +# DB SCHEMA +BASE = declarative_base(cls=model_base.ModelBase) + +LOG = logging.getLogger(__name__) + + +cfg.CONF.register_group(cfg.OptGroup( + name='rating:sqlalchemy', title='Config for rating sqlalchemy plugin')) + + +cfg.CONF.register_opts(session.SQLOPTS, group='rating:sqlalchemy') + + +class Usage(BASE, model_base.BaseMixin): + """ + A record of something that's used from for example a Metering system like + Ceilometer + """ + measure = Column(Unicode(255)) + start_timestamp = Column(DateTime) + end_timestamp = Column(DateTime) + + price = Column(Float) + total = Column(Float) + value = Column(Float) + merchant_id = Column(UUID) + product_id = Column(UUID, nullable=False) + subscription_id = Column(UUID, nullable=False) + + +class SQLAlchemyEngine(StorageEngine): + def 
get_connection(self): + return Connection() + + +class Connection(Connection, api.HelpersMixin): + def __init__(self): + self.setup('rating:sqlalchemy') + + def base(self): + return BASE + + def create_usage(self, ctxt, values): + row = Usage(**values) + self._save(row) + return dict(row) + + def list_usages(self, ctxt, **kw): + return self._list(Usage, **kw) + + def get_usage(self, ctxt, id_): + return self._get(Usage, id_) + + def update_usage(self, ctxt, id_, values): + return self._update(Usage, id_, values) + + def delete_usage(self, ctxt, id_): + self._delete(Usage, id_) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 5e82120..6500d23 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -756,56 +756,3 @@ def delete_subscription(self, ctxt, id_): :param id_: Subscription ID """ self._delete(models.Subscription, id_) - - # Usages - def _usage(self, row): - return dict(row) - - def create_usage(self, ctxt, values): - """ - Add a new Usage - - :param subscription_id: The Subscription - :param values: Values describing the new Subscription - """ - usage = models.Usage(**values) - - self._save(usage) - return self._usage(usage) - - def list_usages(self, ctxt, **kw): - """ - List Usage - """ - rows = self._list(models.Usage, **kw) - return map(self._usage, rows) - - def get_usage(self, ctxt, id_): - """ - Get a Usage - - :param id_: The Usage ID - """ - row = self._get(models.Usage, id_) - return self._usage(row) - - def update_usage(self, ctxt, id_, values): - """ - Update a Usage - - :param id_: The Usage ID - :param values: Values to update with - """ - row = self._get(models.Usage, id_) - row.update(values) - - self._save(row) - return self._usage(row) - - def delete_usage(self, ctxt, id_): - """ - Delete a Usage - - :param id_: Usage ID - """ - self._delete(models.Usage, id_) diff --git 
a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/storage/impl_sqlalchemy/models.py index a260e20..f745f95 100644 --- a/billingstack/storage/impl_sqlalchemy/models.py +++ b/billingstack/storage/impl_sqlalchemy/models.py @@ -377,13 +377,6 @@ class Subscription(BASE, BaseMixin): resource_id = Column(Unicode(255), nullable=False) resource_type = Column(Unicode(255), nullable=True) - usages = relationship( - 'Usage', - backref='subscription', - lazy='dynamic', - cascade='delete, delete-orphan', - passive_deletes=True) - plan = relationship('Plan', backref='subscriptions', uselist=False) plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), nullable=False) @@ -395,25 +388,3 @@ class Subscription(BASE, BaseMixin): payment_method = relationship('PaymentMethod', backref='subscriptions') payment_method_id = Column(UUID, ForeignKey('payment_method.id', ondelete='CASCADE', onupdate='CASCADE')) - - -class Usage(BASE, BaseMixin): - """ - A record of something that's used from for example a Metering system like - Ceilometer - """ - measure = Column(Unicode(255)) - start_timestamp = Column(DateTime) - end_timestamp = Column(DateTime) - - price = Column(Float) - total = Column(Float) - value = Column(Float) - - product = relationship('Product', backref='usages') - prodoct_id = Column(UUID, ForeignKey('product.id', onupdate='CASCADE'), - nullable=False) - - subscription_id = Column(UUID, ForeignKey('subscription.id', - onupdate='CASCADE'), - nullable=False) diff --git a/bin/billingstack-rater b/bin/billingstack-rater new file mode 100644 index 0000000..fd10c33 --- /dev/null +++ b/bin/billingstack-rater @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# Copyright 2012 Managed I.T. +# +# Author: Kiall Mac Innes +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +import sys +import eventlet +from oslo.config import cfg +from billingstack.openstack.common import log as logging +from billingstack.openstack.common import service +from billingstack import utils +from billingstack.rating import service as rating_service + +eventlet.monkey_patch() + +utils.read_config('billingstack', sys.argv) + +logging.setup('billingstack') + +launcher = service.launch(rating_service.Service(), + cfg.CONF['service:rating'].workers) +launcher.wait() diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index 0b098bc..3de6c48 100644 --- a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -42,9 +42,9 @@ api_port = 9092 admin_token = rand0m -####################### -## Storage Configuration -######################## +################################################# +# Central service +################################################# #----------------------- # SQLAlchemy Storage #----------------------- @@ -60,11 +60,31 @@ admin_token = rand0m #retry_interval = 10 -################################# -## Identity Storage Configuration -################################# +################################################# +# Rating service +################################################# + +#----------------------- +# SQLAlchemy Storage +#----------------------- +[rating:sqlalchemy] +# Database connection string - to configure options for a given implementation +# like sqlalchemy or other see below +#database_connection = 
mysql://billingstack:billingstack@localhost:3306/billingstack +#connection_debug = 100 +#connection_trace = False +#sqlite_synchronous = True +#idle_timeout = 3600 +#max_retries = 10 +#retry_interval = 10 + + +################################################# +# Identity service +################################################# + #----------------------- -# Identity SQLAlchemy Storage +# SQLAlchemy Storage #----------------------- [identity:sqlalchemy] # Database connection string - to configure options for a given implementation @@ -76,3 +96,5 @@ admin_token = rand0m #idle_timeout = 3600 #max_retries = 10 #retry_interval = 10 + + diff --git a/setup.py b/setup.py index 8921b58..e1ee081 100644 --- a/setup.py +++ b/setup.py @@ -51,13 +51,17 @@ 'bin/billingstack-db-manage', 'bin/billingstack-identity-api', 'bin/billingstack-manage', - 'bin/billingstack-central' + 'bin/billingstack-central', + 'bin/billingstack-rater' ], cmdclass=common_setup.get_cmdclass(), entry_points=textwrap.dedent(""" [billingstack.storage] sqlalchemy = billingstack.storage.impl_sqlalchemy:SQLAlchemyStorage + [billingstack.rating.storage] + sqlalchemy = billingstack.rating.storage.impl_sqlalchemy:SQLAlchemyEngine + [billingstack.payment_gateway] dummy = billingstack.payment_gateway.dummy:DummyProvider diff --git a/tools/resync_rating.py b/tools/resync_rating.py new file mode 100644 index 0000000..cf29a6a --- /dev/null +++ b/tools/resync_rating.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +import sys + +from oslo.config import cfg + +from billingstack.openstack.common import log as logging + +from billingstack import service +from billingstack.rating.storage import get_connection + + +LOG = logging.getLogger(__name__) + + +cfg.CONF.import_opt('storage_driver', 'billingstack.rating.storage', + group='service:rating') + +cfg.CONF.import_opt('state_path', 'billingstack.paths') + +cfg.CONF.import_opt('database_connection', + 'billingstack.rating.storage.impl_sqlalchemy', + group='rating:sqlalchemy') 
+ + +if __name__ == '__main__': + service.prepare_service(sys.argv) + connection = get_connection() + + LOG.info("Re-Syncing database") + connection.teardown_schema() + connection.setup_schema() From 962a8f20fc04a022078bb40e147c9fa207fae3a1 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 6 Apr 2013 18:45:57 +0000 Subject: [PATCH 099/182] Lookup a Plan by Subscription --- billingstack/central/rpcapi.py | 4 ++++ billingstack/central/service.py | 3 +++ billingstack/storage/impl_sqlalchemy/__init__.py | 11 +++++++++++ 3 files changed, 18 insertions(+) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 829089b..67c8fb0 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -204,6 +204,10 @@ def update_plan(self, ctxt, id_, values): def delete_plan(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_plan', id_=id_)) + def get_plan_by_subscription(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_plan_by_subscription', + id_=id_)) + # PlanItems def create_plan_item(self, ctxt, values): return self.call(ctxt, self.make_msg('create_plan_item', diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 43f4a9b..c774bc6 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -200,6 +200,9 @@ def update_plan(self, ctxt, id_, values): def delete_plan(self, ctxt, id_): return self.storage_conn.delete_plan(ctxt, id_) + def get_plan_by_subscription(self, ctxt, id_): + return self.storage_conn.get_plan_by_subscription(ctxt, id_) + def create_plan_item(self, ctxt, values): return self.storage_conn.create_plan_item(ctxt, values) diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/storage/impl_sqlalchemy/__init__.py index 6500d23..dc84027 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/storage/impl_sqlalchemy/__init__.py @@ -467,6 +467,17 @@ def delete_plan(self, ctxt, id_): """ 
self._delete(models.Plan, id_) + def get_plan_by_subscription(self, ctxt, subscription_id): + q = self.session.query(models.Plan).join(models.Subscription)\ + .filter(models.Subscription.id == subscription_id) + try: + row = q.one() + except exc.NoResultFound: + msg = 'Couldn\'t find any Plan for subscription %s' % \ + subscription_id + raise exceptions.NotFound(msg) + return self._plan(row) + # PlanItemw def _plan_item(self, row): entity = self._entity(row) From eba16fee1756c16d4f6e18988c24d3a78cde7bb0 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 8 Apr 2013 12:47:52 +0000 Subject: [PATCH 100/182] Always pass Merchant from the URL and fix Usage model --- billingstack/api/v1/models.py | 18 ++++++++++++++---- billingstack/api/v1/resources.py | 7 ++++--- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v1/models.py index cb7fb8d..144cee4 100644 --- a/billingstack/api/v1/models.py +++ b/billingstack/api/v1/models.py @@ -14,7 +14,7 @@ # License for the specific language governing permissions and limitations # under the License. 
from wsme.types import text, DictType - +from datetime import datetime from billingstack.api.base import ModelBase, property_type from billingstack.openstack.common import log @@ -141,7 +141,15 @@ class Subscription(Base): class Usage(Base): - pass + measure = text + start_timestamp = datetime + end_timestamp = datetime + price = float + total = float + value = float + merchant_id = text + product_id = text + subscription_id = text class PGConfig(Base): @@ -178,13 +186,15 @@ class Merchant(Account): def to_db(self): values = self.as_dict() change_suffixes(values, self._keys, shorten=False) - values['default_gateway_id'] = values.pop('default_gateway') + if 'default_gateway' in values: + values['default_gateway_id'] = values.pop('default_gateway') return values @classmethod def from_db(cls, values): change_suffixes(values, cls._keys) - values['default_gateway'] = values.pop('default_gateway_id') + if 'default_gateway_id' in values: + values['default_gateway'] = values.pop('default_gateway_id') return cls(**values) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 4064ef9..2793b79 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -680,9 +680,10 @@ def delete_subscription(merchant_id, subscription_id): @bp.post('/merchants//usages') @signature(models.Usage, str, body=models.Usage) def create_usage(merchant_id, body): - row = rating_api.create_usage( - request.environ['context'], - body.to_db()) + values = body.to_db() + + values['merchant_id'] = merchant_id + row = rating_api.create_usage(request.environ['context'], values) return models.Usage.from_db(row) From e1f2df58a622bbb9fdc587a8db4ad026325dac6e Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 8 Apr 2013 17:20:50 +0000 Subject: [PATCH 101/182] Fixes #6 --- billingstack/api/v1/resources.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py 
index 2793b79..0a5d16c 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -677,7 +677,7 @@ def delete_subscription(merchant_id, subscription_id): # Usage -@bp.post('/merchants//usages') +@bp.post('/merchants//usage') @signature(models.Usage, str, body=models.Usage) def create_usage(merchant_id, body): values = body.to_db() @@ -688,7 +688,7 @@ def create_usage(merchant_id, body): return models.Usage.from_db(row) -@bp.get('/merchants//usages') +@bp.get('/merchants//usage') @signature([models.Usage], str, [Query]) def list_usages(merchant_id, q=[]): criterion = _query_to_criterion(q, merchant_id=merchant_id) @@ -699,7 +699,7 @@ def list_usages(merchant_id, q=[]): return map(models.Usage.from_db, rows) -@bp.get('/merchants//usages/') +@bp.get('/merchants//usage/') @signature([models.Usage], str, str) def get_usage(merchant_id, usage_id): row = rating_api.get_usage(request.environ['context'], @@ -708,7 +708,7 @@ def get_usage(merchant_id, usage_id): return models.Usage.from_db(row) -@bp.put('/merchants//usages/') +@bp.put('/merchants//usage/') @signature(models.Usage, str, str, body=models.Usage) def update_usage(merchant_id, usage_id, body): row = rating_api.update_usage( @@ -719,7 +719,7 @@ def update_usage(merchant_id, usage_id, body): return models.Usage.from_db(row) -@bp.delete('/merchants//usages/') +@bp.delete('/merchants//usage/') def delete_usage(merchant_id, usage_id): rating_api.delete_usage( request.environ['context'], From cf7d129f43c74c36f0cd8a9eb23a1d9849ed5c24 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 14 Apr 2013 21:12:30 -0700 Subject: [PATCH 102/182] Update Common --- billingstack/openstack/common/gettextutils.py | 23 ++++++- billingstack/openstack/common/jsonutils.py | 58 ++++++++++++----- billingstack/openstack/common/loopingcall.py | 64 +++++++++++++++++-- billingstack/openstack/common/rpc/amqp.py | 14 ++-- billingstack/openstack/common/rpc/common.py | 15 +---- .../openstack/common/rpc/dispatcher.py 
| 21 +++++- .../openstack/common/rpc/impl_fake.py | 15 +++-- .../openstack/common/rpc/impl_kombu.py | 20 ++---- .../openstack/common/rpc/impl_qpid.py | 20 ++---- billingstack/openstack/common/rpc/impl_zmq.py | 27 +++----- billingstack/openstack/common/rpc/proxy.py | 6 +- billingstack/openstack/common/rpc/service.py | 2 +- .../openstack/common/rpc/zmq_receiver.py | 41 ++++++++++++ billingstack/openstack/common/setup.py | 4 +- billingstack/openstack/common/threadgroup.py | 2 +- 15 files changed, 225 insertions(+), 107 deletions(-) create mode 100755 billingstack/openstack/common/rpc/zmq_receiver.py diff --git a/billingstack/openstack/common/gettextutils.py b/billingstack/openstack/common/gettextutils.py index fbaecf9..fd35873 100644 --- a/billingstack/openstack/common/gettextutils.py +++ b/billingstack/openstack/common/gettextutils.py @@ -24,10 +24,27 @@ """ import gettext +import os - -t = gettext.translation('billingstack', 'locale', fallback=True) +_localedir = os.environ.get('billingstack'.upper() + '_LOCALEDIR') +_t = gettext.translation('billingstack', localedir=_localedir, fallback=True) def _(msg): - return t.ugettext(msg) + return _t.ugettext(msg) + + +def install(domain): + """Install a _() function using the given translation domain. + + Given a translation domain, install a _() function using gettext's + install() function. + + The main difference from gettext.install() is that we allow + overriding the default localedir (e.g. /usr/share/locale) using + a translation-domain-specific environment variable (e.g. + NOVA_LOCALEDIR). 
+ """ + gettext.install(domain, + localedir=os.environ.get(domain.upper() + '_LOCALEDIR'), + unicode=True) diff --git a/billingstack/openstack/common/jsonutils.py b/billingstack/openstack/common/jsonutils.py index cadcb80..189bbbd 100644 --- a/billingstack/openstack/common/jsonutils.py +++ b/billingstack/openstack/common/jsonutils.py @@ -38,11 +38,21 @@ import inspect import itertools import json +import types import xmlrpclib from billingstack.openstack.common import timeutils +_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, + inspect.isfunction, inspect.isgeneratorfunction, + inspect.isgenerator, inspect.istraceback, inspect.isframe, + inspect.iscode, inspect.isbuiltin, inspect.isroutine, + inspect.isabstract] + +_simple_types = (types.NoneType, int, basestring, bool, float, long) + + def to_primitive(value, convert_instances=False, convert_datetime=True, level=0, max_depth=3): """Convert a complex object into primitives. @@ -58,17 +68,30 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, Therefore, convert_instances=True is lossy ... be aware. """ - nasty = [inspect.ismodule, inspect.isclass, inspect.ismethod, - inspect.isfunction, inspect.isgeneratorfunction, - inspect.isgenerator, inspect.istraceback, inspect.isframe, - inspect.iscode, inspect.isbuiltin, inspect.isroutine, - inspect.isabstract] - for test in nasty: - if test(value): - return unicode(value) - - # value of itertools.count doesn't get caught by inspects - # above and results in infinite loop when list(value) is called. 
+ # handle obvious types first - order of basic types determined by running + # full tests on nova project, resulting in the following counts: + # 572754 + # 460353 + # 379632 + # 274610 + # 199918 + # 114200 + # 51817 + # 26164 + # 6491 + # 283 + # 19 + if isinstance(value, _simple_types): + return value + + if isinstance(value, datetime.datetime): + if convert_datetime: + return timeutils.strtime(value) + else: + return value + + # value of itertools.count doesn't get caught by nasty_type_tests + # and results in infinite loop when list(value) is called. if type(value) == itertools.count: return unicode(value) @@ -91,17 +114,18 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, convert_datetime=convert_datetime, level=level, max_depth=max_depth) + if isinstance(value, dict): + return dict((k, recursive(v)) for k, v in value.iteritems()) + elif isinstance(value, (list, tuple)): + return [recursive(lv) for lv in value] + # It's not clear why xmlrpclib created their own DateTime type, but # for our purposes, make it a datetime type which is explicitly # handled if isinstance(value, xmlrpclib.DateTime): value = datetime.datetime(*tuple(value.timetuple())[:6]) - if isinstance(value, (list, tuple)): - return [recursive(v) for v in value] - elif isinstance(value, dict): - return dict((k, recursive(v)) for k, v in value.iteritems()) - elif convert_datetime and isinstance(value, datetime.datetime): + if convert_datetime and isinstance(value, datetime.datetime): return timeutils.strtime(value) elif hasattr(value, 'iteritems'): return recursive(dict(value.iteritems()), level=level + 1) @@ -112,6 +136,8 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, # Ignore class member vars. 
return recursive(value.__dict__, level=level + 1) else: + if any(test(value) for test in _nasty_type_tests): + return unicode(value) return value except TypeError: # Class objects are tricky since they may define something like diff --git a/billingstack/openstack/common/loopingcall.py b/billingstack/openstack/common/loopingcall.py index 7f76db6..1b46dbe 100644 --- a/billingstack/openstack/common/loopingcall.py +++ b/billingstack/openstack/common/loopingcall.py @@ -46,12 +46,23 @@ def __init__(self, retvalue=True): self.retvalue = retvalue -class LoopingCall(object): +class LoopingCallBase(object): def __init__(self, f=None, *args, **kw): self.args = args self.kw = kw self.f = f self._running = False + self.done = None + + def stop(self): + self._running = False + + def wait(self): + return self.done.wait() + + +class FixedIntervalLoopingCall(LoopingCallBase): + """A fixed interval looping call.""" def start(self, interval, initial_delay=None): self._running = True @@ -77,7 +88,7 @@ def _inner(): self.stop() done.send(e.retvalue) except Exception: - LOG.exception(_('in looping call')) + LOG.exception(_('in fixed duration looping call')) done.send_exception(*sys.exc_info()) return else: @@ -88,8 +99,49 @@ def _inner(): greenthread.spawn_n(_inner) return self.done - def stop(self): - self._running = False - def wait(self): - return self.done.wait() +# TODO(mikal): this class name is deprecated in Havana and should be removed +# in the I release +LoopingCall = FixedIntervalLoopingCall + + +class DynamicLoopingCall(LoopingCallBase): + """A looping call which sleeps until the next known event. + + The function called should return how long to sleep for before being + called again. 
+ """ + + def start(self, initial_delay=None, periodic_interval_max=None): + self._running = True + done = event.Event() + + def _inner(): + if initial_delay: + greenthread.sleep(initial_delay) + + try: + while self._running: + idle = self.f(*self.args, **self.kw) + if not self._running: + break + + if periodic_interval_max is not None: + idle = min(idle, periodic_interval_max) + LOG.debug(_('Dynamic looping call sleeping for %.02f ' + 'seconds'), idle) + greenthread.sleep(idle) + except LoopingCallDone, e: + self.stop() + done.send(e.retvalue) + except Exception: + LOG.exception(_('in dynamic looping call')) + done.send_exception(*sys.exc_info()) + return + else: + done.send(True) + + self.done = done + + greenthread.spawn(_inner) + return self.done diff --git a/billingstack/openstack/common/rpc/amqp.py b/billingstack/openstack/common/rpc/amqp.py index a032211..2d97981 100644 --- a/billingstack/openstack/common/rpc/amqp.py +++ b/billingstack/openstack/common/rpc/amqp.py @@ -174,9 +174,6 @@ def join_consumer_pool(self, callback, pool_name, topic, exchange_name): def consume_in_thread(self): self.connection.consume_in_thread() - def consume_in_thread_group(self, thread_group): - self.connection.consume_in_thread_group(thread_group) - def __getattr__(self, key): """Proxy all other calls to the Connection instance""" if self.connection: @@ -411,15 +408,17 @@ def __call__(self, message_data): ctxt = unpack_context(self.conf, message_data) method = message_data.get('method') args = message_data.get('args', {}) - version = message_data.get('version', None) + version = message_data.get('version') + namespace = message_data.get('namespace') if not method: LOG.warn(_('no method for message: %s') % message_data) ctxt.reply(_('No method for message: %s') % message_data, connection_pool=self.connection_pool) return - self.pool.spawn_n(self._process_data, ctxt, version, method, args) + self.pool.spawn_n(self._process_data, ctxt, version, method, + namespace, args) - def 
_process_data(self, ctxt, version, method, args): + def _process_data(self, ctxt, version, method, namespace, args): """Process a message in a new thread. If the proxy object we have has a dispatch method @@ -430,7 +429,8 @@ def _process_data(self, ctxt, version, method, args): """ ctxt.update_store() try: - rval = self.proxy.dispatch(ctxt, version, method, **args) + rval = self.proxy.dispatch(ctxt, version, method, namespace, + **args) # Check if the result was a generator if inspect.isgenerator(rval): for x in rval: diff --git a/billingstack/openstack/common/rpc/common.py b/billingstack/openstack/common/rpc/common.py index e5011ab..b753644 100644 --- a/billingstack/openstack/common/rpc/common.py +++ b/billingstack/openstack/common/rpc/common.py @@ -250,19 +250,6 @@ def consume_in_thread(self): """ raise NotImplementedError() - def consume_in_thread_group(self, thread_group): - """Spawn a thread to handle incoming messages in the supplied ThreadGroup. - - Spawn a thread that will be responsible for handling all incoming - messages for consumers that were set up on this connection. - - Message dispatching inside of this is expected to be implemented in a - non-blocking manner. An example implementation would be having this - thread pull messages in for all of the consumers, but utilize a thread - pool for dispatching the messages to the proxy objects. 
- """ - raise NotImplementedError() - def _safe_log(log_func, msg, msg_data): """Sanitizes the msg_data field before logging.""" @@ -352,7 +339,7 @@ def deserialize_remote_exception(conf, data): if not issubclass(klass, Exception): raise TypeError("Can only deserialize Exceptions") - failure = klass(**failure.get('kwargs', {})) + failure = klass(*failure.get('args', []), **failure.get('kwargs', {})) except (AttributeError, TypeError, ImportError): return RemoteError(name, failure.get('message'), trace) diff --git a/billingstack/openstack/common/rpc/dispatcher.py b/billingstack/openstack/common/rpc/dispatcher.py index 251c9fb..e3f2067 100644 --- a/billingstack/openstack/common/rpc/dispatcher.py +++ b/billingstack/openstack/common/rpc/dispatcher.py @@ -103,13 +103,16 @@ def __init__(self, callbacks): self.callbacks = callbacks super(RpcDispatcher, self).__init__() - def dispatch(self, ctxt, version, method, **kwargs): + def dispatch(self, ctxt, version, method, namespace, **kwargs): """Dispatch a message based on a requested version. :param ctxt: The request context :param version: The requested API version from the incoming message :param method: The method requested to be called by the incoming message. + :param namespace: The namespace for the requested method. If None, + the dispatcher will look for a method on a callback + object with no namespace set. :param kwargs: A dict of keyword arguments to be passed to the method. 
:returns: Whatever is returned by the underlying method that gets @@ -120,13 +123,25 @@ def dispatch(self, ctxt, version, method, **kwargs): had_compatible = False for proxyobj in self.callbacks: - if hasattr(proxyobj, 'RPC_API_VERSION'): + # Check for namespace compatibility + try: + cb_namespace = proxyobj.RPC_API_NAMESPACE + except AttributeError: + cb_namespace = None + + if namespace != cb_namespace: + continue + + # Check for version compatibility + try: rpc_api_version = proxyobj.RPC_API_VERSION - else: + except AttributeError: rpc_api_version = '1.0' + is_compatible = rpc_common.version_is_compatible(rpc_api_version, version) had_compatible = had_compatible or is_compatible + if not hasattr(proxyobj, method): continue if is_compatible: diff --git a/billingstack/openstack/common/rpc/impl_fake.py b/billingstack/openstack/common/rpc/impl_fake.py index 8d66284..f5764ed 100644 --- a/billingstack/openstack/common/rpc/impl_fake.py +++ b/billingstack/openstack/common/rpc/impl_fake.py @@ -57,13 +57,14 @@ def __init__(self, topic, proxy): self.topic = topic self.proxy = proxy - def call(self, context, version, method, args, timeout): + def call(self, context, version, method, namespace, args, timeout): done = eventlet.event.Event() def _inner(): ctxt = RpcContext.from_dict(context.to_dict()) try: - rval = self.proxy.dispatch(context, version, method, **args) + rval = self.proxy.dispatch(context, version, method, + namespace, **args) res = [] # Caller might have called ctxt.reply() manually for (reply, failure) in ctxt._response: @@ -119,9 +120,6 @@ def close(self): def consume_in_thread(self): pass - def consume_in_thread_group(self, thread_group): - pass - def create_connection(conf, new=True): """Create a connection""" @@ -143,13 +141,15 @@ def multicall(conf, context, topic, msg, timeout=None): return args = msg.get('args', {}) version = msg.get('version', None) + namespace = msg.get('namespace', None) try: consumer = CONSUMERS[topic][0] except (KeyError, 
IndexError): return iter([None]) else: - return consumer.call(context, version, method, args, timeout) + return consumer.call(context, version, method, namespace, args, + timeout) def call(conf, context, topic, msg, timeout=None): @@ -186,9 +186,10 @@ def fanout_cast(conf, context, topic, msg): return args = msg.get('args', {}) version = msg.get('version', None) + namespace = msg.get('namespace', None) for consumer in CONSUMERS.get(topic, []): try: - consumer.call(context, version, method, args, None) + consumer.call(context, version, method, namespace, args, None) except Exception: pass diff --git a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py index b3c2024..af59bae 100644 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ b/billingstack/openstack/common/rpc/impl_kombu.py @@ -721,25 +721,17 @@ def consume(self, limit=None): except StopIteration: return - def _consumer_thread_callback(self): - """ Consumer thread callback used by consume_in_* """ - try: - self.consume() - except greenlet.GreenletExit: - return - def consume_in_thread(self): """Consumer from all queues/consumers in a greenthread""" - + def _consumer_thread(): + try: + self.consume() + except greenlet.GreenletExit: + return if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn( - self._consumer_thread_callback) + self.consumer_thread = eventlet.spawn(_consumer_thread) return self.consumer_thread - def consume_in_thread_group(self, thread_group): - """ Consume from all queues/consumers in the supplied ThreadGroup""" - thread_group.add_thread(self._consumer_thread_callback) - def create_consumer(self, topic, proxy, fanout=False): """Create a consumer that calls a method in a proxy object""" proxy_cb = rpc_amqp.ProxyCallback( diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py index 356886a..6c4c1c9 100644 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ 
b/billingstack/openstack/common/rpc/impl_qpid.py @@ -510,13 +510,6 @@ def notify_send(self, topic, msg, **kwargs): """Send a notify message on a topic""" self.publisher_send(NotifyPublisher, topic, msg) - def _consumer_thread_callback(self): - """ Consumer thread callback used by consume_in_* """ - try: - self.consume() - except greenlet.GreenletExit: - return - def consume(self, limit=None): """Consume from all queues/consumers""" it = self.iterconsume(limit=limit) @@ -528,16 +521,15 @@ def consume(self, limit=None): def consume_in_thread(self): """Consumer from all queues/consumers in a greenthread""" - + def _consumer_thread(): + try: + self.consume() + except greenlet.GreenletExit: + return if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn( - self._consumer_thread_callback) + self.consumer_thread = eventlet.spawn(_consumer_thread) return self.consumer_thread - def consume_in_thread_group(self, thread_group): - """ Consume from all queues/consumers in the supplied ThreadGroup""" - thread_group.add_thread(self._consumer_thread_callback) - def create_consumer(self, topic, proxy, fanout=False): """Create a consumer that calls a method in a proxy object""" proxy_cb = rpc_amqp.ProxyCallback( diff --git a/billingstack/openstack/common/rpc/impl_zmq.py b/billingstack/openstack/common/rpc/impl_zmq.py index e27f6ec..2cd8126 100644 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ b/billingstack/openstack/common/rpc/impl_zmq.py @@ -276,7 +276,8 @@ def _get_response(self, ctx, proxy, topic, data): try: result = proxy.dispatch( - ctx, data['version'], data['method'], **data['args']) + ctx, data['version'], data['method'], + data.get('namespace'), **data['args']) return ConsumerBase.normalize_reply(result, ctx.replies) except greenlet.GreenletExit: # ignore these since they are just from shutdowns @@ -351,7 +352,7 @@ def process(self, proxy, ctx, data): return proxy.dispatch(ctx, data['version'], - data['method'], **data['args']) + data['method'], 
data.get('namespace'), **data['args']) class ZmqBaseReactor(ConsumerBase): @@ -405,24 +406,17 @@ def register(self, proxy, in_addr, zmq_type_in, out_addr=None, LOG.info(_("Out reactor registered")) - def _consumer_thread_callback(self, sock): - """ Consumer thread callback used by consume_in_* """ - - LOG.info(_("Consuming socket")) - while True: - self.consume(sock) - def consume_in_thread(self): + def _consume(sock): + LOG.info(_("Consuming socket")) + while True: + self.consume(sock) + for k in self.proxies.keys(): self.threads.append( - self.pool.spawn(self._consumer_thread_callback, k) + self.pool.spawn(_consume, k) ) - def consume_in_thread_group(self, thread_group): - """ Consume from all queues/consumers in the supplied ThreadGroup""" - for k in self.proxies.keys(): - thread_group.add_thread(self._consumer_thread_callback, k) - def wait(self): for t in self.threads: t.wait() @@ -660,9 +654,6 @@ def consume_in_thread(self): _get_matchmaker().start_heartbeat() self.reactor.consume_in_thread() - def consume_in_thread_group(self, thread_group): - self.reactor.consume_in_thread_group(thread_group) - def _cast(addr, context, topic, msg, timeout=None, envelope=False, _msg_id=None): diff --git a/billingstack/openstack/common/rpc/proxy.py b/billingstack/openstack/common/rpc/proxy.py index c1a6a02..e85bffd 100644 --- a/billingstack/openstack/common/rpc/proxy.py +++ b/billingstack/openstack/common/rpc/proxy.py @@ -58,9 +58,13 @@ def _get_topic(self, topic): """Return the topic to use for a message.""" return topic if topic else self.topic + @staticmethod + def make_namespaced_msg(method, namespace, **kwargs): + return {'method': method, 'namespace': namespace, 'args': kwargs} + @staticmethod def make_msg(method, **kwargs): - return {'method': method, 'args': kwargs} + return RpcProxy.make_namespaced_msg(method, None, **kwargs) def call(self, context, msg, topic=None, version=None, timeout=None): """rpc.call() a remote method. 
diff --git a/billingstack/openstack/common/rpc/service.py b/billingstack/openstack/common/rpc/service.py index c38e3c2..48f9298 100644 --- a/billingstack/openstack/common/rpc/service.py +++ b/billingstack/openstack/common/rpc/service.py @@ -63,7 +63,7 @@ def start(self): self.manager.initialize_service_hook(self) # Consume from all consumers in a thread - self.conn.consume_in_thread_group(self.tg) + self.conn.consume_in_thread() def stop(self): # Try to shut the connection down, but if we get any sort of diff --git a/billingstack/openstack/common/rpc/zmq_receiver.py b/billingstack/openstack/common/rpc/zmq_receiver.py new file mode 100755 index 0000000..17f9d06 --- /dev/null +++ b/billingstack/openstack/common/rpc/zmq_receiver.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import eventlet +eventlet.monkey_patch() + +import contextlib +import sys + +from oslo.config import cfg + +from billingstack.openstack.common import log as logging +from billingstack.openstack.common import rpc +from billingstack.openstack.common.rpc import impl_zmq + +CONF = cfg.CONF +CONF.register_opts(rpc.rpc_opts) +CONF.register_opts(impl_zmq.zmq_opts) + + +def main(): + CONF(sys.argv[1:], project='oslo') + logging.setup("oslo") + + with contextlib.closing(impl_zmq.ZmqProxy(CONF)) as reactor: + reactor.consume_in_thread() + reactor.wait() diff --git a/billingstack/openstack/common/setup.py b/billingstack/openstack/common/setup.py index dec74fd..ba6b54a 100644 --- a/billingstack/openstack/common/setup.py +++ b/billingstack/openstack/common/setup.py @@ -171,8 +171,8 @@ def generate_authors(): " log --format='%aN <%aE>' | sort -u | " "egrep -v '" + jenkins_email + "'") changelog = _run_shell_command(git_log_cmd) - signed_cmd = ("git log --git-dir=" + git_dir + - " | grep -i Co-authored-by: | sort -u") + signed_cmd = ("git --git-dir=" + git_dir + + " log | grep -i Co-authored-by: | sort -u") signed_entries = _run_shell_command(signed_cmd) if signed_entries: new_entries = "\n".join( diff --git a/billingstack/openstack/common/threadgroup.py b/billingstack/openstack/common/threadgroup.py index 6895775..60b5c92 100644 --- a/billingstack/openstack/common/threadgroup.py +++ b/billingstack/openstack/common/threadgroup.py @@ -63,7 +63,7 @@ def __init__(self, thread_pool_size=10): def add_timer(self, interval, callback, initial_delay=None, *args, **kwargs): - pulse = loopingcall.LoopingCall(callback, *args, **kwargs) + pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs) pulse.start(interval=interval, initial_delay=initial_delay) self.timers.append(pulse) From 347e76b41fcfe085d1597766e9539a9e3e51d99c Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 16 Apr 2013 14:19:18 -0700 Subject: [PATCH 103/182] Add install by packages --- 
doc/source/install/packages.rst | 34 +++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 doc/source/install/packages.rst diff --git a/doc/source/install/packages.rst b/doc/source/install/packages.rst new file mode 100644 index 0000000..a408c2e --- /dev/null +++ b/doc/source/install/packages.rst @@ -0,0 +1,34 @@ +.. + Copyright 2013 Endre Karlson + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + + + +===================== + Installing Packages +===================== + +Common Steps +============ + +.. index:: + double: installing; common_steps + + +1. apt-get install python-software-properties +2. apt-add-repository ppa:openstack-ubuntu-testing/grizzly-trunk-testing +3. echo "deb http://cloudistic.me/packages precise main" > /etc/apt/sources.list.d/billingstack.list +4. wget -q http://cloudistic.me/packages/pubkey.gpg -O- | apt-key add - +5. apt-get update +6. 
apt-get install billingstack-central billingstack-api \ No newline at end of file From 488288b0a02d2bd51b67d27f73ba10425534cf1d Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 16 Apr 2013 14:19:24 -0700 Subject: [PATCH 104/182] Add policy --- etc/billingstack/policy.json | 1 + 1 file changed, 1 insertion(+) create mode 100644 etc/billingstack/policy.json diff --git a/etc/billingstack/policy.json b/etc/billingstack/policy.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/etc/billingstack/policy.json @@ -0,0 +1 @@ +{} From 163590585503e68373caed1ef756d4901470bb5f Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 23 Apr 2013 23:33:57 -0700 Subject: [PATCH 105/182] Remove ID API and fixup config --- billingstack/identity/__init__.py | 0 billingstack/identity/api/__init__.py | 28 --- billingstack/identity/api/app.py | 61 ----- billingstack/identity/api/config.py | 43 ---- billingstack/identity/api/hooks.py | 11 - billingstack/identity/api/v1.py | 216 ------------------ billingstack/identity/base.py | 173 -------------- billingstack/identity/cms.py | 174 -------------- billingstack/identity/impl_sqlalchemy.py | 216 ------------------ billingstack/identity/token_base.py | 80 ------- billingstack/identity/token_memcache.py | 128 ----------- billingstack/identity/utils.py | 53 ----- billingstack/tests/identity/__init__.py | 0 billingstack/tests/identity/test_api.py | 265 ---------------------- etc/billingstack/billingstack.conf.sample | 35 +-- 15 files changed, 21 insertions(+), 1462 deletions(-) delete mode 100644 billingstack/identity/__init__.py delete mode 100644 billingstack/identity/api/__init__.py delete mode 100644 billingstack/identity/api/app.py delete mode 100644 billingstack/identity/api/config.py delete mode 100644 billingstack/identity/api/hooks.py delete mode 100644 billingstack/identity/api/v1.py delete mode 100644 billingstack/identity/base.py delete mode 100644 billingstack/identity/cms.py delete mode 100644 
billingstack/identity/impl_sqlalchemy.py delete mode 100644 billingstack/identity/token_base.py delete mode 100644 billingstack/identity/token_memcache.py delete mode 100644 billingstack/identity/utils.py delete mode 100644 billingstack/tests/identity/__init__.py delete mode 100644 billingstack/tests/identity/test_api.py diff --git a/billingstack/identity/__init__.py b/billingstack/identity/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/identity/api/__init__.py b/billingstack/identity/api/__init__.py deleted file mode 100644 index 8a54949..0000000 --- a/billingstack/identity/api/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2013 Woorea Solutions, S.L -# -# Author: Luis Gervaso -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -API_SERVICE_OPTS = [ - cfg.IntOpt('api_port', default=9092, - help='The port for the BS Identity API server'), - cfg.IntOpt('api_listen', default='0.0.0.0', help='Bind to address'), - cfg.StrOpt('storage_driver', default='sqlalchemy', - help='Storage driver to use'), -] - -cfg.CONF.register_opts(API_SERVICE_OPTS, 'service:identity_api') diff --git a/billingstack/identity/api/app.py b/billingstack/identity/api/app.py deleted file mode 100644 index 837197f..0000000 --- a/billingstack/identity/api/app.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2012 Woorea Solutions, S.L -# -# Author: Luis Gervaso -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from pecan import configuration -from pecan import make_app - -from billingstack.api.hooks import ConfigHook, NoAuthHook -from billingstack.identity.api import config as api_config -from billingstack.identity.api.hooks import DBHook - - -def get_pecan_config(): - # Set up the pecan configuration - filename = api_config.__file__.replace('.pyc', '.py') - return configuration.conf_from_file(filename) - - -def setup_app(pecan_config=None, extra_hooks=None): - - app_hooks = [ConfigHook(), DBHook()] - - if extra_hooks: - app_hooks.extend(extra_hooks) - - if not pecan_config: - pecan_config = get_pecan_config() - - app_hooks.append(NoAuthHook()) - - configuration.set_config(dict(pecan_config), overwrite=True) - - app = make_app( - pecan_config.app.root, - static_root=pecan_config.app.static_root, - template_path=pecan_config.app.template_path, - logging=getattr(pecan_config, 'logging', {}), - debug=getattr(pecan_config.app, 'debug', False), - force_canonical=getattr(pecan_config.app, 'force_canonical', True), - hooks=app_hooks, - guess_content_type_from_ext=getattr( - pecan_config.app, - 'guess_content_type_from_ext', - True), - ) - - return app diff --git a/billingstack/identity/api/config.py b/billingstack/identity/api/config.py deleted file mode 100644 index c2b9e52..0000000 --- a/billingstack/identity/api/config.py +++ /dev/null @@ -1,43 +0,0 @@ -# Server Specific Configurations -server = { - 'port': '9001', - 'host': '0.0.0.0' -} - -# Pecan Application Configurations -app = { - 'root': 'billingstack.identity.api.v1.RootController', - 'modules': ['billingstack.identity.api'], - 'static_root': '%(confdir)s/public', - 'template_path': '%(confdir)s/templates', - 'debug': False, - 'enable_acl': True, -} - -logging = { - 'loggers': { - 'root': {'level': 'INFO', 'handlers': ['console']}, - 'billingstack': {'level': 'DEBUG', 'handlers': ['console']}, - 'wsme': {'level': 'DEBUG', 'handlers': ['console']} - }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 
'logging.StreamHandler', - 'formatter': 'simple' - } - }, - 'formatters': { - 'simple': { - 'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]' - '[%(threadName)s] %(message)s') - } - }, -} - -# Custom Configurations must be in Python dictionary format:: -# -# foo = {'bar':'baz'} -# -# All configurations are accessible at:: -# pecan.conf diff --git a/billingstack/identity/api/hooks.py b/billingstack/identity/api/hooks.py deleted file mode 100644 index b9a0037..0000000 --- a/billingstack/identity/api/hooks.py +++ /dev/null @@ -1,11 +0,0 @@ -from pecan import hooks -from oslo.config import cfg - -from billingstack.identity.base import IdentityPlugin - - -class DBHook(hooks.PecanHook): - def before(self, state): - plugin = IdentityPlugin.get_plugin( - cfg.CONF['service:identity_api'].storage_driver) - state.request.storage_conn = plugin() diff --git a/billingstack/identity/api/v1.py b/billingstack/identity/api/v1.py deleted file mode 100644 index 2749bf9..0000000 --- a/billingstack/identity/api/v1.py +++ /dev/null @@ -1,216 +0,0 @@ -from pecan import request, expose, rest -import wsmeext.pecan as wsme_pecan -from wsme.types import text, wsattr - -from billingstack.api.base import ModelBase, RestBase - - -class LoginCredentials(ModelBase): - name = wsattr(text, mandatory=True) - password = text - merchant = text - - -class LoginResponse(ModelBase): - """ - The response of the login - """ - token = text - - -class User(ModelBase): - def __init__(self, **kw): - #kw['contact_info'] = ContactInfo(**kw.get('contact_info', {})) - super(User, self).__init__(**kw) - - id = text - name = text - password = text - - @classmethod - def from_db(cls, values): - """ - Remove the password and anything else that's private. 
- """ - del values['password'] - return cls(**values) - - -class Account(ModelBase): - id = text - name = text - type = text - - -class Role(ModelBase): - id = text - name = text - type = text - - -class UserController(RestBase): - """User controller""" - __id__ = 'user' - - @wsme_pecan.wsexpose(User) - def get_all(self): - row = request.storage_conn.get_user(request.ctxt, self.id_) - return User.from_db(row) - - @wsme_pecan.wsexpose(User, body=User) - def put(self, body): - row = request.storage_conn.update_user( - request.ctxt, - self.id_, - body.to_db()) - - return User.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.storage_conn.delete_user(request.ctxt, self.id_) - - -class UsersController(RestBase): - """Users controller""" - __resource__ = UserController - - @wsme_pecan.wsexpose([User]) - def get_all(self): - criterion = {} - rows = request.storage_conn.list_users( - request.ctxt, - criterion=criterion) - - return [User.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(User, body=User) - def post(self, body): - row = request.storage_conn.create_user( - request.ctxt, - body.to_db()) - - return User.from_db(row) - - -class AccountRolesController(rest.RestController): - def __init__(self, account_id, user_id, role_id): - self.account_id = account_id - self.user_id = user_id - self.role_id = role_id - - @wsme_pecan.wsexpose() - def put(self): - return request.storage_conn.create_grant(request.ctxt, self.user_id, - self.account_id, self.role_id) - - @wsme_pecan.wsexpose() - def delete(self): - request.storage_conn.revoke_grant(request.ctxt, self.user_id, - self.account_id, self.role_id) - - -class AccountController(RestBase): - @expose() - def _lookup(self, *remainder): - if remainder[0] == 'users' and remainder[2] == 'roles': - return AccountRolesController(self.id_, remainder[1], - remainder[3]), () - return super(AccountController, self)._lookup(remainder) - - @wsme_pecan.wsexpose(Account) - def get_all(self): - row = 
request.storage_conn.get_account(request.ctxt, self.id_) - return Account.from_db(row) - - @wsme_pecan.wsexpose(Account, body=Account) - def put(self, body): - row = request.storage_conn.update_account( - request.ctxt, - self.id_, - body.to_db()) - - return Account.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.storage_conn.delete_account(request.ctxt, self.id_) - - -class AccountsController(RestBase): - __resource__ = AccountController - - @wsme_pecan.wsexpose([Account]) - def get_all(self): - rows = request.storage_conn.list_accounts(request.ctxt) - return [Account.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(Account, body=Account) - def post(self, body): - row = request.storage_conn.create_account( - request.ctxt, - body.to_db()) - return Account.from_db(row) - - -class RoleController(RestBase): - @wsme_pecan.wsexpose(Role, unicode) - def get_all(self): - row = request.storage_conn.get_role(request.ctxt, self.id_) - return Role.from_db(row) - - @wsme_pecan.wsexpose(Role, body=Role) - def put(self, body): - row = request.storage_conn.update_role( - request.ctxt, - self.id_, - body.to_db()) - - return Role.from_db(row) - - @wsme_pecan.wsexpose() - def delete(self): - request.storage_conn.delete_role(request.ctxt, self.id_) - - -class RolesController(RestBase): - __resource__ = RoleController - - @wsme_pecan.wsexpose([Role]) - def get_all(self): - rows = request.storage_conn.list_roles(request.ctxt,) - return [Role.from_db(r) for r in rows] - - @wsme_pecan.wsexpose(Role, body=Role) - def post(self, body): - row = request.storage_conn.create_role( - request.ctxt, - body.to_db()) - return Role.from_db(row) - - -class TokensController(RestBase): - """ - controller that authenticates a user... 
- """ - - @wsme_pecan.wsexpose(LoginResponse, body=LoginCredentials) - def post(self, body): - data = { - 'user_id': body.name, - 'password': body.password} - - auth_response = request.storage_conn.authenticate(request.ctxt, **data) - return LoginResponse(**auth_response) - - -class V1Controller(RestBase): - accounts = AccountsController() - roles = RolesController() - users = UsersController() - - tokens = TokensController() - - -class RootController(RestBase): - v1 = V1Controller() diff --git a/billingstack/identity/base.py b/billingstack/identity/base.py deleted file mode 100644 index ddfa906..0000000 --- a/billingstack/identity/base.py +++ /dev/null @@ -1,173 +0,0 @@ -from oslo.config import cfg - -from billingstack.plugin import Plugin - -cfg.CONF.import_opt('storage_driver', 'billingstack.identity.api', - group='service:identity_api') - - -class IdentityPlugin(Plugin): - """ - A base IdentityPlugin - """ - __plugin_ns__ = 'billingstack.identity_plugin' - __plugin_type__ = 'identity' - - @classmethod - def get_plugin(self, name=cfg.CONF['service:identity_api'].storage_driver, - **kw): - return super(IdentityPlugin, self).get_plugin(name, **kw) - - def authenticate(self, context, user_id=None, password=None, - account_id=None): - """ - Authenticate a User - - :param user_id: User ID - :param password: User Password - :param account_id: User ID - """ - raise NotImplementedError - - def create_user(self, context, values): - """ - Create a User. - - :param values: The values to create the User from. - """ - raise NotImplementedError - - def list_users(self, context, criterion=None): - """ - List users. - - :param criterion: Criterion to filter on. - """ - raise NotImplementedError - - def get_user(self, context, id_): - """ - Get a User by ID. - - :param id_: User id. - """ - raise NotImplementedError - - def update_user(self, context, id, values): - """ - Update a User. - - :param id_: User ID. - :param values: Values to update the User with. 
- """ - raise NotImplementedError - - def delete_user(self, context, id_): - """ - Delete User. - - :param id_: User ID to delete. - """ - raise NotImplementedError - - def create_account(self, context, values): - """ - Create an Account. - - :param values: Values to create Account from. - """ - raise NotImplementedError - - def list_accounts(self, context, criterion=None): - """ - List Accounts. - - :param criterion: Criterion to filter on. - """ - raise NotImplementedError - - def get_account(self, context, id_): - """ - Get Account - - :param id_: Account ID. - """ - raise NotImplementedError - - def update_account(self, context, id_, values): - """ - Update Account. - - :param id_: Account ID. - :param values: Account values. - """ - raise NotImplementedError - - def delete_account(self, context, id_): - """ - Delete Account. - - :param id_: Account ID - """ - raise NotImplementedError - - def create_role(self, context, values): - """ - Create an Role. - - :param values: Values to create Role from. - """ - raise NotImplementedError - - def list_roles(self, context, criterion=None): - """ - List Accounts. - - :param criterion: Criterion to filter on. - """ - raise NotImplementedError - - def get_role(self, context, id_): - """ - Get Role. - - :param id_: Role ID. - """ - raise NotImplementedError - - def update_role(self, context, id_, values): - """ - Update Role. - - :param id_: Role ID. - :param values: Role values. - """ - raise NotImplementedError - - def delete_role(self, context, id_): - """ - Delete Role. - - :param id_: Role ID - """ - raise NotImplementedError - - def create_grant(self, context, user_id, account_id, role_id): - """ - Create a Grant - - :param user_id: User ID. - :param account_id: Account ID. - :param role_id: Role ID. - """ - raise NotImplementedError - - def remove_grant(self, context, user_id, account_id, role_id): - """ - Remove a Users Role grant on a Account - - :param user_id: User ID. - :param account_id: Account ID. 
- :param role_id: Role ID. - """ - raise NotImplementedError diff --git a/billingstack/identity/cms.py b/billingstack/identity/cms.py deleted file mode 100644 index 071a902..0000000 --- a/billingstack/identity/cms.py +++ /dev/null @@ -1,174 +0,0 @@ -import hashlib - -from billingstack.openstack.common import log - - -subprocess = None -LOG = log.getLogger(__name__) -PKI_ANS1_PREFIX = 'MII' - - -def _ensure_subprocess(): - # NOTE(vish): late loading subprocess so we can - # use the green version if we are in - # eventlet. - global subprocess - if not subprocess: - try: - from eventlet import patcher - if patcher.already_patched.get('os'): - from eventlet.green import subprocess - else: - import subprocess - except ImportError: - import subprocess - - -def cms_verify(formatted, signing_cert_file_name, ca_file_name): - """ - verifies the signature of the contents IAW CMS syntax - """ - _ensure_subprocess() - process = subprocess.Popen(["openssl", "cms", "-verify", - "-certfile", signing_cert_file_name, - "-CAfile", ca_file_name, - "-inform", "PEM", - "-nosmimecap", "-nodetach", - "-nocerts", "-noattr"], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - output, err = process.communicate(formatted) - retcode = process.poll() - if retcode: - LOG.error(_('Verify error: %s') % err) - raise subprocess.CalledProcessError(retcode, "openssl", output=err) - return output - - -def token_to_cms(signed_text): - copy_of_text = signed_text.replace('-', '/') - - formatted = "-----BEGIN CMS-----\n" - line_length = 64 - while len(copy_of_text) > 0: - if (len(copy_of_text) > line_length): - formatted += copy_of_text[:line_length] - copy_of_text = copy_of_text[line_length:] - else: - formatted += copy_of_text - copy_of_text = "" - formatted += "\n" - - formatted += "-----END CMS-----\n" - - return formatted - - -def verify_token(token, signing_cert_file_name, ca_file_name): - return cms_verify(token_to_cms(token), - signing_cert_file_name, - ca_file_name) - - 
-def is_ans1_token(token): - ''' - thx to ayoung for sorting this out. - - base64 decoded hex representation of MII is 3082 - In [3]: binascii.hexlify(base64.b64decode('MII=')) - Out[3]: '3082' - - re: http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf - - pg4: For tags from 0 to 30 the first octet is the identfier - pg10: Hex 30 means sequence, followed by the length of that sequence. - pg5: Second octet is the length octet - first bit indicates short or long form, next 7 bits encode the number - of subsequent octets that make up the content length octets as an - unsigned binary int - - 82 = 10000010 (first bit indicates long form) - 0000010 = 2 octets of content length - so read the next 2 octets to get the length of the content. - - In the case of a very large content length there could be a requirement to - have more than 2 octets to designate the content length, therefore - requiring us to check for MIM, MIQ, etc. - In [4]: base64.b64encode(binascii.a2b_hex('3083')) - Out[4]: 'MIM=' - In [5]: base64.b64encode(binascii.a2b_hex('3084')) - Out[5]: 'MIQ=' - Checking for MI would become invalid at 16 octets of content length - 10010000 = 90 - In [6]: base64.b64encode(binascii.a2b_hex('3090')) - Out[6]: 'MJA=' - Checking for just M is insufficient - - But we will only check for MII: - Max length of the content using 2 octets is 7FFF or 32767 - It's not practical to support a token of this length or greater in http - therefore, we will check for MII only and ignore the case of larger tokens - ''' - return token[:3] == PKI_ANS1_PREFIX - - -def cms_sign_text(text, signing_cert_file_name, signing_key_file_name): - """ Uses OpenSSL to sign a document - Produces a Base64 encoding of a DER formatted CMS Document - http://en.wikipedia.org/wiki/Cryptographic_Message_Syntax - """ - _ensure_subprocess() - process = subprocess.Popen(["openssl", "cms", "-sign", - "-signer", signing_cert_file_name, - "-inkey", signing_key_file_name, - "-outform", "PEM", - 
"-nosmimecap", "-nodetach", - "-nocerts", "-noattr"], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - output, err = process.communicate(text) - retcode = process.poll() - if retcode or "Error" in err: - if retcode == 3: - LOG.error(_("Signing error: Unable to load certificate - " - "ensure you've configured PKI with " - "'keystone-manage pki_setup'")) - else: - LOG.error(_('Signing error: %s') % err) - raise subprocess.CalledProcessError(retcode, "openssl") - return output - - -def cms_sign_token(text, signing_cert_file_name, signing_key_file_name): - output = cms_sign_text(text, signing_cert_file_name, signing_key_file_name) - return cms_to_token(output) - - -def cms_to_token(cms_text): - - start_delim = "-----BEGIN CMS-----" - end_delim = "-----END CMS-----" - signed_text = cms_text - signed_text = signed_text.replace('/', '-') - signed_text = signed_text.replace(start_delim, '') - signed_text = signed_text.replace(end_delim, '') - signed_text = signed_text.replace('\n', '') - - return signed_text - - -def cms_hash_token(token_id): - """ - return: for ans1_token, returns the hash of the passed in token - otherwise, returns what it was passed in. - """ - if token_id is None: - return None - if is_ans1_token(token_id): - hasher = hashlib.md5() - hasher.update(token_id) - return hasher.hexdigest() - else: - return token_id diff --git a/billingstack/identity/impl_sqlalchemy.py b/billingstack/identity/impl_sqlalchemy.py deleted file mode 100644 index 8a98b15..0000000 --- a/billingstack/identity/impl_sqlalchemy.py +++ /dev/null @@ -1,216 +0,0 @@ -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -A Identity plugin... -""" -from oslo.config import cfg -from sqlalchemy import Column, ForeignKey -from sqlalchemy import Unicode -from sqlalchemy.orm import exc -from sqlalchemy.ext.declarative import declarative_base - -from billingstack import exceptions -from billingstack.openstack.common import log as logging -from billingstack.sqlalchemy.types import JSON, UUID -from billingstack.sqlalchemy import api, model_base, session -from billingstack.identity.base import IdentityPlugin -from billingstack.identity import utils as identity_utils - - -LOG = logging.getLogger(__name__) - - -# DB SCHEMA -BASE = declarative_base(cls=model_base.ModelBase) - - -cfg.CONF.register_group(cfg.OptGroup( - name='identity:sqlalchemy', title='Config for internal identity plugin')) - - -cfg.CONF.register_opts(session.SQLOPTS, group='identity:sqlalchemy') - - -class Role(BASE, model_base.BaseMixin): - name = Column(Unicode(64), unique=True, nullable=False) - extra = Column(JSON) - - -class UserAccountGrant(BASE): - user_id = Column(UUID, ForeignKey('user.id', ondelete='CASCADE', - onupdate='CASCADE'), primary_key=True) - account_id = Column(UUID, ForeignKey('account.id', ondelete='CASCADE', - onupdate='CASCADE'), primary_key=True) - data = Column(JSON) - - -class Account(BASE, model_base.BaseMixin): - type = Column(Unicode(10), nullable=False) - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(100)) - - -class User(BASE, model_base.BaseMixin): - """ - A User that can login. 
- """ - name = Column(Unicode(20), nullable=False) - password = Column(Unicode(255), nullable=False) - - -class SQLAlchemyPlugin(IdentityPlugin, api.HelpersMixin): - """ - A Internal IdentityPlugin that currently relies on SQLAlchemy as - the "Backend" - """ - def __init__(self): - self.setup('identity:sqlalchemy') - - def base(self): - return BASE - - def authenticate(self, context, user_id=None, password=None, - account_id=None): - #self._get_by_name(models. - pass - - def create_user(self, context, values): - row = User(**values) - row.password = identity_utils.hash_password(row.password) - self._save(row) - return dict(row) - - def list_users(self, context, criterion=None): - rows = self._list(User, criterion=criterion) - return map(dict, rows) - - def get_user(self, context, id_): - row = self._get(User, id_) - return dict(row) - - def update_user(self, context, id_, values): - row = self._update(User, id_, values) - return dict(row) - - def delete_user(self, context, id_): - self._delete(User, id_) - - def create_account(self, context, values): - row = Account(**values) - self._save(row) - return dict(row) - - def list_accounts(self, context, criterion=None): - rows = self._list(Account, criterion=criterion) - return map(dict, rows) - - def get_account(self, context, id_): - row = self._get(Account, id_) - return dict(row) - - def update_account(self, context, id_, values): - row = self._update(Account, id_, values) - return dict(row) - - def delete_account(self, context, id_): - self._delete(Account, id_) - - def create_role(self, context, values): - row = Role(**values) - self._save(row) - return dict(row) - - def list_roles(self, context, criterion=None): - rows = self._list(Role, criterion=criterion) - return map(dict, rows) - - def get_role(self, context, id_): - row = self._get(Role, id_) - return dict(row) - - def update_role(self, context, id_, values): - row = self._update(Role, id_, values) - return dict(row) - - def delete_role(self, context, id_): 
- self._delete(Role, id_) - - def get_metadata(self, user_id=None, account_id=None): - q = self.session.query(UserAccountGrant)\ - .filter_by(user_id=user_id, account_id=account_id) - try: - return q.one().data - except exc.NoResultFound: - raise exceptions.NotFound - - def create_metadata(self, user_id, account_id, metadata): - ref = UserAccountGrant( - account_id=account_id, - user_id=user_id, - data=metadata) - ref.save(self.session) - return metadata - - def update_metadata(self, user_id, account_id, metadata): - q = self.session.query(UserAccountGrant)\ - .filter_by(user_id=user_id, account_id=account_id) - ref = q.first() - data = ref.data.copy() - data.update(metadata) - ref.data = data - ref.save(self.session) - return ref - - def create_grant(self, context, user_id, account_id, role_id): - self._get(Role, role_id) - - try: - ref = self.get_metadata(user_id=user_id, account_id=account_id) - is_new = False - except exceptions.NotFound: - ref = {} - is_new = True - - roles = set(ref.get('roles', [])) - roles.add(role_id) - ref['roles'] = list(roles) - - if is_new: - self.create_metadata(user_id, account_id, ref) - else: - self.update_metadata(user_id, account_id, ref) - - def revoke_grant(self, context, user_id, account_id, role_id): - self._get(Role, role_id) - - try: - ref = self.get_metadata(user_id=user_id, account_id=account_id) - is_new = False - except exceptions.NotFound: - ref = {} - is_new = True - - roles = set(ref.get('roles', [])) - - try: - roles.remove(role_id) - except KeyError: - raise exceptions.NotFound(role_id=role_id) - - ref['roles'] = list(roles) - - if is_new: - self.create_metadata(user_id, account_id, ref) - else: - self.update_metadata(user_id, account_id, ref) diff --git a/billingstack/identity/token_base.py b/billingstack/identity/token_base.py deleted file mode 100644 index c2c0ff1..0000000 --- a/billingstack/identity/token_base.py +++ /dev/null @@ -1,80 +0,0 @@ -import datetime - -from oslo.config import cfg - -from 
billingstack.identity import cms -from billingstack.openstack.common import timeutils -from billingstack.plugin import Plugin - - -cfg.CONF.register_group( - cfg.OptGroup(name='identity:token', title="Token configuration")) - - -cfg.CONF.register_opts([ - cfg.IntOpt('expiration', default=86400)], - group='identity:token') - - -def unique_id(token_id): - """Return a unique ID for a token. - - The returned value is useful as the primary key of a database table, - memcache store, or other lookup table. - - :returns: Given a PKI token, returns it's hashed value. Otherwise, returns - the passed-in value (such as a UUID token ID or an existing - hash). - """ - return cms.cms_hash_token(token_id) - - -def default_expire_time(): - """Determine when a fresh token should expire. - - Expiration time varies based on configuration (see ``[token] expiration``). - - :returns: a naive UTC datetime.datetime object - - """ - expiration = cfg.CONF['identity:token'].expiration - expire_delta = datetime.timedelta(seconds=expiration) - return timeutils.utcnow() + expire_delta - - -class TokenPlugin(Plugin): - __plugin_ns__ = 'billingstack.token' - __plugin_type__ = 'token' - - """ - Base for Token providers like Memcache, SQL, Redis..... - - Note: This is NOT responsable for user / password authentication. It's a - layer that manages tokens.... - """ - def get_token(self, token_id): - """ - Get a Token - - :param token_id: Token ID to get... - """ - raise NotImplementedError - - def delete_token(self, token_id): - """ - Delete a Token - - :param token_id: Token ID to delete. - """ - raise NotImplementedError - - def list_tokens(self): - """ - List tokens - """ - - def list_revoked(self): - """ - List out revoked Tokens. 
- """ - raise NotImplementedError diff --git a/billingstack/identity/token_memcache.py b/billingstack/identity/token_memcache.py deleted file mode 100644 index e246b96..0000000 --- a/billingstack/identity/token_memcache.py +++ /dev/null @@ -1,128 +0,0 @@ -import copy -import memcache - -from oslo.config import cfg - -from billingstack import exceptions -from billingstack.openstack.common.gettextutils import _ -from billingstack.identity.token_base import TokenPlugin -from billingstack.identity.token_base import default_expire_time, unique_id -from billingstack.openstack.common import jsonutils -from billingstack import utils - - -cfg.CONF.register_group( - cfg.OptGroup(name='token:memcache', title="Memcache")) - - -cfg.CONF.register_opts([ - cfg.StrOpt('memcache_servers', default='127.0.0.1:11211')], - group='token:memcache') - - -class MemcachePlugin(TokenPlugin): - __plugin_name__ = 'memcache' - - def __init__(self, client=None): - super(MemcachePlugin, self).__init__() - self._memcache_client = client - - @property - def client(self): - return self._memcache_client or self._get_memcache_client() - - def _get_memcache_client(self): - servers = cfg.CONF[self.name].memcache_servers.split(';') - self._memcache_client = memcache.Client(servers, debug=0) - return self._memcache_client - - def _prefix_token_id(self, token_id): - return 'token-%s' % token_id.encode('utf-8') - - def _prefix_user_id(self, user_id): - return 'usertokens-%s' % user_id.encode('utf-8') - - def get_token(self, token_id): - if token_id is None: - #FIXME(ekarlso): Better error here? - raise exceptions.NotFound - - ptk = self._prefix_token_id(token_id) - token = self.client.get(ptk) - - if token is None: - #FIXME(ekarlso): Better error here? 
- raise exceptions.NotFound - - return token - - def create_token(self, token_id, data): - data_copy = copy.deepcopy(data) - ptk = self._prefix_token_id(unique_id(token_id)) - - if not data_copy.get('expires'): - data_copy['expires'] = default_expire_time() - - kwargs = {} - - if data_copy['expires'] is not None: - expires_ts = utils.unixtime(data_copy['expires']) - kwargs['time'] = expires_ts - - self.client.set(ptk, data_copy, **kwargs) - - if 'id' in data['user']: - token_data = jsonutils.dumps(token_id) - user_id = data['user']['id'] - user_key = self._prefix_user_id(user_id) - - if not self.client.append(user_key, ',%s' % token_data): - if not self.client.add(user_key, token_data): - if not self.client.append(user_key, ',%s' % token_data): - msg = _('Unable to add token user list.') - raise exceptions.UnexpectedError(msg) - return copy.deepcopy(data_copy) - - def _add_to_revocation_list(self, data): - data_json = jsonutils.dumps(data) - if not self.client.append(self.revocation_key, ',%s' % data_json): - if not self.client.add(self.revocation_key, data_json): - if not self.client.append(self.revocation_key, - ',%s' % data_json): - msg = _('Unable to add token to revocation list.') - raise exceptions.UnexpectedError(msg) - - def delete_token(self, token_id): - # Test for existence - data = self.get_token(unique_id(token_id)) - ptk = self._prefix_token_id(unique_id(token_id)) - result = self.client.delete(ptk) - self._add_to_revocation_list(data) - return result - - def list_tokens(self, user_id, account_id=None, trust_id=None): - tokens = [] - user_key = self._prefix_user_id(user_id) - user_record = self.client.get(user_key) or "" - token_list = jsonutils.loads('[%s]' % user_record) - - for token_id in token_list: - ptk = self._prefix_token_id(token_id) - token_ref = self.client.get(ptk) - - if token_ref: - if account_id is not None: - account = token_ref.get('account') - if not account: - continue - if account.get('id') != account_id: - continue - - 
tokens.append(token_id) - return tokens - - def list_revoked_tokens(self): - list_json = self.client.get(self.revocation_key) - if list_json: - return jsonutils.loads('[%s]' % list_json) - return [] diff --git a/billingstack/identity/utils.py b/billingstack/identity/utils.py deleted file mode 100644 index fd7edb8..0000000 --- a/billingstack/identity/utils.py +++ /dev/null @@ -1,53 +0,0 @@ -import passlib.hash -from oslo.config import cfg -import random -import string - -from billingstack import exceptions - - -cfg.CONF.register_opts([ - cfg.IntOpt('crypt_strength', default=40000)], - group='service:identity_api') - - -MAX_PASSWORD_LENGTH = 4096 - - -def generate_random_string(chars=7): - return u''.join(random.sample(string.ascii_letters * 2 + string.digits, - chars)) - - -def trunc_password(password): - """Truncate passwords to the MAX_PASSWORD_LENGTH.""" - try: - if len(password) > MAX_PASSWORD_LENGTH: - return password[:MAX_PASSWORD_LENGTH] - else: - return password - except TypeError: - raise exceptions.ValidationError(attribute='string', target='password') - - -def hash_password(password): - """Hash a password. Hard.""" - password_utf8 = trunc_password(password).encode('utf-8') - if passlib.hash.sha512_crypt.identify(password_utf8): - return password_utf8 - h = passlib.hash.sha512_crypt.encrypt(password_utf8, - rounds=cfg.CONF.crypt_strength) - return h - - -def check_password(password, hashed): - """Check that a plaintext password matches hashed. - - hashpw returns the salt value concatenated with the actual hash value. - It extracts the actual salt if this value is then passed as the salt. 
- - """ - if password is None: - return False - password_utf8 = trunc_password(password).encode('utf-8') - return passlib.hash.sha512_crypt.verify(password_utf8, hashed) diff --git a/billingstack/tests/identity/__init__.py b/billingstack/tests/identity/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/identity/test_api.py b/billingstack/tests/identity/test_api.py deleted file mode 100644 index 2805ff1..0000000 --- a/billingstack/tests/identity/test_api.py +++ /dev/null @@ -1,265 +0,0 @@ -import os - -from pecan import set_config - -from oslo.config import cfg - -from billingstack.samples import get_samples -from billingstack.identity.base import IdentityPlugin -from billingstack.tests.base import BaseTestCase - - -cfg.CONF.import_opt( - 'database_connection', - 'billingstack.identity.impl_sqlalchemy', - group='identity:sqlalchemy') - - -ROLE = { - 'name': 'Member' -} - - -# FIXME: Remove or keep -class IdentityAPITest(BaseTestCase): - """ - billingstack.api base test - """ - - __test__ = False - PATH_PREFIX = '/v1' - - def setUp(self): - super(IdentityAPITest, self).setUp() - - self.samples = get_samples() - - self.config( - storage_driver='sqlalchemy', - group='service:identity_api' - ) - - self.config( - database_connection='sqlite://', - group='identity:sqlalchemy') - - self.plugin = IdentityPlugin.get_plugin(invoke_on_load=True) - self.plugin.setup_schema() - - self.app = self.make_app() - - def tearDown(self): - self.plugin.teardown_schema() - super(IdentityAPITest, self).tearDown() - set_config({}, overwrite=True) - - def make_config(self, enable_acl=True): - root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), - '..', - '..', - ) - ) - - return { - 'app': { - 'root': 'billingstack.identity.api.v1.RootController', - 'modules': ['billingstack.identity.api'], - 'static_root': '%s/public' % root_dir, - 'template_path': '%s/api/templates' % root_dir, - 'enable_acl': enable_acl, - }, - - 'logging': { - 
'loggers': { - 'root': {'level': 'INFO', 'handlers': ['console']}, - 'wsme': {'level': 'INFO', 'handlers': ['console']}, - 'billingstack': {'level': 'DEBUG', - 'handlers': ['console'], - }, - }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'simple' - } - }, - 'formatters': { - 'simple': { - 'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]' - '[%(threadName)s] %(message)s') - } - }, - }, - } - - # Accounts - def test_create_account(self): - values = self.get_fixture('merchant') - values['type'] = 'merchant' - - self.post('accounts', values) - - def test_list_accounts(self): - resp = self.get('accounts') - self.assertLen(0, resp.json) - - def test_get_account(self): - values = self.get_fixture('merchant') - values['type'] = 'merchant' - - resp = self.post('accounts', values) - - resp_actual = self.get('accounts/%s' % resp.json['id']) - - self.assertData(resp.json, resp_actual.json) - - def test_update_account(self): - values = self.get_fixture('merchant') - values['type'] = 'merchant' - - resp = self.post('accounts', values) - - expected = dict(resp.json, name='Merchant') - - resp = self.put('accounts/%s' % expected['id'], expected) - - self.assertData(expected, resp.json) - - def test_delete_account(self): - values = self.get_fixture('merchant') - values['type'] = 'merchant' - - resp = self.post('accounts', values) - - self.delete('accounts/%s' % resp.json['id']) - - resp = self.get('accounts') - self.assertLen(0, resp.json) - - # Roles - def test_create_role(self): - values = ROLE.copy() - - resp = self.post('roles', values) - - assert resp.json['name'] == values['name'] - assert resp.json['id'] is not None - - def test_list_roles(self): - resp = self.get('roles') - self.assertLen(0, resp.json) - - def test_get_role(self): - values = ROLE.copy() - - resp = self.post('roles', values) - - resp_actual = self.get('roles/%s' % resp.json['id']) - - self.assertData(resp.json, resp_actual.json) - - def 
test_update_role(self): - values = ROLE.copy() - - resp = self.post('roles', values) - - expected = dict(resp.json, name='SuperMember') - - resp = self.put('roles/%s' % expected['id'], expected) - - self.assertData(expected, resp.json) - - def test_delete_role(self): - values = ROLE.copy() - - resp = self.post('roles', values) - - self.delete('roles/%s' % resp.json['id']) - - resp = self.get('roles') - self.assertLen(0, resp.json) - - def test_create_user(self): - values = self.get_fixture('user') - - self.post('users', values) - - def test_list_users(self): - resp = self.get('users') - self.assertLen(0, resp.json) - - def test_get_user(self): - values = self.get_fixture('user') - - resp = self.post('users', values) - - resp_actual = self.get('users/%s' % resp.json['id']) - - self.assertData(resp.json, resp_actual.json) - - def test_update_user(self): - values = self.get_fixture('user') - - resp = self.post('users', values) - - expected = dict(resp.json, name='test') - - resp = self.put('users/%s' % expected['id'], expected) - - self.assertData(expected, resp.json) - - def test_delete_user(self): - values = self.get_fixture('user') - - resp = self.post('users', values) - - self.delete('users/%s' % resp.json['id']) - - resp = self.get('users') - self.assertLen(0, resp.json) - - # Grants - def test_create_grant(self): - account_data = self.get_fixture('merchant') - account_data['type'] = 'merchant' - - account = self.post('accounts', account_data).json - - user_data = self.get_fixture('user') - user = self.post('users', user_data).json - - role_data = ROLE.copy() - role = self.post('roles', role_data).json - - url = 'accounts/%s/users/%s/roles/%s' % ( - account['id'], user['id'], role['id']) - - self.put(url, {}) - - def test_revoke_grant(self): - account_data = self.get_fixture('merchant') - account_data['type'] = 'merchant' - - account = self.post('accounts', account_data).json - - user_data = self.get_fixture('user') - user = self.post('users', user_data).json - - 
role_data = ROLE.copy() - role = self.post('roles', role_data).json - - url = 'accounts/%s/users/%s/roles/%s' % ( - account['id'], user['id'], role['id']) - - self.put(url, {}) - - self.delete(url) - - def test_login(self): - user_data = self.get_fixture('user') - self.post('users', user_data).json - - resp = self.post('tokens', user_data) - - assert 'token' in resp.json diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index 3de6c48..a7a3978 100644 --- a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -32,16 +32,6 @@ allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack. # Port the bind the API server to #api_port = 9001 -[service:identity_api] -# Address to bind the API server -# api_host = 0.0.0.0 - -# Port the bind the API server to -api_port = 9092 - -admin_token = rand0m - - ################################################# # Central service ################################################# @@ -61,13 +51,13 @@ admin_token = rand0m ################################################# -# Rating service +# Biller service ################################################# #----------------------- # SQLAlchemy Storage #----------------------- -[rating:sqlalchemy] +[biller:sqlalchemy] # Database connection string - to configure options for a given implementation # like sqlalchemy or other see below #database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack @@ -80,13 +70,13 @@ admin_token = rand0m ################################################# -# Identity service +# Collector service ################################################# #----------------------- # SQLAlchemy Storage #----------------------- -[identity:sqlalchemy] +[collector:sqlalchemy] # Database connection string - to configure options for a given implementation # like sqlalchemy or other see below #database_connection = 
mysql://billingstack:billingstack@localhost:3306/billingstack @@ -97,4 +87,21 @@ admin_token = rand0m #max_retries = 10 #retry_interval = 10 +################################################# +# Rating service +################################################# + +#----------------------- +# SQLAlchemy Storage +#----------------------- +[rating:sqlalchemy] +# Database connection string - to configure options for a given implementation +# like sqlalchemy or other see below +#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack +#connection_debug = 100 +#connection_trace = False +#sqlite_synchronous = True +#idle_timeout = 3600 +#max_retries = 10 +#retry_interval = 10 From bee4f363de9e2c58c492574bfd8bba4f3aeaf9c0 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 24 Apr 2013 01:40:57 -0700 Subject: [PATCH 106/182] Remove idenity_driver --- etc/billingstack/billingstack.conf.sample | 2 -- 1 file changed, 2 deletions(-) diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index a7a3978..8b17409 100644 --- a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -14,8 +14,6 @@ debug = True # Log directory #logdir = /var/log/billingstack -identity_driver = internal - allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack.common.exception # Enabled API Version 1 extensions From 8c437d02aa43717fd4e1e0852a161aeff1d69643 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 24 Apr 2013 06:32:25 -0700 Subject: [PATCH 107/182] Remove identity bin --- bin/billingstack-identity-api | 52 ----------------------------------- 1 file changed, 52 deletions(-) delete mode 100644 bin/billingstack-identity-api diff --git a/bin/billingstack-identity-api b/bin/billingstack-identity-api deleted file mode 100644 index e2c3bb7..0000000 --- a/bin/billingstack-identity-api +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python -# -*- encoding: utf-8 
-*- -# -# Copyright © 2012 New Dream Network, LLC (DreamHost) -# -# Author: Doug Hellmann -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Set up the development API server. -""" -import os -import sys -from wsgiref import simple_server - -from oslo.config import cfg - -from billingstack.identity.api import app -from billingstack import service -from billingstack.openstack.common import log as logging - - -if __name__ == '__main__': - # Parse OpenStack config file and command line options, then - # configure logging. 
- service.prepare_service(sys.argv) - - # Build the WSGI app - root = app.setup_app() - - # Create the WSGI server and start it - host, port = cfg.CONF['service:identity_api'].api_listen, int(cfg.CONF['service:identity_api'].api_port) - srv = simple_server.make_server(host, port, root) - - print 'Starting server in PID %s' % os.getpid() - - print "serving on http://%s:%s" % (host, port) - - try: - srv.serve_forever() - except KeyboardInterrupt: - # allow CTRL+C to shutdown without an error - pass From 6161ff35f0beb1bcf77eeaf6fdd03a857de1779e Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 24 Apr 2013 06:44:21 -0700 Subject: [PATCH 108/182] Rename --- bin/{billingstack-pg-gateway => billingstack-collector} | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) rename bin/{billingstack-pg-gateway => billingstack-collector} (87%) diff --git a/bin/billingstack-pg-gateway b/bin/billingstack-collector similarity index 87% rename from bin/billingstack-pg-gateway rename to bin/billingstack-collector index 6ccac43..6ff723b 100644 --- a/bin/billingstack-pg-gateway +++ b/bin/billingstack-collector @@ -1,7 +1,6 @@ #!/usr/bin/env python -# Copyright 2012 Managed I.T. # -# Author: Kiall Mac Innes +# Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain @@ -29,5 +28,5 @@ utils.read_config('billingstack', sys.argv) logging.setup('billingstack') launcher = service.launch(central_service.Service(), - cfg.CONF['service:payment_gateway'].workers) + cfg.CONF['service:collector'].workers) launcher.wait() From ed031ac82f9693d05c6153d7876361a2b849f54a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 24 Apr 2013 06:44:37 -0700 Subject: [PATCH 109/182] Add sublime file --- billingstack.sublime-project | 59 ++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 billingstack.sublime-project diff --git a/billingstack.sublime-project b/billingstack.sublime-project new file mode 100644 index 0000000..87c9755 --- /dev/null +++ b/billingstack.sublime-project @@ -0,0 +1,59 @@ +{ + "folders": + [ + { + "file_exclude_patterns": + [ + "*.pyc", + "*.pyo", + "*.exe", + "*.dll", + "*.obj", + "*.o", + "*.a", + "*.lib", + "*.so", + "*.dylib", + "*.ncb", + "*.sdf", + "*.suo", + "*.pdb", + "*.idb", + ".DS_Store", + "*.class", + "*.psd", + "*.db", + ".vagrant", + ".noseids" + ], + "folder_exclude_patterns": + [ + ".svn", + ".git", + ".hg", + "CVS", + "*.egg", + "*.egg-info", + ".tox", + "venv", + ".venv", + "doc/build", + "doc/source/api" + ], + "path": "." 
+ } + ], + "settings": + { + "default_line_ending": "unix", + "detect_indentation": false, + "ensure_newline_at_eof_on_save": true, + "rulers": + [ + 79 + ], + "tab_size": 4, + "translate_tabs_to_spaces": true, + "trim_trailing_white_space_on_save": true + } +} From b96b0b7c0ee523a919bba0ccd1f77d6b37581413 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 24 Apr 2013 06:57:22 -0700 Subject: [PATCH 110/182] Change to new format --- openstack.conf | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/openstack.conf b/openstack.conf index 4830922..0f654b2 100644 --- a/openstack.conf +++ b/openstack.conf @@ -1,3 +1,32 @@ [DEFAULT] -modules=iniparser,importutils,excutils,local,jsonutils,timeutils,service,eventlet_backdoor,loopingcall,utils,exception,setup,version,uuidutils,processutils,db,log,gettextutils,iso8601,notifier,rpc,context,threadgroup,network_utils,lockutils,fileutils + +# The list of modules to copy from oslo-incubator.git +module=context +module=db +module=eventlet_backdoor +module=exception +module=excutils +module=fileutils +module=gettextutils +module=importutils +module=iniparser +module=iso8601 +module=jsonutils +module=local +module=lockutils +module=log +module=loopingcall +module=network_utils +module=notifier +module=processutils +module=rpc +module=service +module=setup +module=threadgroup +module=timeutils +module=utils +module=uuidutils +module=version + +# Base base=billingstack From 076963a68106d28b75c3c70623851dd34294fc55 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 11:56:31 -0700 Subject: [PATCH 111/182] Change Rating > Rater --- billingstack/{rating => rater}/__init__.py | 2 +- billingstack/{rating => rater}/rpcapi.py | 12 ++++++------ billingstack/{rating => rater}/service.py | 6 +++--- billingstack/{rating => rater}/storage/__init__.py | 8 ++++---- .../{rating => rater}/storage/impl_sqlalchemy.py | 8 ++++---- billingstack/tests/rater/__init__.py | 0 
bin/billingstack-rater | 6 +++--- setup.py | 5 ++--- 8 files changed, 23 insertions(+), 24 deletions(-) rename billingstack/{rating => rater}/__init__.py (93%) rename billingstack/{rating => rater}/rpcapi.py (78%) rename billingstack/{rating => rater}/service.py (90%) rename billingstack/{rating => rater}/storage/__init__.py (76%) rename billingstack/{rating => rater}/storage/impl_sqlalchemy.py (89%) create mode 100644 billingstack/tests/rater/__init__.py diff --git a/billingstack/rating/__init__.py b/billingstack/rater/__init__.py similarity index 93% rename from billingstack/rating/__init__.py rename to billingstack/rater/__init__.py index a8fbd58..4b07b70 100644 --- a/billingstack/rating/__init__.py +++ b/billingstack/rater/__init__.py @@ -9,4 +9,4 @@ help='Number of worker processes to spawn'), cfg.StrOpt('storage-driver', default='sqlalchemy', help='The storage driver to use'), -], group='service:rating') +], group='service:rater') diff --git a/billingstack/rating/rpcapi.py b/billingstack/rater/rpcapi.py similarity index 78% rename from billingstack/rating/rpcapi.py rename to billingstack/rater/rpcapi.py index 10f3f06..87868ae 100644 --- a/billingstack/rating/rpcapi.py +++ b/billingstack/rater/rpcapi.py @@ -3,19 +3,19 @@ from billingstack.openstack.common.rpc import proxy rpcapi_opts = [ - cfg.StrOpt('rating_topic', default='rating', - help='the topic rating nodes listen on') + cfg.StrOpt('rater_topic', default='rater', + help='the topic rater nodes listen on') ] cfg.CONF.register_opts(rpcapi_opts) -class RatingAPI(proxy.RpcProxy): +class RaterAPI(proxy.RpcProxy): BASE_RPC_VERSION = '1.0' def __init__(self): - super(RatingAPI, self).__init__( - topic=cfg.CONF.rating_topic, + super(RaterAPI, self).__init__( + topic=cfg.CONF.rater_topic, default_version=self.BASE_RPC_VERSION) # Subscriptions @@ -37,4 +37,4 @@ def delete_usage(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_usage', id_=id_)) -rating_api = RatingAPI() +rater_api = RaterAPI() diff 
--git a/billingstack/rating/service.py b/billingstack/rater/service.py similarity index 90% rename from billingstack/rating/service.py rename to billingstack/rater/service.py index 8f8302a..0181d93 100644 --- a/billingstack/rating/service.py +++ b/billingstack/rater/service.py @@ -1,10 +1,10 @@ from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.rating import storage +from billingstack.rater import storage -cfg.CONF.import_opt('rating_topic', 'billingstack.rating.rpcapi') +cfg.CONF.import_opt('rater_topic', 'billingstack.rater.rpcapi') cfg.CONF.import_opt('host', 'billingstack.netconf') cfg.CONF.import_opt('state_path', 'billingstack.paths') @@ -22,7 +22,7 @@ class Service(rpc_service.Service): def __init__(self, *args, **kwargs): kwargs.update( host=cfg.CONF.host, - topic=cfg.CONF.rating_topic, + topic=cfg.CONF.rater_topic, ) super(Service, self).__init__(*args, **kwargs) diff --git a/billingstack/rating/storage/__init__.py b/billingstack/rater/storage/__init__.py similarity index 76% rename from billingstack/rating/storage/__init__.py rename to billingstack/rater/storage/__init__.py index 8f074be..3f01725 100644 --- a/billingstack/rating/storage/__init__.py +++ b/billingstack/rater/storage/__init__.py @@ -3,12 +3,12 @@ class StorageEngine(base.StorageEngine): - """Base class for the Rating storage""" - __plugin_ns__ = 'billingstack.rating.storage' + """Base class for the rater storage""" + __plugin_ns__ = 'billingstack.rater.storage' class Connection(base.Connection): - """Define the base API for Rating storage""" + """Define the base API for rater storage""" def create_usage(self, ctxt, values): raise NotImplementedError @@ -26,6 +26,6 @@ def delete_usage(self, ctxt, id_): def get_connection(): - name = cfg.CONF['service:rating'].storage_driver + name = cfg.CONF['service:rater'].storage_driver plugin = StorageEngine.get_plugin(name, 
invoke_on_load=True) return plugin.get_connection() diff --git a/billingstack/rating/storage/impl_sqlalchemy.py b/billingstack/rater/storage/impl_sqlalchemy.py similarity index 89% rename from billingstack/rating/storage/impl_sqlalchemy.py rename to billingstack/rater/storage/impl_sqlalchemy.py index 83e8392..54e6ebd 100644 --- a/billingstack/rating/storage/impl_sqlalchemy.py +++ b/billingstack/rater/storage/impl_sqlalchemy.py @@ -20,7 +20,7 @@ from sqlalchemy.ext.declarative import declarative_base from billingstack.openstack.common import log as logging -from billingstack.rating.storage import Connection, StorageEngine +from billingstack.rater.storage import Connection, StorageEngine from billingstack.sqlalchemy.types import UUID from billingstack.sqlalchemy import api, model_base, session @@ -32,10 +32,10 @@ cfg.CONF.register_group(cfg.OptGroup( - name='rating:sqlalchemy', title='Config for rating sqlalchemy plugin')) + name='rater:sqlalchemy', title='Config for rater sqlalchemy plugin')) -cfg.CONF.register_opts(session.SQLOPTS, group='rating:sqlalchemy') +cfg.CONF.register_opts(session.SQLOPTS, group='rater:sqlalchemy') class Usage(BASE, model_base.BaseMixin): @@ -62,7 +62,7 @@ def get_connection(self): class Connection(Connection, api.HelpersMixin): def __init__(self): - self.setup('rating:sqlalchemy') + self.setup('rater:sqlalchemy') def base(self): return BASE diff --git a/billingstack/tests/rater/__init__.py b/billingstack/tests/rater/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/bin/billingstack-rater b/bin/billingstack-rater index fd10c33..86df9b1 100644 --- a/bin/billingstack-rater +++ b/bin/billingstack-rater @@ -20,7 +20,7 @@ from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common import service from billingstack import utils -from billingstack.rating import service as rating_service +from billingstack.rater import service as rater_service eventlet.monkey_patch() @@ 
-28,6 +28,6 @@ utils.read_config('billingstack', sys.argv) logging.setup('billingstack') -launcher = service.launch(rating_service.Service(), - cfg.CONF['service:rating'].workers) +launcher = service.launch(rater_service.Service(), + cfg.CONF['service:rater'].workers) launcher.wait() diff --git a/setup.py b/setup.py index e1ee081..7b0f3db 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,6 @@ scripts=[ 'bin/billingstack-api', 'bin/billingstack-db-manage', - 'bin/billingstack-identity-api', 'bin/billingstack-manage', 'bin/billingstack-central', 'bin/billingstack-rater' @@ -59,8 +58,8 @@ [billingstack.storage] sqlalchemy = billingstack.storage.impl_sqlalchemy:SQLAlchemyStorage - [billingstack.rating.storage] - sqlalchemy = billingstack.rating.storage.impl_sqlalchemy:SQLAlchemyEngine + [billingstack.rater.storage] + sqlalchemy = billingstack.rater.storage.impl_sqlalchemy:SQLAlchemyEngine [billingstack.payment_gateway] dummy = billingstack.payment_gateway.dummy:DummyProvider From 44a247eeb584eb31294550a26778f6f323ca693d Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:23:06 -0700 Subject: [PATCH 112/182] Add remainding services and split pg code to collector --- billingstack/api/v1/resources.py | 12 ++--- billingstack/biller/__init__.py | 12 +++++ billingstack/biller/rpcapi.py | 22 ++++++++ billingstack/biller/service.py | 28 ++++++++++ billingstack/biller/storage/__init__.py | 17 +++++++ .../biller/storage/impl_sqlalchemy.py | 51 +++++++++++++++++++ billingstack/collector/__init__.py | 0 billingstack/collector/rpcapi.py | 22 ++++++++ .../{payment_gateway => collector}/service.py | 2 +- billingstack/tests/biller/__init__.py | 0 billingstack/tests/collector/__init__.py | 0 setup.py | 2 + 12 files changed, 161 insertions(+), 7 deletions(-) create mode 100644 billingstack/biller/__init__.py create mode 100644 billingstack/biller/rpcapi.py create mode 100644 billingstack/biller/service.py create mode 100644 billingstack/biller/storage/__init__.py 
create mode 100644 billingstack/biller/storage/impl_sqlalchemy.py create mode 100644 billingstack/collector/__init__.py create mode 100644 billingstack/collector/rpcapi.py rename billingstack/{payment_gateway => collector}/service.py (96%) create mode 100644 billingstack/tests/biller/__init__.py create mode 100644 billingstack/tests/collector/__init__.py diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 0a5d16c..142d1db 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -20,7 +20,7 @@ from billingstack.api.base import Rest, Query from billingstack.api.v1 import models from billingstack.central.rpcapi import central_api -from billingstack.rating.rpcapi import rating_api +from billingstack.rater.rpcapi import rater_api from wsmeext.flask import signature @@ -683,7 +683,7 @@ def create_usage(merchant_id, body): values = body.to_db() values['merchant_id'] = merchant_id - row = rating_api.create_usage(request.environ['context'], values) + row = rater_api.create_usage(request.environ['context'], values) return models.Usage.from_db(row) @@ -693,7 +693,7 @@ def create_usage(merchant_id, body): def list_usages(merchant_id, q=[]): criterion = _query_to_criterion(q, merchant_id=merchant_id) - rows = rating_api.list_usages( + rows = rater_api.list_usages( request.environ['context'], criterion=criterion) return map(models.Usage.from_db, rows) @@ -702,7 +702,7 @@ def list_usages(merchant_id, q=[]): @bp.get('/merchants//usage/') @signature([models.Usage], str, str) def get_usage(merchant_id, usage_id): - row = rating_api.get_usage(request.environ['context'], + row = rater_api.get_usage(request.environ['context'], usage_id) return models.Usage.from_db(row) @@ -711,7 +711,7 @@ def get_usage(merchant_id, usage_id): @bp.put('/merchants//usage/') @signature(models.Usage, str, str, body=models.Usage) def update_usage(merchant_id, usage_id, body): - row = rating_api.update_usage( + row = rater_api.update_usage( 
request.environ['context'], usage_id, body.to_db()) @@ -721,7 +721,7 @@ def update_usage(merchant_id, usage_id, body): @bp.delete('/merchants//usage/') def delete_usage(merchant_id, usage_id): - rating_api.delete_usage( + rater_api.delete_usage( request.environ['context'], usage_id) return Response(status=204) diff --git a/billingstack/biller/__init__.py b/billingstack/biller/__init__.py new file mode 100644 index 0000000..6bbc360 --- /dev/null +++ b/billingstack/biller/__init__.py @@ -0,0 +1,12 @@ +from oslo.config import cfg + +cfg.CONF.register_group(cfg.OptGroup( + name='service:biller', title="Configuration for Biller Service" +)) + +cfg.CONF.register_opts([ + cfg.IntOpt('workers', default=None, + help='Number of worker processes to spawn'), + cfg.StrOpt('storage-driver', default='sqlalchemy', + help='The storage driver to use'), +], group='service:biller') diff --git a/billingstack/biller/rpcapi.py b/billingstack/biller/rpcapi.py new file mode 100644 index 0000000..a2d7b62 --- /dev/null +++ b/billingstack/biller/rpcapi.py @@ -0,0 +1,22 @@ +from oslo.config import cfg + +from billingstack.openstack.common.rpc import proxy + +rpcapi_opts = [ + cfg.StrOpt('biller_topic', default='biller', + help='the topic biller nodes listen on') +] + +cfg.CONF.register_opts(rpcapi_opts) + + +class BillerAPI(proxy.RpcProxy): + BASE_RPC_VERSION = '1.0' + + def __init__(self): + super(BillerAPI, self).__init__( + topic=cfg.CONF.rater_topic, + default_version=self.BASE_RPC_VERSION) + + +biller_api = BillerAPI() diff --git a/billingstack/biller/service.py b/billingstack/biller/service.py new file mode 100644 index 0000000..9ff366a --- /dev/null +++ b/billingstack/biller/service.py @@ -0,0 +1,28 @@ +from oslo.config import cfg +from billingstack.openstack.common import log as logging +from billingstack.openstack.common.rpc import service as rpc_service +from billingstack.biller import storage + + +cfg.CONF.import_opt('biller_topic', 'billingstack.biller.rpcapi') 
+cfg.CONF.import_opt('host', 'billingstack.netconf') +cfg.CONF.import_opt('state_path', 'billingstack.paths') + +LOG = logging.getLogger(__name__) + + +class Service(rpc_service.Service): + """ + Biller service + """ + def __init__(self, *args, **kwargs): + kwargs.update( + host=cfg.CONF.host, + topic=cfg.CONF.biller_topic, + ) + + super(Service, self).__init__(*args, **kwargs) + + def start(self): + self.storage_conn = storage.get_connection() + super(Service, self).start() diff --git a/billingstack/biller/storage/__init__.py b/billingstack/biller/storage/__init__.py new file mode 100644 index 0000000..0ad0cfc --- /dev/null +++ b/billingstack/biller/storage/__init__.py @@ -0,0 +1,17 @@ +from oslo.config import cfg +from billingstack.storage import base + + +class StorageEngine(base.StorageEngine): + """Base class for the biller storage""" + __plugin_ns__ = 'billingstack.biller.storage' + + +class Connection(base.Connection): + """Define the base API for biller storage""" + + +def get_connection(): + name = cfg.CONF['service:biller'].storage_driver + plugin = StorageEngine.get_plugin(name, invoke_on_load=True) + return plugin.get_connection() diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py new file mode 100644 index 0000000..8f688ef --- /dev/null +++ b/billingstack/biller/storage/impl_sqlalchemy.py @@ -0,0 +1,51 @@ +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+""" +A Usage plugin using sqlalchemy... +""" +from oslo.config import cfg +from sqlalchemy import Column +from sqlalchemy import Unicode, Float, DateTime +from sqlalchemy.ext.declarative import declarative_base + +from billingstack.openstack.common import log as logging +from billingstack.biller.storage import Connection, StorageEngine +from billingstack.sqlalchemy.types import UUID +from billingstack.sqlalchemy import api, model_base, session + + +# DB SCHEMA +BASE = declarative_base(cls=model_base.ModelBase) + +LOG = logging.getLogger(__name__) + + +cfg.CONF.register_group(cfg.OptGroup( + name='biller:sqlalchemy', title='Config for biller sqlalchemy plugin')) + + +cfg.CONF.register_opts(session.SQLOPTS, group='biller:sqlalchemy') + + +class SQLAlchemyEngine(StorageEngine): + def get_connection(self): + return Connection() + + +class Connection(Connection, api.HelpersMixin): + def __init__(self): + self.setup('biller:sqlalchemy') + + def base(self): + return BASE diff --git a/billingstack/collector/__init__.py b/billingstack/collector/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/billingstack/collector/rpcapi.py b/billingstack/collector/rpcapi.py new file mode 100644 index 0000000..3d4d99e --- /dev/null +++ b/billingstack/collector/rpcapi.py @@ -0,0 +1,22 @@ +from oslo.config import cfg + +from billingstack.openstack.common.rpc import proxy + +rpcapi_opts = [ + cfg.StrOpt('collector_topic', default='collector', + help='the topic collector nodes listen on') +] + +cfg.CONF.register_opts(rpcapi_opts) + + +class CollectorAPI(proxy.RpcProxy): + BASE_RPC_VERSION = '1.0' + + def __init__(self): + super(CollectorAPI, self).__init__( + topic=cfg.CONF.collector_topic, + default_version=self.BASE_RPC_VERSION) + + +collector_api = CollectorAPI() diff --git a/billingstack/payment_gateway/service.py b/billingstack/collector/service.py similarity index 96% rename from billingstack/payment_gateway/service.py rename to billingstack/collector/service.py 
index 9ac3c20..768297e 100644 --- a/billingstack/payment_gateway/service.py +++ b/billingstack/collector/service.py @@ -10,7 +10,7 @@ cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('pg_topic', 'billingstack.payment_gateway.rpcapi') +cfg.CONF.import_opt('collector_topic', 'billingstack.collector.rpcapi') cfg.CONF.import_opt('state_path', 'billingstack.paths') diff --git a/billingstack/tests/biller/__init__.py b/billingstack/tests/biller/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/billingstack/tests/collector/__init__.py b/billingstack/tests/collector/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/setup.py b/setup.py index 7b0f3db..3bfd702 100644 --- a/setup.py +++ b/setup.py @@ -51,6 +51,8 @@ 'bin/billingstack-db-manage', 'bin/billingstack-manage', 'bin/billingstack-central', + 'bin/billingstack-biller', + 'bin/billingstack-collector', 'bin/billingstack-rater' ], cmdclass=common_setup.get_cmdclass(), From 098689379c1cc720b96767b3a53e08acc82391d1 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:26:13 -0700 Subject: [PATCH 113/182] Missing bin --- bin/billingstack-biller | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 bin/billingstack-biller diff --git a/bin/billingstack-biller b/bin/billingstack-biller new file mode 100644 index 0000000..f1bc941 --- /dev/null +++ b/bin/billingstack-biller @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +import sys +import eventlet +from oslo.config import cfg +from billingstack.openstack.common import log as logging +from billingstack.openstack.common import service +from billingstack import utils +from billingstack.central import service as central_service + +eventlet.monkey_patch() + +utils.read_config('billingstack', sys.argv) + +logging.setup('billingstack') + +launcher = service.launch(central_service.Service(), + cfg.CONF['service:biller'].workers) +launcher.wait() From f56c330fe4d4cf04ce77534a44ffe74884b54da8 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:28:05 -0700 Subject: [PATCH 114/182] Use the right module --- bin/billingstack-biller | 4 ++-- bin/billingstack-collector | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bin/billingstack-biller b/bin/billingstack-biller index f1bc941..0478d16 100644 --- a/bin/billingstack-biller +++ b/bin/billingstack-biller @@ -19,7 +19,7 @@ from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common import service from billingstack import utils -from billingstack.central import service as central_service +from billingstack.biller import service as biller_service eventlet.monkey_patch() @@ -27,6 +27,6 @@ utils.read_config('billingstack', sys.argv) logging.setup('billingstack') -launcher = service.launch(central_service.Service(), +launcher = service.launch(biller_service.Service(), cfg.CONF['service:biller'].workers) launcher.wait() diff --git a/bin/billingstack-collector b/bin/billingstack-collector index 6ff723b..4a73d2c 100644 --- a/bin/billingstack-collector +++ b/bin/billingstack-collector @@ -19,7 +19,7 @@ from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common import service from billingstack import utils -from billingstack.central import 
service as central_service +from billingstack.collector import service as collector_service eventlet.monkey_patch() @@ -27,6 +27,6 @@ utils.read_config('billingstack', sys.argv) logging.setup('billingstack') -launcher = service.launch(central_service.Service(), +launcher = service.launch(collector_service.Service(), cfg.CONF['service:collector'].workers) launcher.wait() From c27e61030074caf8732ef83043c95805978348ce Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:35:29 -0700 Subject: [PATCH 115/182] Remove identity artifacts and add biller storage --- setup.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 3bfd702..8920e20 100644 --- a/setup.py +++ b/setup.py @@ -60,6 +60,9 @@ [billingstack.storage] sqlalchemy = billingstack.storage.impl_sqlalchemy:SQLAlchemyStorage + [billingstack.biller.storage] + sqlalchemy = billingstack.biller.storage.impl_sqlalchemy:SQLAlchemyEngine + [billingstack.rater.storage] sqlalchemy = billingstack.rater.storage.impl_sqlalchemy:SQLAlchemyEngine @@ -71,12 +74,6 @@ pg-list = billingstack.manage.provider:ProvidersList [billingstack.api.v1.extensions] - - [billingstack.identity_plugin] - sqlalchemy = billingstack.identity.impl_sqlalchemy:SQLAlchemyPlugin - - [billingstack.token_plugin] - memcache = billingstack.identity.token_memcache:MemcachePlugin """), classifiers=[ 'Development Status :: 3 - Alpha', From 27fe077531f545d2a38fad58f8035cc7d80c8134 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:35:48 -0700 Subject: [PATCH 116/182] biller not rater --- billingstack/biller/rpcapi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/billingstack/biller/rpcapi.py b/billingstack/biller/rpcapi.py index a2d7b62..85f69e4 100644 --- a/billingstack/biller/rpcapi.py +++ b/billingstack/biller/rpcapi.py @@ -15,7 +15,7 @@ class BillerAPI(proxy.RpcProxy): def __init__(self): super(BillerAPI, self).__init__( - topic=cfg.CONF.rater_topic, + 
topic=cfg.CONF.biller_topic, default_version=self.BASE_RPC_VERSION) From f81976d99c1ddce851f9c994abfbfaff8501467f Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:36:05 -0700 Subject: [PATCH 117/182] collector not central --- billingstack/collector/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py index 768297e..b302d30 100644 --- a/billingstack/collector/service.py +++ b/billingstack/collector/service.py @@ -21,7 +21,7 @@ class Service(rpc_service.Service): def __init__(self, *args, **kwargs): kwargs.update( host=cfg.CONF.host, - topic=cfg.CONF.central_topic, + topic=cfg.CONF.collector_topic, ) super(Service, self).__init__(*args, **kwargs) From a8b5a3b98f128c127452b1e3e6200e1c459ea2e3 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:36:16 -0700 Subject: [PATCH 118/182] Add plugin name and missing options --- billingstack/biller/storage/impl_sqlalchemy.py | 4 +++- billingstack/collector/__init__.py | 12 ++++++++++++ billingstack/rater/storage/impl_sqlalchemy.py | 2 ++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py index 8f688ef..95e31d6 100644 --- a/billingstack/biller/storage/impl_sqlalchemy.py +++ b/billingstack/biller/storage/impl_sqlalchemy.py @@ -39,9 +39,11 @@ class SQLAlchemyEngine(StorageEngine): + __plugin_name__ = 'sqlalchemy' + def get_connection(self): - return Connection() + return Connection() class Connection(Connection, api.HelpersMixin): def __init__(self): diff --git a/billingstack/collector/__init__.py b/billingstack/collector/__init__.py index e69de29..322c56b 100644 --- a/billingstack/collector/__init__.py +++ b/billingstack/collector/__init__.py @@ -0,0 +1,12 @@ +from oslo.config import cfg + +cfg.CONF.register_group(cfg.OptGroup( + name='service:collector', title="Configuration for collector 
Service" +)) + +cfg.CONF.register_opts([ + cfg.IntOpt('workers', default=None, + help='Number of worker processes to spawn'), + cfg.StrOpt('storage-driver', default='sqlalchemy', + help='The storage driver to use'), +], group='service:collector') diff --git a/billingstack/rater/storage/impl_sqlalchemy.py b/billingstack/rater/storage/impl_sqlalchemy.py index 54e6ebd..f6a503f 100644 --- a/billingstack/rater/storage/impl_sqlalchemy.py +++ b/billingstack/rater/storage/impl_sqlalchemy.py @@ -56,6 +56,8 @@ class Usage(BASE, model_base.BaseMixin): class SQLAlchemyEngine(StorageEngine): + __plugin_name__ = 'sqlalchemy' + def get_connection(self): return Connection() From 313b9d1603fc05cdfb807041cc10e0f26f50d783 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:43:09 -0700 Subject: [PATCH 119/182] Typos --- etc/billingstack/billingstack.conf.sample | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index 8b17409..15db807 100644 --- a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -85,14 +85,15 @@ allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack. 
#max_retries = 10 #retry_interval = 10 + ################################################# -# Rating service +# Rater service ################################################# #----------------------- # SQLAlchemy Storage #----------------------- -[rating:sqlalchemy] +[rater:sqlalchemy] # Database connection string - to configure options for a given implementation # like sqlalchemy or other see below #database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack From 2b605025c13f8fe8ccfa144c636e1733416854a9 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 12:55:22 -0700 Subject: [PATCH 120/182] Fixup resync --- tools/{resync_rating.py => resync_biller.py} | 10 +++++----- tools/{resync_identity.py => resync_rater.py} | 16 ++++++++-------- 2 files changed, 13 insertions(+), 13 deletions(-) rename tools/{resync_rating.py => resync_biller.py} (64%) rename tools/{resync_identity.py => resync_rater.py} (51%) diff --git a/tools/resync_rating.py b/tools/resync_biller.py similarity index 64% rename from tools/resync_rating.py rename to tools/resync_biller.py index cf29a6a..4bc76f4 100644 --- a/tools/resync_rating.py +++ b/tools/resync_biller.py @@ -7,20 +7,20 @@ from billingstack.openstack.common import log as logging from billingstack import service -from billingstack.rating.storage import get_connection +from billingstack.biller.storage import get_connection LOG = logging.getLogger(__name__) -cfg.CONF.import_opt('storage_driver', 'billingstack.rating.storage', - group='service:rating') +cfg.CONF.import_opt('storage_driver', 'billingstack.biller.storage', + group='service:biller') cfg.CONF.import_opt('state_path', 'billingstack.paths') cfg.CONF.import_opt('database_connection', - 'billingstack.rating.storage.impl_sqlalchemy', - group='rating:sqlalchemy') + 'billingstack.biller.storage.impl_sqlalchemy', + group='biller:sqlalchemy') if __name__ == '__main__': diff --git a/tools/resync_identity.py b/tools/resync_rater.py similarity 
index 51% rename from tools/resync_identity.py rename to tools/resync_rater.py index dc64885..d3c43fc 100644 --- a/tools/resync_identity.py +++ b/tools/resync_rater.py @@ -7,26 +7,26 @@ from billingstack.openstack.common import log as logging from billingstack import service -from billingstack.identity.base import IdentityPlugin +from billingstack.rater.storage import get_connection LOG = logging.getLogger(__name__) -cfg.CONF.import_opt('storage_driver', 'billingstack.identity.api', - group='service:identity_api') +cfg.CONF.import_opt('storage_driver', 'billingstack.rater.storage', + group='service:rater') cfg.CONF.import_opt('state_path', 'billingstack.paths') cfg.CONF.import_opt('database_connection', - 'billingstack.identity.impl_sqlalchemy', - group='identity:sqlalchemy') + 'billingstack.rater.storage.impl_sqlalchemy', + group='rater:sqlalchemy') if __name__ == '__main__': service.prepare_service(sys.argv) - plugin = IdentityPlugin.get_plugin()() + connection = get_connection() LOG.info("Re-Syncing database") - plugin.teardown_schema() - plugin.setup_schema() + connection.teardown_schema() + connection.setup_schema() From 486a78ef64009ac321384a66f1d28c5a19aec49c Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Apr 2013 13:59:20 -0700 Subject: [PATCH 121/182] Split up storage into each service --- .../biller/storage/impl_sqlalchemy.py | 1 + billingstack/central/service.py | 2 +- billingstack/central/storage/__init__.py | 57 +++++++++++++++++++ .../storage/impl_sqlalchemy/__init__.py | 11 ++-- .../impl_sqlalchemy/migration/README.md | 0 .../impl_sqlalchemy/migration/__init__.py | 0 .../impl_sqlalchemy/migration/alembic.ini | 0 .../migration/alembic_migrations/__init__.py | 0 .../migration/alembic_migrations/env.py | 6 +- .../alembic_migrations/script.py.mako | 0 .../alembic_migrations/versions/README | 0 .../storage/impl_sqlalchemy/migration/cli.py | 4 +- .../storage/impl_sqlalchemy/models.py | 0 billingstack/manage/database.py | 2 +- 
billingstack/storage/__init__.py | 47 --------------- billingstack/storage/base.py | 2 - billingstack/tests/base.py | 8 +-- billingstack/tests/storage/__init__.py | 2 +- billingstack/tests/storage/test_sqlalchemy.py | 2 +- etc/billingstack/billingstack.conf.sample | 2 +- setup.py | 4 +- tools/load_samples.py | 2 +- tools/resync_billingstack.py | 4 +- 23 files changed, 83 insertions(+), 73 deletions(-) create mode 100644 billingstack/central/storage/__init__.py rename billingstack/{ => central}/storage/impl_sqlalchemy/__init__.py (98%) rename billingstack/{ => central}/storage/impl_sqlalchemy/migration/README.md (100%) rename billingstack/{ => central}/storage/impl_sqlalchemy/migration/__init__.py (100%) rename billingstack/{ => central}/storage/impl_sqlalchemy/migration/alembic.ini (100%) rename billingstack/{ => central}/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py (100%) rename billingstack/{ => central}/storage/impl_sqlalchemy/migration/alembic_migrations/env.py (92%) rename billingstack/{ => central}/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako (100%) rename billingstack/{ => central}/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README (100%) rename billingstack/{ => central}/storage/impl_sqlalchemy/migration/cli.py (96%) rename billingstack/{ => central}/storage/impl_sqlalchemy/models.py (100%) diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py index 95e31d6..ee61f51 100644 --- a/billingstack/biller/storage/impl_sqlalchemy.py +++ b/billingstack/biller/storage/impl_sqlalchemy.py @@ -45,6 +45,7 @@ def get_connection(self): return Connection() + class Connection(Connection, api.HelpersMixin): def __init__(self): self.setup('biller:sqlalchemy') diff --git a/billingstack/central/service.py b/billingstack/central/service.py index c774bc6..4b22cfd 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -2,7 +2,7 @@ from 
oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service -from billingstack import storage +from billingstack.central import storage cfg.CONF.import_opt('central_topic', 'billingstack.central.rpcapi') diff --git a/billingstack/central/storage/__init__.py b/billingstack/central/storage/__init__.py new file mode 100644 index 0000000..c62cee2 --- /dev/null +++ b/billingstack/central/storage/__init__.py @@ -0,0 +1,57 @@ +# Copyright 2012 Managed I.T. +# +# Author: Kiall Mac Innes +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# Copied: Moniker +from oslo.config import cfg +from billingstack.openstack.common import log as logging +from billingstack.storage import base + + +LOG = logging.getLogger(__name__) + + +class StorageEngine(base.StorageEngine): + __plugin_type__ = 'central' + __plugin_ns__ = 'billingstack.central.storage' + + +class Connection(base.Connection): + pass + + +def get_engine(engine_name): + """ + Return the engine class from the provided engine name + """ + return StorageEngine.get_plugin(engine_name, invoke_on_load=True) + + +def get_connection(): + engine = get_engine(cfg.CONF['service:central'].storage_driver) + return engine.get_connection() + + +def setup_schema(): + """ Create the DB - Used for testing purposes """ + LOG.debug("Setting up Schema") + connection = get_connection() + connection.setup_schema() + + +def teardown_schema(): + """ Reset the DB to default - Used for testing purposes """ + LOG.debug("Tearing down Schema") + connection = get_connection() + connection.teardown_schema() diff --git a/billingstack/storage/impl_sqlalchemy/__init__.py b/billingstack/central/storage/impl_sqlalchemy/__init__.py similarity index 98% rename from billingstack/storage/impl_sqlalchemy/__init__.py rename to billingstack/central/storage/impl_sqlalchemy/__init__.py index dc84027..9223177 100644 --- a/billingstack/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/central/storage/impl_sqlalchemy/__init__.py @@ -19,16 +19,17 @@ from billingstack.sqlalchemy import utils as db_utils, api from billingstack.sqlalchemy.session import SQLOPTS from billingstack.storage import base -from billingstack.storage.impl_sqlalchemy import models +from billingstack.central.storage import Connection, StorageEngine +from billingstack.central.storage.impl_sqlalchemy import models LOG = logging.getLogger(__name__) cfg.CONF.register_group(cfg.OptGroup( - name='storage:sqlalchemy', title="Configuration for SQLAlchemy Storage" + name='central:sqlalchemy', title="Configuration for SQLAlchemy 
Storage" )) -cfg.CONF.register_opts(SQLOPTS, group='storage:sqlalchemy') +cfg.CONF.register_opts(SQLOPTS, group='central:sqlalchemy') def filter_merchant_by_join(query, cls, criterion): @@ -41,14 +42,14 @@ def filter_merchant_by_join(query, cls, criterion): return query -class SQLAlchemyStorage(base.StorageEngine): +class SQLAlchemyEngine(StorageEngine): __plugin_name__ = 'sqlalchemy' def get_connection(self): return Connection(self.name) -class Connection(base.Connection, api.HelpersMixin): +class Connection(Connection, api.HelpersMixin): """ SQLAlchemy connection """ diff --git a/billingstack/storage/impl_sqlalchemy/migration/README.md b/billingstack/central/storage/impl_sqlalchemy/migration/README.md similarity index 100% rename from billingstack/storage/impl_sqlalchemy/migration/README.md rename to billingstack/central/storage/impl_sqlalchemy/migration/README.md diff --git a/billingstack/storage/impl_sqlalchemy/migration/__init__.py b/billingstack/central/storage/impl_sqlalchemy/migration/__init__.py similarity index 100% rename from billingstack/storage/impl_sqlalchemy/migration/__init__.py rename to billingstack/central/storage/impl_sqlalchemy/migration/__init__.py diff --git a/billingstack/storage/impl_sqlalchemy/migration/alembic.ini b/billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini similarity index 100% rename from billingstack/storage/impl_sqlalchemy/migration/alembic.ini rename to billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini diff --git a/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py similarity index 100% rename from billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py rename to billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py diff --git a/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/env.py 
b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py similarity index 92% rename from billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/env.py rename to billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py index 419eff8..5469d1b 100644 --- a/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/env.py +++ b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py @@ -22,7 +22,7 @@ from alembic import context from sqlalchemy import create_engine, pool -from billingstack.storage.impl_sqlalchemy.models import ModelBase +from billingstack.central.storage.impl_sqlalchemy.models import ModelBase # this is the Alembic Config object, which provides @@ -50,7 +50,7 @@ def run_migrations_offline(): script output. """ - context.configure(url=billingstack_config['storage:sqlalchemy'] + context.configure(url=billingstack_config['central:sqlalchemy'] .database_connection) with context.begin_transaction(): @@ -65,7 +65,7 @@ def run_migrations_online(): """ engine = create_engine( - billingstack_config['storage:sqlalchemy'].database_connection, + billingstack_config['central:sqlalchemy'].database_connection, poolclass=pool.NullPool) connection = engine.connect() diff --git a/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako similarity index 100% rename from billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako rename to billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako diff --git a/billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README similarity index 100% rename from billingstack/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README rename to 
billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README diff --git a/billingstack/storage/impl_sqlalchemy/migration/cli.py b/billingstack/central/storage/impl_sqlalchemy/migration/cli.py similarity index 96% rename from billingstack/storage/impl_sqlalchemy/migration/cli.py rename to billingstack/central/storage/impl_sqlalchemy/migration/cli.py index 51d4240..5d81a37 100644 --- a/billingstack/storage/impl_sqlalchemy/migration/cli.py +++ b/billingstack/central/storage/impl_sqlalchemy/migration/cli.py @@ -33,7 +33,7 @@ ] CONF = cfg.ConfigOpts() -CONF.register_opts(_db_opts, 'storage:sqlalchemy') +CONF.register_opts(_db_opts, 'central:sqlalchemy') def do_alembic_command(config, cmd, *args, **kwargs): @@ -116,7 +116,7 @@ def main(): ) config.set_main_option( 'script_location', - 'billingstack.storage.impl_sqlalchemy.migration:alembic_migrations') + 'billingstack.central.storage.impl_sqlalchemy.migration:alembic_migrations') # attach the Quantum conf to the Alembic conf config.billingstack_config = CONF diff --git a/billingstack/storage/impl_sqlalchemy/models.py b/billingstack/central/storage/impl_sqlalchemy/models.py similarity index 100% rename from billingstack/storage/impl_sqlalchemy/models.py rename to billingstack/central/storage/impl_sqlalchemy/models.py diff --git a/billingstack/manage/database.py b/billingstack/manage/database.py index 85905e1..caff3c6 100644 --- a/billingstack/manage/database.py +++ b/billingstack/manage/database.py @@ -30,7 +30,7 @@ cfg.CONF.import_opt( 'database_connection', 'billingstack.storage.impl_sqlalchemy', - group='storage:sqlalchemy') + group='central:sqlalchemy') class DatabaseCommand(Command): diff --git a/billingstack/storage/__init__.py b/billingstack/storage/__init__.py index fb573c5..e69de29 100644 --- a/billingstack/storage/__init__.py +++ b/billingstack/storage/__init__.py @@ -1,47 +0,0 @@ -# Copyright 2012 Managed I.T. 
-# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# Copied: Moniker -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.storage.base import StorageEngine - -LOG = logging.getLogger(__name__) - - -def get_engine(engine_name): - """ - Return the engine class from the provided engine name - """ - return StorageEngine.get_plugin(engine_name, invoke_on_load=True) - - -def get_connection(): - engine = get_engine(cfg.CONF['service:central'].storage_driver) - return engine.get_connection() - - -def setup_schema(): - """ Create the DB - Used for testing purposes """ - LOG.debug("Setting up Schema") - connection = get_connection() - connection.setup_schema() - - -def teardown_schema(): - """ Reset the DB to default - Used for testing purposes """ - LOG.debug("Tearing down Schema") - connection = get_connection() - connection.teardown_schema() diff --git a/billingstack/storage/base.py b/billingstack/storage/base.py index fc64945..9d09d06 100644 --- a/billingstack/storage/base.py +++ b/billingstack/storage/base.py @@ -20,8 +20,6 @@ class StorageEngine(Plugin): """ Base class for storage engines """ - - __plugin_ns__ = 'billingstack.storage' __plugin_type__ = 'storage' def get_connection(self): diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index a54f6b9..3d19959 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -6,7 +6,7 @@ # from 
billingstack.openstack.common import policy from billingstack import exceptions from billingstack import samples -from billingstack import storage +from billingstack.central import storage from billingstack.api import service as api_service from billingstack.central import service as central_service from billingstack.openstack.common.context import RequestContext, \ @@ -16,8 +16,8 @@ cfg.CONF.import_opt('storage_driver', 'billingstack.central', group='service:central') cfg.CONF.import_opt('database_connection', - 'billingstack.storage.impl_sqlalchemy', - group='storage:sqlalchemy') + 'billingstack.central.storage.impl_sqlalchemy', + group='central:sqlalchemy') class AssertMixin(object): @@ -107,7 +107,7 @@ def setUp(self): self.config( database_connection='sqlite://', - group='storage:sqlalchemy' + group='central:sqlalchemy' ) self.samples = samples.get_samples() diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index c369093..cdd440f 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -14,7 +14,7 @@ # License for the specific language governing permissions and limitations # under the License. 
from billingstack.openstack.common import log as logging -from billingstack.storage.impl_sqlalchemy import models +from billingstack.central.storage.impl_sqlalchemy import models from billingstack.tests.base import TestCase diff --git a/billingstack/tests/storage/test_sqlalchemy.py b/billingstack/tests/storage/test_sqlalchemy.py index a1aa60f..44ae77c 100644 --- a/billingstack/tests/storage/test_sqlalchemy.py +++ b/billingstack/tests/storage/test_sqlalchemy.py @@ -26,5 +26,5 @@ class SqlalchemyStorageTest(StorageDriverTestCase): def setUp(self): self.config(database_connection='sqlite://', - group='storage:sqlalchemy') + group='central:sqlalchemy') super(SqlalchemyStorageTest, self).setUp() diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample index 15db807..30c9e3f 100644 --- a/etc/billingstack/billingstack.conf.sample +++ b/etc/billingstack/billingstack.conf.sample @@ -36,7 +36,7 @@ allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack. 
#----------------------- # SQLAlchemy Storage #----------------------- -[storage:sqlalchemy] +[central:sqlalchemy] # Database connection string - to configure options for a given implementation # like sqlalchemy or other see below #database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack diff --git a/setup.py b/setup.py index 8920e20..3aacb88 100644 --- a/setup.py +++ b/setup.py @@ -57,8 +57,8 @@ ], cmdclass=common_setup.get_cmdclass(), entry_points=textwrap.dedent(""" - [billingstack.storage] - sqlalchemy = billingstack.storage.impl_sqlalchemy:SQLAlchemyStorage + [billingstack.central.storage] + sqlalchemy = billingstack.central.storage.impl_sqlalchemy:SQLAlchemyEngine [billingstack.biller.storage] sqlalchemy = billingstack.biller.storage.impl_sqlalchemy:SQLAlchemyEngine diff --git a/tools/load_samples.py b/tools/load_samples.py index 9e1b02d..4c288dd 100644 --- a/tools/load_samples.py +++ b/tools/load_samples.py @@ -18,7 +18,7 @@ cfg.CONF.import_opt( 'database_connection', 'billingstack.storage.impl_sqlalchemy', - group='storage:sqlalchemy') + group='central:sqlalchemy') SAMPLES = get_samples() diff --git a/tools/resync_billingstack.py b/tools/resync_billingstack.py index dcaadf7..e4caa95 100644 --- a/tools/resync_billingstack.py +++ b/tools/resync_billingstack.py @@ -21,8 +21,8 @@ cfg.CONF.import_opt('database_connection', - 'billingstack.storage.impl_sqlalchemy', - group='storage:sqlalchemy') + 'billingstack.central.storage.impl_sqlalchemy', + group='central:sqlalchemy') if __name__ == '__main__': service.prepare_service(sys.argv) From 26d332ea0942246c69b1769e827413b5c52900f3 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 26 Apr 2013 23:56:57 -0700 Subject: [PATCH 122/182] Import get_connection --- tools/load_samples.py | 4 ++-- tools/resync_billingstack.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/load_samples.py b/tools/load_samples.py index 4c288dd..0c109aa 100644 --- 
a/tools/load_samples.py +++ b/tools/load_samples.py @@ -6,7 +6,7 @@ from billingstack import service from billingstack.samples import get_samples -from billingstack.storage import get_connection +from billingstack.central.storage import get_connection from billingstack.openstack.common.context import get_admin_context @@ -17,7 +17,7 @@ cfg.CONF.import_opt( 'database_connection', - 'billingstack.storage.impl_sqlalchemy', + 'billingstack.central.storage.impl_sqlalchemy', group='central:sqlalchemy') diff --git a/tools/resync_billingstack.py b/tools/resync_billingstack.py index e4caa95..2f0cc61 100644 --- a/tools/resync_billingstack.py +++ b/tools/resync_billingstack.py @@ -7,7 +7,7 @@ from billingstack.openstack.common import log as logging from billingstack import service -from billingstack.storage import get_connection +from billingstack.central.storage import get_connection LOG = logging.getLogger(__name__) From 3f15c6521ac2d7ac6c45d76f229e158a6f19603e Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 27 Apr 2013 09:18:02 +0200 Subject: [PATCH 123/182] Bug fixes --- billingstack/manage/database.py | 7 +------ billingstack/payment_gateway/__init__.py | 2 +- billingstack/payment_gateway/base.py | 2 +- 3 files changed, 3 insertions(+), 8 deletions(-) diff --git a/billingstack/manage/database.py b/billingstack/manage/database.py index caff3c6..06c00cf 100644 --- a/billingstack/manage/database.py +++ b/billingstack/manage/database.py @@ -16,7 +16,7 @@ from oslo.config import cfg from billingstack.openstack.common import log from billingstack.manage.base import Command -from billingstack.storage import get_connection +from billingstack.central.storage import get_connection LOG = log.getLogger(__name__) @@ -27,11 +27,6 @@ 'billingstack.central', group='service:central') -cfg.CONF.import_opt( - 'database_connection', - 'billingstack.storage.impl_sqlalchemy', - group='central:sqlalchemy') - class DatabaseCommand(Command): """ diff --git 
a/billingstack/payment_gateway/__init__.py b/billingstack/payment_gateway/__init__.py index 6325db4..3e47b87 100644 --- a/billingstack/payment_gateway/__init__.py +++ b/billingstack/payment_gateway/__init__.py @@ -3,7 +3,7 @@ from billingstack import exceptions from billingstack.openstack.common import log from billingstack.payment_gateway.base import Provider -from billingstack.storage import get_connection +from billingstack.central.storage import get_connection LOG = log.getLogger(__name__) diff --git a/billingstack/payment_gateway/base.py b/billingstack/payment_gateway/base.py index 4c0c973..f60ce2f 100644 --- a/billingstack/payment_gateway/base.py +++ b/billingstack/payment_gateway/base.py @@ -1,4 +1,4 @@ -from billingstack.storage import get_connection +from billingstack.central.storage import get_connection from billingstack.plugin import Plugin From 2f1b986e8ffa7b284381cad5a5befd0be37f26b4 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 27 Apr 2013 09:28:39 +0200 Subject: [PATCH 124/182] PEP8 --- billingstack/api/v1/resources.py | 2 +- billingstack/central/storage/impl_sqlalchemy/migration/cli.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 142d1db..87a4317 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -703,7 +703,7 @@ def list_usages(merchant_id, q=[]): @signature([models.Usage], str, str) def get_usage(merchant_id, usage_id): row = rater_api.get_usage(request.environ['context'], - usage_id) + usage_id) return models.Usage.from_db(row) diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/cli.py b/billingstack/central/storage/impl_sqlalchemy/migration/cli.py index 5d81a37..24008e1 100644 --- a/billingstack/central/storage/impl_sqlalchemy/migration/cli.py +++ b/billingstack/central/storage/impl_sqlalchemy/migration/cli.py @@ -116,7 +116,8 @@ def main(): ) config.set_main_option( 'script_location', 
- 'billingstack.central.storage.impl_sqlalchemy.migration:alembic_migrations') + 'billingstack.central.storage' + '.impl_sqlalchemy.migration:alembic_migrations') # attach the Quantum conf to the Alembic conf config.billingstack_config = CONF From 9b29f1ce5027d7d1bcb57444ad68bbf570831ab9 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 9 May 2013 23:48:28 +0200 Subject: [PATCH 125/182] Bring project up to OS standards Change-Id: Id076465fb00bdb3214704b2f0cadf7ceea5dc31a --- .gitreview | 4 +++ README.md | 26 +++---------------- .../biller/storage/impl_sqlalchemy.py | 3 --- .../storage/impl_sqlalchemy/__init__.py | 1 - billingstack/tests/payment_gateway/base.py | 6 ++++- setup.py | 9 ++++--- tools/pip-requires | 20 +++++++------- tools/test-requires | 13 +++++----- 8 files changed, 35 insertions(+), 47 deletions(-) create mode 100644 .gitreview diff --git a/.gitreview b/.gitreview new file mode 100644 index 0000000..dc4afc4 --- /dev/null +++ b/.gitreview @@ -0,0 +1,4 @@ +[gerrit] +host=review.openstack.org +port=29418 +project=stackforge/billingstack.git diff --git a/README.md b/README.md index 2efbd4c..fe84973 100644 --- a/README.md +++ b/README.md @@ -1,26 +1,8 @@ -billingstack +BillingStack ============ -What is Billingstack? -BillingStack is a convergence of efforts done in the previous started Bufunfa -project and the BillingStack Grails (Java) version by Luis Gervaso. - -The goal is to provide a free alternative to anyone that has a need for a -subscription based billingsystem with features compared to other popular ones. - - -Features include: -* Plans - Collections of Products like Compute Gold or similar -* Products - A Compute server for example -* Merchants - Multi-Tenancy via Merchants where the Merchant is the Tenant of - the application and can have multiple Customers, it's own - settings etc. - -* Plugin based Storage API - The Storage API is pluggable and other backends - can be added. -* REST API - Currently based on Pecan for V1. 
- +Site: www.billingstack.org Docs: http://billingstack.rtfd.org -Github: http://github.com/billingstack/billingstack -Bugs: http://github.com/billingstack/billingstack \ No newline at end of file +Github: http://github.com/stackforge/billingstack +Bugs: http://launchpad.net/billingstack diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py index ee61f51..82205ea 100644 --- a/billingstack/biller/storage/impl_sqlalchemy.py +++ b/billingstack/biller/storage/impl_sqlalchemy.py @@ -15,13 +15,10 @@ A Usage plugin using sqlalchemy... """ from oslo.config import cfg -from sqlalchemy import Column -from sqlalchemy import Unicode, Float, DateTime from sqlalchemy.ext.declarative import declarative_base from billingstack.openstack.common import log as logging from billingstack.biller.storage import Connection, StorageEngine -from billingstack.sqlalchemy.types import UUID from billingstack.sqlalchemy import api, model_base, session diff --git a/billingstack/central/storage/impl_sqlalchemy/__init__.py b/billingstack/central/storage/impl_sqlalchemy/__init__.py index 9223177..a73ef88 100644 --- a/billingstack/central/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/central/storage/impl_sqlalchemy/__init__.py @@ -18,7 +18,6 @@ from billingstack import utils as common_utils from billingstack.sqlalchemy import utils as db_utils, api from billingstack.sqlalchemy.session import SQLOPTS -from billingstack.storage import base from billingstack.central.storage import Connection, StorageEngine from billingstack.central.storage.impl_sqlalchemy import models diff --git a/billingstack/tests/payment_gateway/base.py b/billingstack/tests/payment_gateway/base.py index 14b516b..23b3bf9 100644 --- a/billingstack/tests/payment_gateway/base.py +++ b/billingstack/tests/payment_gateway/base.py @@ -23,14 +23,18 @@ def setUp(self): def test_create_account(self): expected = self.pgp.create_account(self.customer) + actual = 
self.pgp.get_account(self.customer['id']) + self.assertEqual(expected['id'], actual['id']) def test_list_accounts(self): - expected = self.pgp.create_account(self.customer) + self.pgp.create_account(self.customer) actual = self.pgp.list_accounts() + self.assertLen(0, actual) def test_get_account(self): expected = self.pgp.create_account(self.customer) actual = self.pgp.get_account(self.customer['id']) + self.assertEqual(expected['id'], actual['id']) def test_delete_account(self): data = self.pgp.create_account(self.customer) diff --git a/setup.py b/setup.py index 3aacb88..8e0d01b 100644 --- a/setup.py +++ b/setup.py @@ -58,13 +58,16 @@ cmdclass=common_setup.get_cmdclass(), entry_points=textwrap.dedent(""" [billingstack.central.storage] - sqlalchemy = billingstack.central.storage.impl_sqlalchemy:SQLAlchemyEngine + sqlalchemy = billingstack.central.storage.impl_sqlalchemy\ + :SQLAlchemyEngine [billingstack.biller.storage] - sqlalchemy = billingstack.biller.storage.impl_sqlalchemy:SQLAlchemyEngine + sqlalchemy = billingstack.biller.storage.impl_sqlalchemy\ + :SQLAlchemyEngine [billingstack.rater.storage] - sqlalchemy = billingstack.rater.storage.impl_sqlalchemy:SQLAlchemyEngine + sqlalchemy = billingstack.rater.storage.impl_sqlalchemy\ + :SQLAlchemyEngine [billingstack.payment_gateway] dummy = billingstack.payment_gateway.dummy:DummyProvider diff --git a/tools/pip-requires b/tools/pip-requires index 32be233..d5ccae9 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -1,17 +1,15 @@ +# This file is managed by openstack-depends +argparse +cliff +eventlet +extras Flask +iso8601>=0.1.4 +oslo.config>=1.1.0 Paste PasteDeploy -eventlet -stevedore -argparse --e hg+https://bitbucket.org/cdevienne/wsme/#egg=wsme pycountry -cliff -#http://tarballs.openstack.org/oslo-config/oslo-config-master.tar.gz#egg=oslo-config -oslo.config>=1.1.0 - -# From OpenStack Common routes>=1.12.3 -iso8601>=0.1.4 +stevedore WebOb>=1.0.8 -extras +wsme diff --git a/tools/test-requires 
b/tools/test-requires index 4736efb..2510bec 100644 --- a/tools/test-requires +++ b/tools/test-requires @@ -1,11 +1,12 @@ -unittest2 -nose -openstack.nose_plugin -nosehtmloutput +# This file is managed by openstack-depends +Babel>=0.9.6 coverage +docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. mock mox -Babel>=0.9.6 +nose +nosehtmloutput +openstack.nose_plugin sphinx sphinxcontrib-httpdomain -docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. +unittest2 From fa2a43cd3c1d701b33b0d09fdb8475604dc7756e Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 10 May 2013 10:40:35 +0200 Subject: [PATCH 126/182] Add HACKING Change-Id: I65829ef496dd958699e21cbda9ee16a4e8f9f9cf --- HACKING.rst | 253 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 253 insertions(+) create mode 100644 HACKING.rst diff --git a/HACKING.rst b/HACKING.rst new file mode 100644 index 0000000..5153db1 --- /dev/null +++ b/HACKING.rst @@ -0,0 +1,253 @@ +BillingStack Style Commandments +=============================== + +- Step 1: Read http://www.python.org/dev/peps/pep-0008/ +- Step 2: Read http://www.python.org/dev/peps/pep-0008/ again +- Step 3: Read on + + +General +------- +- Put two newlines between top-level code (funcs, classes, etc) +- Put one newline between methods in classes and anywhere else +- Do not write "except:", use "except Exception:" at the very least +- Include your name with TODOs as in "#TODO(termie)" +- Do not name anything the same name as a built-in or reserved word +- Use the "is not" operator when testing for unequal identities. Example:: + + if not X is Y: # BAD, intended behavior is ambiguous + pass + + if X is not Y: # OKAY, intuitive + pass + +- Use the "not in" operator for evaluating membership in a collection. 
Example:: + + if not X in Y: # BAD, intended behavior is ambiguous + pass + + if X not in Y: # OKAY, intuitive + pass + + if not (X in Y or X in Z): # OKAY, still better than all those 'not's + pass + + +Imports +------- +- Do not make relative imports +- Order your imports by the full module path +- Organize your imports according to the following template + +Example:: + + # vim: tabstop=4 shiftwidth=4 softtabstop=4 + {{stdlib imports in human alphabetical order}} + \n + {{third-party lib imports in human alphabetical order}} + \n + {{billingstack imports in human alphabetical order}} + \n + \n + {{begin your code}} + + +Human Alphabetical Order Examples +--------------------------------- +Example:: + + import httplib + import logging + import random + import StringIO + import time + import unittest + + import eventlet + import webob.exc + + from billingstack.api import v1 + from billingstack.central import rpc_api + from billingstack.rater import rpc_api + + +Docstrings +---------- + +Docstrings are required for all functions and methods. + +Docstrings should ONLY use triple-double-quotes (``"""``) + +Single-line docstrings should NEVER have extraneous whitespace +between enclosing triple-double-quotes. + +**INCORRECT** :: + + """ There is some whitespace between the enclosing quotes :( """ + +**CORRECT** :: + + """There is no whitespace between the enclosing quotes :)""" + +Docstrings that span more than one line should look like this: + +Example:: + + """ + Start the docstring on the line following the opening triple-double-quote + + If you are going to describe parameters and return values, use Sphinx, the + appropriate syntax is as follows. + + :param foo: the foo parameter + :param bar: the bar parameter + :returns: return_type -- description of the return value + :returns: description of the return value + :raises: AttributeError, KeyError + """ + +**DO NOT** leave an extra newline before the closing triple-double-quote. 
+ + +Dictionaries/Lists +------------------ +If a dictionary (dict) or list object is longer than 80 characters, its items +should be split with newlines. Embedded iterables should have their items +indented. Additionally, the last item in the dictionary should have a trailing +comma. This increases readability and simplifies future diffs. + +Example:: + + my_dictionary = { + "image": { + "name": "Just a Snapshot", + "size": 2749573, + "properties": { + "user_id": 12, + "arch": "x86_64", + }, + "things": [ + "thing_one", + "thing_two", + ], + "status": "ACTIVE", + }, + } + + +Calling Methods +--------------- +Calls to methods 80 characters or longer should format each argument with +newlines. This is not a requirement, but a guideline:: + + unnecessarily_long_function_name('string one', + 'string two', + kwarg1=constants.ACTIVE, + kwarg2=['a', 'b', 'c']) + + +Rather than constructing parameters inline, it is better to break things up:: + + list_of_strings = [ + 'what_a_long_string', + 'not as long', + ] + + dict_of_numbers = { + 'one': 1, + 'two': 2, + 'twenty four': 24, + } + + object_one.call_a_method('string three', + 'string four', + kwarg1=list_of_strings, + kwarg2=dict_of_numbers) + + +Internationalization (i18n) Strings +----------------------------------- +In order to support multiple languages, we have a mechanism to support +automatic translations of exception and log strings. + +Example:: + + msg = _("An error occurred") + raise HTTPBadRequest(explanation=msg) + +If you have a variable to place within the string, first internationalize the +template string then do the replacement. + +Example:: + + msg = _("Missing parameter: %s") % ("flavor",) + LOG.error(msg) + +If you have multiple variables to place in the string, use keyword parameters. +This helps our translators reorder parameters when needed. 
+ +Example:: + + msg = _("The server with id %(s_id)s has no key %(m_key)s") + LOG.error(msg % {"s_id": "1234", "m_key": "imageId"}) + + +Creating Unit Tests +------------------- +For every new feature, unit tests should be created that both test and +(implicitly) document the usage of said feature. If submitting a patch for a +bug that had no unit test, a new passing unit test should be added. If a +submitted bug fix does have a unit test, be sure to add a new one that fails +without the patch and passes with the patch. + + +Commit Messages +--------------- +Using a common format for commit messages will help keep our git history +readable. Follow these guidelines: + + First, provide a brief summary of 50 characters or less. Summaries + of greater than 72 characters will be rejected by the gate. + + The first line of the commit message should provide an accurate + description of the change, not just a reference to a bug or + blueprint. It must be followed by a single blank line. + + Following your brief summary, provide a more detailed description of + the patch, manually wrapping the text at 72 characters. This + description should provide enough detail that one does not have to + refer to external resources to determine its high-level functionality. + + Once you use 'git review', two lines will be appended to the commit + message: a blank line followed by a 'Change-Id'. This is important + to correlate this commit with a specific review in Gerrit, and it + should not be modified. + +For further information on constructing high quality commit messages, +and how to split up commits into a series of changes, consult the +project wiki: + + http://wiki.openstack.org/GitCommitMessages + + +openstack-common +---------------- + +A number of modules from openstack-common are imported into the project. + +These modules are "incubating" in openstack-common and are kept in sync +with the help of openstack-common's update.py script.
See: + + http://wiki.openstack.org/CommonLibrary#Incubation + +The copy of the code should never be directly modified here. Please +always update openstack-common first and then run the script to copy +the changes across. + + +Logging +------- +Use __name__ as the name of your logger and name your module-level logger +objects 'LOG':: + + LOG = logging.getLogger(__name__) From 1c5f95240210e8e4f9de621b164f30514fe21a1f Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 10 May 2013 14:51:30 +0200 Subject: [PATCH 127/182] Moved to stackforge Change-Id: I723cfb56cf9a114aa3fcd0095f0acacec8b225bf --- doc/source/developing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/developing.rst b/doc/source/developing.rst index 6e4e991..73ff1ff 100644 --- a/doc/source/developing.rst +++ b/doc/source/developing.rst @@ -32,7 +32,7 @@ There are 2 ways to setting up a development environment 1. Clone the repo - see :ref:`cloning-git` for generic information:: - $ git clone http://github.com/billingstack/billingstack + $ git clone http://github.com/stackforge/billingstack 2. 
Change directory to the BS directory:: From 0316d2653506d8f26630bade328be2e93d417706 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 11 May 2013 15:46:48 +0200 Subject: [PATCH 128/182] Re-factor tests * Silo tests for services * Use testr * Better support for services in tests Change-Id: I77cad98c973f04dc0384e2c78cbca30089b9755b --- .testr.conf | 4 + billingstack/tests/api/base.py | 9 +- billingstack/tests/api/v1/test_currency.py | 2 +- billingstack/tests/api/v1/test_customer.py | 3 +- .../tests/api/v1/test_invoice_state.py | 2 +- billingstack/tests/api/v1/test_language.py | 2 +- billingstack/tests/api/v1/test_merchant.py | 3 +- .../tests/api/v1/test_payment_method.py | 2 +- billingstack/tests/api/v1/test_plan.py | 2 +- billingstack/tests/api/v1/test_product.py | 2 +- billingstack/tests/base.py | 159 ++++-- billingstack/tests/biller/storage/__init__.py | 0 billingstack/tests/central/__init__.py | 8 + .../tests/central/storage/__init__.py | 0 billingstack/tests/central/storage/base.py | 496 +++++++++++++++++ .../{ => central}/storage/test_sqlalchemy.py | 2 +- .../tests/collector/storage/__init__.py | 0 billingstack/tests/rater/storage/__init__.py | 0 billingstack/tests/storage/__init__.py | 501 ------------------ tools/patch_tox_venv.py | 5 +- tools/test-requires | 3 + tox.ini | 58 +- 22 files changed, 668 insertions(+), 595 deletions(-) create mode 100644 .testr.conf create mode 100644 billingstack/tests/biller/storage/__init__.py create mode 100644 billingstack/tests/central/__init__.py create mode 100644 billingstack/tests/central/storage/__init__.py create mode 100644 billingstack/tests/central/storage/base.py rename billingstack/tests/{ => central}/storage/test_sqlalchemy.py (93%) create mode 100644 billingstack/tests/collector/storage/__init__.py create mode 100644 billingstack/tests/rater/storage/__init__.py diff --git a/.testr.conf b/.testr.conf new file mode 100644 index 0000000..8737d52 --- /dev/null +++ b/.testr.conf @@ -0,0 +1,4 @@ +[DEFAULT] 
+test_command=OS_STDOUT_CAPTURE=1 OS_STDERR_CAPTURE=1 ${PYTHON:-python} -m subunit.run discover -t ./ billingstack/tests $LISTOPT $IDOPTION +test_id_option=--load-list $IDFILE +test_list_option=--list \ No newline at end of file diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index 037b525..72455df 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -21,7 +21,7 @@ from billingstack.api.auth import NoAuthContextMiddleware from billingstack.openstack.common import jsonutils as json from billingstack.openstack.common import log -from billingstack.tests.base import TestCase +from billingstack.tests.base import ServiceTestCase LOG = log.getLogger(__name__) @@ -147,7 +147,7 @@ def delete(self, path, status_code=204, headers=None, q=[], **params): return response -class FunctionalTest(TestCase, APITestMixin): +class FunctionalTest(ServiceTestCase, APITestMixin): """ billingstack.api base test """ @@ -155,8 +155,7 @@ def setUp(self): super(FunctionalTest, self).setUp() # NOTE: Needs to be started after the db schema is created - self.central_service = self.get_central_service() - self.central_service.start() + self.start_service() self.setSamples() @@ -166,5 +165,5 @@ def setUp(self): self.client = self.app.test_client() def tearDown(self): - self.central_service.stop() + self.services.central.stop() super(FunctionalTest, self).tearDown() diff --git a/billingstack/tests/api/v1/test_currency.py b/billingstack/tests/api/v1/test_currency.py index dea455e..34a7ec0 100644 --- a/billingstack/tests/api/v1/test_currency.py +++ b/billingstack/tests/api/v1/test_currency.py @@ -63,5 +63,5 @@ def test_delete_currency(self): url = self.item_path(currency['name']) self.delete(url) - data = self.central_service.list_currencies(self.admin_ctxt) + data = self.services.central.list_currencies(self.admin_ctxt) self.assertLen(1, data) diff --git a/billingstack/tests/api/v1/test_customer.py 
b/billingstack/tests/api/v1/test_customer.py index 44ef24e..5deca31 100644 --- a/billingstack/tests/api/v1/test_customer.py +++ b/billingstack/tests/api/v1/test_customer.py @@ -79,4 +79,5 @@ def test_delete_customer(self): url = self.item_path(self.merchant['id'], customer['id']) self.delete(url) - self.assertLen(0, self.central_service.list_customers(self.admin_ctxt)) + self.assertLen(0, self.services.central.list_customers( + self.admin_ctxt)) diff --git a/billingstack/tests/api/v1/test_invoice_state.py b/billingstack/tests/api/v1/test_invoice_state.py index 97c74fa..a0f31ad 100644 --- a/billingstack/tests/api/v1/test_invoice_state.py +++ b/billingstack/tests/api/v1/test_invoice_state.py @@ -64,5 +64,5 @@ def test_delete_invoice_state(self): url = self.item_path(state['name']) self.delete(url) - data = self.central_service.list_invoice_states(self.admin_ctxt) + data = self.services.central.list_invoice_states(self.admin_ctxt) self.assertLen(0, data) diff --git a/billingstack/tests/api/v1/test_language.py b/billingstack/tests/api/v1/test_language.py index 70a329f..8ed1195 100644 --- a/billingstack/tests/api/v1/test_language.py +++ b/billingstack/tests/api/v1/test_language.py @@ -63,5 +63,5 @@ def test_delete_language(self): url = self.item_path(language['name']) self.delete(url) - data = self.central_service.list_languages(self.admin_ctxt) + data = self.services.central.list_languages(self.admin_ctxt) self.assertLen(1, data) diff --git a/billingstack/tests/api/v1/test_merchant.py b/billingstack/tests/api/v1/test_merchant.py index 647d929..d53e2eb 100644 --- a/billingstack/tests/api/v1/test_merchant.py +++ b/billingstack/tests/api/v1/test_merchant.py @@ -57,4 +57,5 @@ def test_update_merchant(self): def test_delete_merchant(self): self.delete('merchants/' + self.merchant['id']) - self.assertLen(0, self.central_service.list_merchants(self.admin_ctxt)) + self.assertLen(0, self.services.central.list_merchants( + self.admin_ctxt)) diff --git 
a/billingstack/tests/api/v1/test_payment_method.py b/billingstack/tests/api/v1/test_payment_method.py index 17e4a87..ef9b724 100644 --- a/billingstack/tests/api/v1/test_payment_method.py +++ b/billingstack/tests/api/v1/test_payment_method.py @@ -96,4 +96,4 @@ def test_delete_payment_method(self): self.customer['id'], method['id']) self.delete(url) - self.assertLen(0, self.central_service.list_products(self.admin_ctxt)) + self.assertLen(0, self.services.central.list_products(self.admin_ctxt)) diff --git a/billingstack/tests/api/v1/test_plan.py b/billingstack/tests/api/v1/test_plan.py index 0dd93d5..10172ae 100644 --- a/billingstack/tests/api/v1/test_plan.py +++ b/billingstack/tests/api/v1/test_plan.py @@ -64,4 +64,4 @@ def test_delete_plan(self): url = self.item_path(self.merchant['id'], plan['id']) self.delete(url) - self.assertLen(0, self.central_service.list_plans(self.admin_ctxt)) + self.assertLen(0, self.services.central.list_plans(self.admin_ctxt)) diff --git a/billingstack/tests/api/v1/test_product.py b/billingstack/tests/api/v1/test_product.py index c539c0f..c794ccd 100644 --- a/billingstack/tests/api/v1/test_product.py +++ b/billingstack/tests/api/v1/test_product.py @@ -67,4 +67,4 @@ def test_delete_product(self): url = self.item_path(self.merchant['id'], product['id']) self.delete(url) - self.assertLen(0, self.central_service.list_products(self.admin_ctxt)) + self.assertLen(0, self.services.central.list_products(self.admin_ctxt)) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index 3d19959..b0f52f8 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -6,18 +6,14 @@ # from billingstack.openstack.common import policy from billingstack import exceptions from billingstack import samples -from billingstack.central import storage -from billingstack.api import service as api_service -from billingstack.central import service as central_service from billingstack.openstack.common.context import RequestContext, \ 
get_admin_context +from billingstack.openstack.common import importutils -cfg.CONF.import_opt('storage_driver', 'billingstack.central', - group='service:central') -cfg.CONF.import_opt('database_connection', - 'billingstack.central.storage.impl_sqlalchemy', - group='central:sqlalchemy') +cfg.CONF.import_opt( + 'rpc_backend', + 'billingstack.openstack.common.rpc.impl_fake') class AssertMixin(object): @@ -92,67 +88,130 @@ def get_context(self, **kw): return RequestContext(**kw) -class TestCase(BaseTestCase): - def setUp(self): - super(TestCase, self).setUp() +class Services(dict): + def __getattr__(self, name): + if name not in self: + raise AttributeError(name) + return self[name] - self.config( - rpc_backend='billingstack.openstack.common.rpc.impl_fake', - ) + def __setattr__(self, name, value): + self[name] = value - self.config( - storage_driver='sqlalchemy', - group='service:central' - ) - self.config( - database_connection='sqlite://', - group='central:sqlalchemy' - ) +class TestCase(BaseTestCase): + def setUp(self): + super(TestCase, self).setUp() self.samples = samples.get_samples() + self.admin_ctxt = self.get_admin_context() - storage.setup_schema() + self.config(rpc_backend='billingstack.openstack.common.rpc.impl_fake') - self.admin_ctxt = self.get_admin_context() + # NOTE: No services up by default + self.services = Services() def tearDown(self): # NOTE: Currently disabled #policy.reset() + storage = self.get_storage_connection() storage.teardown_schema() super(TestCase, self).tearDown() - def get_storage_driver(self): + def get_storage_connection(self, service='central'): + storage = importutils.import_module('billingstack.%s.storage' % + service) connection = storage.get_connection() return connection - def get_central_service(self): - return central_service.Service() + def get_service(self, service='central'): - def get_api_service(self): - return api_service.Service() + svc = importutils.import_class('billingstack.%s.service.Service' % + service) + 
return svc() + + def start_service(self, service='central'): + self.config( + storage_driver='sqlalchemy', + group='service:%s' % service + ) + + self.config( + database_connection='sqlite://', + group='%s:sqlalchemy' % service + ) + + storage = self.get_storage_connection(service=service) + storage.setup_schema() + + svc = self.get_service(service=service) + svc.start() + self.services[service] = svc def setSamples(self): _, self.currency = self.create_currency() _, self.language = self.create_language() _, self.merchant = self.create_merchant() + def _account_defaults(self, values): + # NOTE: Do defaults + if not 'currency_name' in values: + values['currency_name'] = self.currency['name'] + + if not 'language_name' in values: + values['language_name'] = self.language['name'] + + def create_language(self, fixture=0, values={}, **kw): + raise NotImplementedError + + def create_currency(self, fixture=0, values={}, **kw): + raise NotImplementedError + + def crealfte_invoice_state(self, fixture=0, values={}, **kw): + raise NotImplementedError + + def pg_provider_register(self, fixture=0, values={}, **kw): + raise NotImplementedError + + def create_merchant(self, fixture=0, values={}, **kw): + raise NotImplementedError + + def create_pg_config(self, merchant_id, fixture=0, values={}, + **kw): + raise NotImplementedError + + def create_customer(self, merchant_id, fixture=0, values={}, **kw): + raise NotImplementedError + + def create_payment_method(self, customer_id, fixture=0, values={}, **kw): + raise NotImplementedError + + def user_add(self, merchant_id, fixture=0, values={}, **kw): + raise NotImplementedError + + def create_product(self, merchant_id, fixture=0, values={}, **kw): + raise NotImplementedError + + def create_plan(self, merchant_id, fixture=0, values={}, **kw): + raise NotImplementedError + + +class ServiceTestCase(TestCase): def create_language(self, fixture=0, values={}, **kw): fixture = self.get_fixture('language', fixture, values) ctxt = 
kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_language(ctxt, fixture, - **kw) + return fixture, self.services.central.create_language(ctxt, fixture, + **kw) def create_currency(self, fixture=0, values={}, **kw): fixture = self.get_fixture('currency', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_currency(ctxt, fixture, - **kw) + return fixture, self.services.central.create_currency(ctxt, fixture, + **kw) def create_invoice_state(self, fixture=0, values={}, **kw): fixture = self.get_fixture('invoice_state', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_invoice_state( + return fixture, self.services.central.create_invoice_state( ctxt, fixture, **kw) def pg_provider_register(self, fixture=0, values={}, **kw): @@ -161,61 +220,61 @@ def pg_provider_register(self, fixture=0, values={}, **kw): fixture['methods'] = [self.get_fixture('pg_method')] ctxt = kw.pop('context', self.admin_ctxt) - data = self.central_service.pg_provider_register(ctxt, fixture, **kw) + data = self.services.central.pg_provider_register(ctxt, fixture, **kw) return fixture, data - def _account_defaults(self, values): - # NOTE: Do defaults - if not 'currency_name' in values: - values['currency_name'] = self.currency['name'] - - if not 'language_name' in values: - values['language_name'] = self.language['name'] - def create_merchant(self, fixture=0, values={}, **kw): fixture = self.get_fixture('merchant', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) self._account_defaults(fixture) - return fixture, self.central_service.create_merchant( + return fixture, self.services.central.create_merchant( ctxt, fixture, **kw) def create_pg_config(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_pg_config( + 
return fixture, self.services.central.create_pg_config( ctxt, merchant_id, fixture, **kw) def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) self._account_defaults(fixture) - return fixture, self.central_service.create_customer( + return fixture, self.services.central.create_customer( ctxt, merchant_id, fixture, **kw) def create_payment_method(self, customer_id, fixture=0, values={}, **kw): fixture = self.get_fixture('payment_method', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_payment_method( + return fixture, self.services.central.create_payment_method( ctxt, customer_id, fixture, **kw) def user_add(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('user', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.user_add( + return fixture, self.services.central.user_add( ctxt, merchant_id, fixture, **kw) def create_product(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('product', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_product( + return fixture, self.services.central.create_product( ctxt, merchant_id, fixture, **kw) def create_plan(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('plan', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.central_service.create_plan( + return fixture, self.services.central.create_plan( ctxt, merchant_id, fixture, **kw) + + +class StorageTestCase(TestCase): + def setUp(self): + super(StorageTestCase, self).setUp() + self.storage_conn = self.get_storage_connection() + self.storage_conn.setup_schema() + self.setSamples() diff --git a/billingstack/tests/biller/storage/__init__.py b/billingstack/tests/biller/storage/__init__.py new file mode 100644 
index 0000000..e69de29 diff --git a/billingstack/tests/central/__init__.py b/billingstack/tests/central/__init__.py new file mode 100644 index 0000000..9163550 --- /dev/null +++ b/billingstack/tests/central/__init__.py @@ -0,0 +1,8 @@ +from oslo.config import cfg + + +cfg.CONF.import_opt('storage_driver', 'billingstack.central', + group='service:central') +cfg.CONF.import_opt('database_connection', + 'billingstack.central.storage.impl_sqlalchemy', + group='central:sqlalchemy') diff --git a/billingstack/tests/central/storage/__init__.py b/billingstack/tests/central/storage/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/billingstack/tests/central/storage/base.py b/billingstack/tests/central/storage/base.py new file mode 100644 index 0000000..716aecf --- /dev/null +++ b/billingstack/tests/central/storage/base.py @@ -0,0 +1,496 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from billingstack.openstack.common import log as logging +from billingstack.central.storage.impl_sqlalchemy import models +from billingstack.tests.base import StorageTestCase + + +LOG = logging.getLogger(__name__) + + +UUID = 'caf771fc-6b05-4891-bee1-c2a48621f57b' + + +class StorageDriverTestCase(StorageTestCase): + __test__ = False + + def create_language(self, fixture=0, values={}, **kw): + fixture = self.get_fixture('language', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_language(ctxt, fixture, **kw) + + def create_currency(self, fixture=0, values={}, **kw): + fixture = self.get_fixture('currency', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_currency(ctxt, fixture, **kw) + + def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): + methods = [self.get_fixture('pg_method')] or methods + if not 'methods' in values: + values['methods'] = methods + + fixture = self.get_fixture('pg_provider', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + + data = self.storage_conn.pg_provider_register( + ctxt, fixture.copy(), **kw) + + return fixture, data + + def create_merchant(self, fixture=0, values={}, **kw): + fixture = self.get_fixture('merchant', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + + self._account_defaults(fixture) + + return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) + + def create_pg_config(self, merchant_id, fixture=0, values={}, + **kw): + fixture = self.get_fixture('pg_config', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_pg_config( + ctxt, merchant_id, fixture, **kw) + + def create_customer(self, merchant_id, fixture=0, values={}, **kw): + fixture = self.get_fixture('customer', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + self._account_defaults(fixture) + return fixture, 
self.storage_conn.create_customer( + ctxt, merchant_id, fixture, **kw) + + def create_payment_method(self, customer_id, fixture=0, + values={}, **kw): + fixture = self.get_fixture('payment_method', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_payment_method( + ctxt, customer_id, fixture, **kw) + + def create_product(self, merchant_id, fixture=0, values={}, **kw): + fixture = self.get_fixture('product', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_product( + ctxt, merchant_id, fixture, **kw) + + def create_plan(self, merchant_id, fixture=0, values={}, **kw): + fixture = self.get_fixture('plan', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_plan( + ctxt, merchant_id, fixture, **kw) + + # Currencies + def test_create_currency(self): + self.assertDuplicate(self.create_currency) + + # Languages + def test_create_language(self): + self.assertDuplicate(self.create_language) + + def test_set_properties(self): + fixture, data = self.create_product(self.merchant['id']) + + metadata = {"random": True} + self.storage_conn.set_properties(data['id'], metadata, + cls=models.Product) + + metadata.update({'foo': 1, 'bar': 2}) + self.storage_conn.set_properties(data['id'], metadata, + cls=models.Product) + + actual = self.storage_conn.get_product(self.admin_ctxt, data['id']) + self.assertLen(6, actual['properties']) + + # Payment Gateways + def test_pg_provider_register(self): + fixture, actual = self.pg_provider_register() + self.assertEqual(fixture['name'], actual['name']) + self.assertEqual(fixture['title'], actual['title']) + self.assertEqual(fixture['description'], actual['description']) + self.assertData(fixture['methods'][0], actual['methods'][0]) + + def test_pg_provider_register_different_methods(self): + # Add a Global method + method1 = {'type': 'creditcard', 'name': 'mastercard'} + method2 = 
{'type': 'creditcard', 'name': 'amex'} + method3 = {'type': 'creditcard', 'name': 'visa'} + + provider = {'name': 'noop', 'methods': [method1, method2, method3]} + + provider = self.storage_conn.pg_provider_register( + self.admin_ctxt, provider) + + # TODO(ekarls): Make this more extensive? + self.assertLen(3, provider['methods']) + + def test_get_pg_provider(self): + _, expected = self.pg_provider_register() + actual = self.storage_conn.get_pg_provider(self.admin_ctxt, + expected['id']) + self.assertData(expected, actual) + + def test_get_pg_provider_missing(self): + self.assertMissing(self.storage_conn.get_pg_provider, + self.admin_ctxt, UUID) + + def test_pg_provider_deregister(self): + _, data = self.pg_provider_register() + self.storage_conn.pg_provider_deregister(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.pg_provider_deregister, + self.admin_ctxt, data['id']) + + def test_pg_provider_deregister_missing(self): + self.assertMissing(self.storage_conn.pg_provider_deregister, + self.admin_ctxt, UUID) + + # Payment Gateway Configuration + def test_create_pg_config(self): + _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + fixture, data = self.create_pg_config( + self.merchant['id'], values=values) + + self.assertData(fixture, data) + + def test_get_pg_config(self): + _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + + fixture, data = self.create_pg_config( + self.merchant['id'], values=values) + + def test_get_pg_config_missing(self): + self.assertMissing(self.storage_conn.get_pg_config, + self.admin_ctxt, UUID) + + def test_update_pg_config(self): + _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + + fixture, data = self.create_pg_config( + self.merchant['id'], values=values) + + fixture['properties'] = {"api": 1} + updated = self.storage_conn.update_pg_config( + self.admin_ctxt, data['id'], fixture) + + 
self.assertData(fixture, updated) + + def test_update_pg_config_missing(self): + _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + + fixture, data = self.create_pg_config( + self.merchant['id'], values=values) + + self.assertMissing(self.storage_conn.update_pg_config, + self.admin_ctxt, UUID, {}) + + def test_delete_pg_config(self): + _, provider = self.pg_provider_register() + + values = {'provider_id': provider['id']} + + fixture, data = self.create_pg_config( + self.merchant['id'], values=values) + + self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_pg_config, + self.admin_ctxt, data['id']) + + def test_delete_pg_config_missing(self): + self.assertMissing(self.storage_conn.delete_pg_config, + self.admin_ctxt, UUID) + + # PaymentMethod + def test_create_payment_method(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) + _, customer = self.create_customer(self.merchant['id']) + + # Setup PaymentMethod + values = { + 'provider_config_id': config['id']} + + fixture, data = self.create_payment_method( + customer['id'], values=values) + self.assertData(fixture, data) + + def test_get_payment_method(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) + _, customer = self.create_customer(self.merchant['id']) + + # Setup PaymentMethod + values = { + 'provider_config_id': config['id']} + + _, expected = self.create_payment_method( + customer['id'], values=values) + actual = self.storage_conn.get_payment_method(self.admin_ctxt, + expected['id']) + self.assertData(expected, actual) + + # TODO(ekarlso): Make this test more extensive? 
+ def test_list_payment_methods(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) + + values = { + 'provider_config_id': config['id']} + + # Add two Customers with some methods + _, customer1 = self.create_customer(self.merchant['id']) + self.create_payment_method( + customer1['id'], values=values) + rows = self.storage_conn.list_payment_methods( + self.admin_ctxt, + criterion={'customer_id': customer1['id']}) + self.assertLen(1, rows) + + _, customer2 = self.create_customer(self.merchant['id']) + self.create_payment_method( + customer2['id'], values=values) + self.create_payment_method( + customer2['id'], values=values) + rows = self.storage_conn.list_payment_methods( + self.admin_ctxt, + criterion={'customer_id': customer2['id']}) + self.assertLen(2, rows) + + def test_get_payment_method_missing(self): + self.assertMissing(self.storage_conn.get_payment_method, + self.admin_ctxt, UUID) + + def test_update_payment_method(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) + _, customer = self.create_customer(self.merchant['id']) + + # Setup PaymentMethod + values = { + 'provider_config_id': config['id']} + + fixture, data = self.create_payment_method( + customer['id'], values=values) + + fixture['identifier'] = 1 + updated = self.storage_conn.update_payment_method(self.admin_ctxt, + data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_payment_method_missing(self): + self.assertMissing(self.storage_conn.update_payment_method, + self.admin_ctxt, UUID, {}) + + def test_delete_payment_method(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + _, config = self.create_pg_config( + self.merchant['id'], values={'provider_id': provider['id']}) + _, customer = 
self.create_customer(self.merchant['id']) + + # Setup PaymentMethod + values = { + 'provider_config_id': config['id']} + + fixture, data = self.create_payment_method( + customer['id'], values=values) + + self.storage_conn.delete_payment_method(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_payment_method, + self.admin_ctxt, data['id']) + + def test_delete_payment_method_missing(self): + self.assertMissing(self.storage_conn.delete_payment_method, + self.admin_ctxt, UUID) + + # Merchant + def test_create_merchant(self): + fixture, data = self.create_merchant() + self.assertData(fixture, data) + + def test_get_merchant(self): + _, expected = self.create_merchant() + actual = self.storage_conn.get_merchant( + self.admin_ctxt, expected['id']) + self.assertData(expected, actual) + + def test_get_merchant_missing(self): + self.assertMissing(self.storage_conn.get_merchant, + self.admin_ctxt, UUID) + + def test_update_merchant(self): + fixture, data = self.create_merchant() + + fixture['name'] = 'test' + updated = self.storage_conn.update_merchant( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_merchant_missing(self): + self.assertMissing(self.storage_conn.update_merchant, + self.admin_ctxt, UUID, {}) + + def test_delete_merchant(self): + self.storage_conn.delete_merchant(self.admin_ctxt, self.merchant['id']) + self.assertMissing(self.storage_conn.get_merchant, + self.admin_ctxt, self.merchant['id']) + + def test_delete_merchant_missing(self): + self.assertMissing(self.storage_conn.delete_merchant, + self.admin_ctxt, UUID) + + # Customer + def test_create_customer(self): + fixture, data = self.create_customer(self.merchant['id']) + assert data['default_info'] == {} + assert data['contact_info'] == [] + self.assertData(fixture, data) + + def test_create_customer_with_contact_info(self): + contact_fixture = self.get_fixture('contact_info') + customer_fixture, data = self.create_customer( + 
self.merchant['id'], + values={'contact_info': contact_fixture}) + self.assertData(customer_fixture, data) + self.assertData(contact_fixture, data['default_info']) + self.assertData(contact_fixture, data['contact_info'][0]) + + def test_get_customer(self): + _, expected = self.create_customer(self.merchant['id']) + actual = self.storage_conn.get_customer( + self.admin_ctxt, expected['id']) + self.assertData(expected, actual) + + def test_get_customer_missing(self): + self.assertMissing(self.storage_conn.get_customer, + self.admin_ctxt, UUID) + + def test_update_customer(self): + fixture, data = self.create_customer(self.merchant['id']) + + fixture['name'] = 'test' + updated = self.storage_conn.update_customer( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_customer_missing(self): + self.assertMissing(self.storage_conn.update_customer, + self.admin_ctxt, UUID, {}) + + def test_delete_customer(self): + _, data = self.create_customer(self.merchant['id']) + self.storage_conn.delete_customer(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_customer, + self.admin_ctxt, data['id']) + + def test_delete_customer_missing(self): + self.assertMissing(self.storage_conn.delete_customer, + self.admin_ctxt, UUID) + + # Products + def test_create_product(self): + f, data = self.create_product(self.merchant['id']) + self.assertData(f, data) + + def test_get_product(self): + f, expected = self.create_product(self.merchant['id']) + actual = self.storage_conn.get_product(self.admin_ctxt, expected['id']) + self.assertData(expected, actual) + + def test_get_product_missing(self): + self.assertMissing(self.storage_conn.get_product, + self.admin_ctxt, UUID) + + def test_update_product(self): + fixture, data = self.create_product(self.merchant['id']) + + fixture['name'] = 'test' + updated = self.storage_conn.update_product( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def 
test_update_product_missing(self): + self.assertMissing(self.storage_conn.update_product, + self.admin_ctxt, UUID, {}) + + def test_delete_product(self): + fixture, data = self.create_product(self.merchant['id']) + self.storage_conn.delete_product(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_product, + self.admin_ctxt, data['id']) + + def test_delete_product_missing(self): + self.assertMissing(self.storage_conn.delete_product, + self.admin_ctxt, UUID) + + # Plan + def test_create_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + self.assertData(fixture, data) + + def test_get_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + actual = self.storage_conn.get_plan(self.admin_ctxt, data['id']) + + # FIXME(ekarlso): This should test the actual items also? But atm + # there's am error that if the value is int when getting added it's + # string when returned... + self.assertEqual(data['name'], actual['name']) + self.assertEqual(data['title'], actual['title']) + self.assertEqual(data['description'], actual['description']) + + def test_get_plan_missing(self): + self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, UUID) + + def test_update_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + + fixture['name'] = 'test' + updated = self.storage_conn.update_plan( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_plan_missing(self): + self.assertMissing(self.storage_conn.update_plan, + self.admin_ctxt, UUID, {}) + + def test_delete_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + self.storage_conn.delete_plan(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_plan, + self.admin_ctxt, data['id']) + + def test_delete_plan_missing(self): + self.assertMissing(self.storage_conn.delete_plan, + self.admin_ctxt, UUID) diff --git a/billingstack/tests/storage/test_sqlalchemy.py 
b/billingstack/tests/central/storage/test_sqlalchemy.py similarity index 93% rename from billingstack/tests/storage/test_sqlalchemy.py rename to billingstack/tests/central/storage/test_sqlalchemy.py index 44ae77c..dc846b6 100644 --- a/billingstack/tests/storage/test_sqlalchemy.py +++ b/billingstack/tests/central/storage/test_sqlalchemy.py @@ -16,7 +16,7 @@ # # Copied: billingstack from billingstack.openstack.common import log as logging -from billingstack.tests.storage import StorageDriverTestCase +from billingstack.tests.central.storage.base import StorageDriverTestCase LOG = logging.getLogger(__name__) diff --git a/billingstack/tests/collector/storage/__init__.py b/billingstack/tests/collector/storage/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/billingstack/tests/rater/storage/__init__.py b/billingstack/tests/rater/storage/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py index cdd440f..e69de29 100644 --- a/billingstack/tests/storage/__init__.py +++ b/billingstack/tests/storage/__init__.py @@ -1,501 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from billingstack.openstack.common import log as logging -from billingstack.central.storage.impl_sqlalchemy import models -from billingstack.tests.base import TestCase - - -LOG = logging.getLogger(__name__) - - -UUID = 'caf771fc-6b05-4891-bee1-c2a48621f57b' - - -class StorageDriverTestCase(TestCase): - __test__ = False - - def setUp(self): - super(StorageDriverTestCase, self).setUp() - self.storage_conn = self.get_storage_driver() - self.setSamples() - - def create_language(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('language', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_language(ctxt, fixture, **kw) - - def create_currency(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('currency', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_currency(ctxt, fixture, **kw) - - def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): - methods = [self.get_fixture('pg_method')] or methods - if not 'methods' in values: - values['methods'] = methods - - fixture = self.get_fixture('pg_provider', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - data = self.storage_conn.pg_provider_register( - ctxt, fixture.copy(), **kw) - - return fixture, data - - def create_merchant(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('merchant', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - self._account_defaults(fixture) - - return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) - - def create_pg_config(self, merchant_id, fixture=0, values={}, - **kw): - fixture = self.get_fixture('pg_config', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_pg_config( - ctxt, merchant_id, fixture, **kw) - - def create_customer(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('customer', fixture, values) - 
ctxt = kw.pop('context', self.admin_ctxt) - self._account_defaults(fixture) - return fixture, self.storage_conn.create_customer( - ctxt, merchant_id, fixture, **kw) - - def create_payment_method(self, customer_id, fixture=0, - values={}, **kw): - fixture = self.get_fixture('payment_method', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_payment_method( - ctxt, customer_id, fixture, **kw) - - def create_product(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('product', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_product( - ctxt, merchant_id, fixture, **kw) - - def create_plan(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('plan', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_plan( - ctxt, merchant_id, fixture, **kw) - - # Currencies - def test_create_currency(self): - self.assertDuplicate(self.create_currency) - - # Languages - def test_create_language(self): - self.assertDuplicate(self.create_language) - - def test_set_properties(self): - fixture, data = self.create_product(self.merchant['id']) - - metadata = {"random": True} - self.storage_conn.set_properties(data['id'], metadata, - cls=models.Product) - - metadata.update({'foo': 1, 'bar': 2}) - self.storage_conn.set_properties(data['id'], metadata, - cls=models.Product) - - actual = self.storage_conn.get_product(self.admin_ctxt, data['id']) - self.assertLen(6, actual['properties']) - - # Payment Gateways - def test_pg_provider_register(self): - fixture, actual = self.pg_provider_register() - self.assertEqual(fixture['name'], actual['name']) - self.assertEqual(fixture['title'], actual['title']) - self.assertEqual(fixture['description'], actual['description']) - self.assertData(fixture['methods'][0], actual['methods'][0]) - - def test_pg_provider_register_different_methods(self): - # 
Add a Global method - method1 = {'type': 'creditcard', 'name': 'mastercard'} - method2 = {'type': 'creditcard', 'name': 'amex'} - method3 = {'type': 'creditcard', 'name': 'visa'} - - provider = {'name': 'noop', 'methods': [method1, method2, method3]} - - provider = self.storage_conn.pg_provider_register( - self.admin_ctxt, provider) - - # TODO(ekarls): Make this more extensive? - self.assertLen(3, provider['methods']) - - def test_get_pg_provider(self): - _, expected = self.pg_provider_register() - actual = self.storage_conn.get_pg_provider(self.admin_ctxt, - expected['id']) - self.assertData(expected, actual) - - def test_get_pg_provider_missing(self): - self.assertMissing(self.storage_conn.get_pg_provider, - self.admin_ctxt, UUID) - - def test_pg_provider_deregister(self): - _, data = self.pg_provider_register() - self.storage_conn.pg_provider_deregister(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.pg_provider_deregister, - self.admin_ctxt, data['id']) - - def test_pg_provider_deregister_missing(self): - self.assertMissing(self.storage_conn.pg_provider_deregister, - self.admin_ctxt, UUID) - - # Payment Gateway Configuration - def test_create_pg_config(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - self.assertData(fixture, data) - - def test_get_pg_config(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - def test_get_pg_config_missing(self): - self.assertMissing(self.storage_conn.get_pg_config, - self.admin_ctxt, UUID) - - def test_update_pg_config(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - fixture['properties'] = {"api": 1} - updated = 
self.storage_conn.update_pg_config( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_pg_config_missing(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - self.assertMissing(self.storage_conn.update_pg_config, - self.admin_ctxt, UUID, {}) - - def test_delete_pg_config(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_pg_config, - self.admin_ctxt, data['id']) - - def test_delete_pg_config_missing(self): - self.assertMissing(self.storage_conn.delete_pg_config, - self.admin_ctxt, UUID) - - # PaymentMethod - def test_create_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - _, customer = self.create_customer(self.merchant['id']) - - # Setup PaymentMethod - values = { - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method( - customer['id'], values=values) - self.assertData(fixture, data) - - def test_get_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - _, customer = self.create_customer(self.merchant['id']) - - # Setup PaymentMethod - values = { - 'provider_config_id': config['id']} - - _, expected = self.create_payment_method( - customer['id'], values=values) - actual = self.storage_conn.get_payment_method(self.admin_ctxt, - expected['id']) - self.assertData(expected, actual) - - # TODO(ekarlso): Make this test more extensive? 
- def test_list_payment_methods(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - - values = { - 'provider_config_id': config['id']} - - # Add two Customers with some methods - _, customer1 = self.create_customer(self.merchant['id']) - self.create_payment_method( - customer1['id'], values=values) - rows = self.storage_conn.list_payment_methods( - self.admin_ctxt, - criterion={'customer_id': customer1['id']}) - self.assertLen(1, rows) - - _, customer2 = self.create_customer(self.merchant['id']) - self.create_payment_method( - customer2['id'], values=values) - self.create_payment_method( - customer2['id'], values=values) - rows = self.storage_conn.list_payment_methods( - self.admin_ctxt, - criterion={'customer_id': customer2['id']}) - self.assertLen(2, rows) - - def test_get_payment_method_missing(self): - self.assertMissing(self.storage_conn.get_payment_method, - self.admin_ctxt, UUID) - - def test_update_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - _, customer = self.create_customer(self.merchant['id']) - - # Setup PaymentMethod - values = { - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method( - customer['id'], values=values) - - fixture['identifier'] = 1 - updated = self.storage_conn.update_payment_method(self.admin_ctxt, - data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_payment_method_missing(self): - self.assertMissing(self.storage_conn.update_payment_method, - self.admin_ctxt, UUID, {}) - - def test_delete_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - _, customer = 
self.create_customer(self.merchant['id']) - - # Setup PaymentMethod - values = { - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method( - customer['id'], values=values) - - self.storage_conn.delete_payment_method(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_payment_method, - self.admin_ctxt, data['id']) - - def test_delete_payment_method_missing(self): - self.assertMissing(self.storage_conn.delete_payment_method, - self.admin_ctxt, UUID) - - # Merchant - def test_create_merchant(self): - fixture, data = self.create_merchant() - self.assertData(fixture, data) - - def test_get_merchant(self): - _, expected = self.create_merchant() - actual = self.storage_conn.get_merchant( - self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_merchant_missing(self): - self.assertMissing(self.storage_conn.get_merchant, - self.admin_ctxt, UUID) - - def test_update_merchant(self): - fixture, data = self.create_merchant() - - fixture['name'] = 'test' - updated = self.storage_conn.update_merchant( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_merchant_missing(self): - self.assertMissing(self.storage_conn.update_merchant, - self.admin_ctxt, UUID, {}) - - def test_delete_merchant(self): - self.storage_conn.delete_merchant(self.admin_ctxt, self.merchant['id']) - self.assertMissing(self.storage_conn.get_merchant, - self.admin_ctxt, self.merchant['id']) - - def test_delete_merchant_missing(self): - self.assertMissing(self.storage_conn.delete_merchant, - self.admin_ctxt, UUID) - - # Customer - def test_create_customer(self): - fixture, data = self.create_customer(self.merchant['id']) - assert data['default_info'] == {} - assert data['contact_info'] == [] - self.assertData(fixture, data) - - def test_create_customer_with_contact_info(self): - contact_fixture = self.get_fixture('contact_info') - customer_fixture, data = self.create_customer( - 
self.merchant['id'], - values={'contact_info': contact_fixture}) - self.assertData(customer_fixture, data) - self.assertData(contact_fixture, data['default_info']) - self.assertData(contact_fixture, data['contact_info'][0]) - - def test_get_customer(self): - _, expected = self.create_customer(self.merchant['id']) - actual = self.storage_conn.get_customer( - self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_customer_missing(self): - self.assertMissing(self.storage_conn.get_customer, - self.admin_ctxt, UUID) - - def test_update_customer(self): - fixture, data = self.create_customer(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_customer( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_customer_missing(self): - self.assertMissing(self.storage_conn.update_customer, - self.admin_ctxt, UUID, {}) - - def test_delete_customer(self): - _, data = self.create_customer(self.merchant['id']) - self.storage_conn.delete_customer(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_customer, - self.admin_ctxt, data['id']) - - def test_delete_customer_missing(self): - self.assertMissing(self.storage_conn.delete_customer, - self.admin_ctxt, UUID) - - # Products - def test_create_product(self): - f, data = self.create_product(self.merchant['id']) - self.assertData(f, data) - - def test_get_product(self): - f, expected = self.create_product(self.merchant['id']) - actual = self.storage_conn.get_product(self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_product_missing(self): - self.assertMissing(self.storage_conn.get_product, - self.admin_ctxt, UUID) - - def test_update_product(self): - fixture, data = self.create_product(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_product( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def 
test_update_product_missing(self): - self.assertMissing(self.storage_conn.update_product, - self.admin_ctxt, UUID, {}) - - def test_delete_product(self): - fixture, data = self.create_product(self.merchant['id']) - self.storage_conn.delete_product(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_product, - self.admin_ctxt, data['id']) - - def test_delete_product_missing(self): - self.assertMissing(self.storage_conn.delete_product, - self.admin_ctxt, UUID) - - # Plan - def test_create_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - self.assertData(fixture, data) - - def test_get_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - actual = self.storage_conn.get_plan(self.admin_ctxt, data['id']) - - # FIXME(ekarlso): This should test the actual items also? But atm - # there's am error that if the value is int when getting added it's - # string when returned... - self.assertEqual(data['name'], actual['name']) - self.assertEqual(data['title'], actual['title']) - self.assertEqual(data['description'], actual['description']) - - def test_get_plan_missing(self): - self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, UUID) - - def test_update_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_plan( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_plan_missing(self): - self.assertMissing(self.storage_conn.update_plan, - self.admin_ctxt, UUID, {}) - - def test_delete_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - self.storage_conn.delete_plan(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_plan, - self.admin_ctxt, data['id']) - - def test_delete_plan_missing(self): - self.assertMissing(self.storage_conn.delete_plan, - self.admin_ctxt, UUID) diff --git a/tools/patch_tox_venv.py b/tools/patch_tox_venv.py index bcd1fc5..7a8f8fb 
100644 --- a/tools/patch_tox_venv.py +++ b/tools/patch_tox_venv.py @@ -26,11 +26,12 @@ def main(argv): venv = os.environ['VIRTUAL_ENV'] pip_requires = os.path.join(root, 'tools', 'pip-requires') + pip_options = os.path.join(root, 'tools', 'pip-options') test_requires = os.path.join(root, 'tools', 'test-requires') py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1]) project = 'Quantum' - install = install_venv.InstallVenv(root, venv, pip_requires, test_requires, - py_version, project) + install = install_venv.InstallVenv(root, venv, pip_requires, pip_options, + test_requires, py_version, project) #NOTE(dprince): For Tox we only run post_process (which patches files, etc) install.post_process() diff --git a/tools/test-requires b/tools/test-requires index 2510bec..6ae2af5 100644 --- a/tools/test-requires +++ b/tools/test-requires @@ -2,11 +2,14 @@ Babel>=0.9.6 coverage docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. +flake8 mock mox nose nosehtmloutput openstack.nose_plugin +python-subunit sphinx sphinxcontrib-httpdomain +testrepository unittest2 diff --git a/tox.ini b/tox.ini index a56239f..9d3e754 100644 --- a/tox.ini +++ b/tox.ini @@ -1,40 +1,42 @@ [tox] -envlist = py26,py27,pep8,pyflakes -minversion = 1.4.0 - -[tox:jenkins] -downloadcache = ~/cache/pip +envlist = py26,py27,pep8 [testenv] -deps = -r{toxinidir}/tools/setup-requires - -r{toxinidir}/tools/pip-requires +setenv = VIRTUAL_ENV={envdir} +deps = -r{toxinidir}/tools/pip-requires -r{toxinidir}/tools/pip-options -r{toxinidir}/tools/test-requires -setenv = VIRTUAL_ENV={envdir} - NOSE_WITH_OPENSTACK=1 - NOSE_OPENSTACK_COLOR=1 - NOSE_OPENSTACK_RED=0.05 - NOSE_OPENSTACK_YELLOW=0.025 - NOSE_OPENSTACK_SHOW_ELAPSED=1 -commands = nosetests {posargs} -sitepackages = False + setuptools_git>=0.4 +commands = + python tools/patch_tox_venv.py + python setup.py testr --slowest --testr-args='{posargs}' -[testenv:cover] -deps = {[testenv]deps} - coverage - nosexcover -setenv = 
{[testenv]setenv} - NOSE_WITH_COVERAGE=1 +[tox:jenkins] +sitepackages = True +downloadcache = ~/cache/pip [testenv:pep8] -deps = {[testenv]deps} - pep8==1.3.3 -commands = pep8 --repeat --show-source --exclude=.venv,.tox,dist,doc,openstack billingstack setup.py bin/billingstack-api bin/billingstack-db-manage +commands = + flake8 -[testenv:pyflakes] -deps = {[testenv]deps} - pyflakes==0.6.1 -commands = pyflakes billingstack bin setup.py +[testenv:cover] +commands = + python tools/patch_tox_venv.py + python setup.py testr --coverage --testr-args='{posargs}' [testenv:venv] commands = {posargs} + +[flake8] +# E711/E712 comparison to False should be 'if cond is False:' or 'if not cond:' +# query = query.filter(Component.disabled == False) +# E125 continuation line does not distinguish itself from next logical line +# H301 one import per line +# H302 import only modules +# TODO(marun) H404 multi line docstring should start with a summary +# TODO(marun) H901,902 use the not operator inline for clarity +# TODO(markmcclain) H202 assertRaises Exception too broad +ignore = E711,E712,E125,H301,H302,H404,H901,H902,H202 +show-source = true +builtins = _ +exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,tools From 3b768f90dcff39ffda15dbae02138ffe6e9cf184 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 10 May 2013 01:04:16 +0200 Subject: [PATCH 129/182] Consume thread group Change-Id: I900de0e203bade86030fd3c0da0e30d2b9319162 --- billingstack/openstack/common/rpc/amqp.py | 3 +++ billingstack/openstack/common/rpc/common.py | 15 +++++++++++++ .../openstack/common/rpc/impl_fake.py | 3 +++ .../openstack/common/rpc/impl_kombu.py | 20 ++++++++++++----- .../openstack/common/rpc/impl_qpid.py | 20 ++++++++++++----- billingstack/openstack/common/rpc/impl_zmq.py | 22 ++++++++++++++----- billingstack/openstack/common/rpc/service.py | 2 +- 7 files changed, 66 insertions(+), 19 deletions(-) diff --git a/billingstack/openstack/common/rpc/amqp.py 
b/billingstack/openstack/common/rpc/amqp.py index 2d97981..3677c7e 100644 --- a/billingstack/openstack/common/rpc/amqp.py +++ b/billingstack/openstack/common/rpc/amqp.py @@ -174,6 +174,9 @@ def join_consumer_pool(self, callback, pool_name, topic, exchange_name): def consume_in_thread(self): self.connection.consume_in_thread() + def consume_in_thread_group(self, thread_group): + self.connection.consume_in_thread_group(thread_group) + def __getattr__(self, key): """Proxy all other calls to the Connection instance""" if self.connection: diff --git a/billingstack/openstack/common/rpc/common.py b/billingstack/openstack/common/rpc/common.py index b753644..bc1e345 100644 --- a/billingstack/openstack/common/rpc/common.py +++ b/billingstack/openstack/common/rpc/common.py @@ -250,6 +250,21 @@ def consume_in_thread(self): """ raise NotImplementedError() + def consume_in_thread_group(self, thread_group): + """ + Spawn a thread to handle incoming messages in the supplied + ThreadGroup. + + Spawn a thread that will be responsible for handling all incoming + messages for consumers that were set up on this connection. + + Message dispatching inside of this is expected to be implemented in a + non-blocking manner. An example implementation would be having this + thread pull messages in for all of the consumers, but utilize a thread + pool for dispatching the messages to the proxy objects. 
+ """ + raise NotImplementedError() + def _safe_log(log_func, msg, msg_data): """Sanitizes the msg_data field before logging.""" diff --git a/billingstack/openstack/common/rpc/impl_fake.py b/billingstack/openstack/common/rpc/impl_fake.py index f5764ed..a71c0f5 100644 --- a/billingstack/openstack/common/rpc/impl_fake.py +++ b/billingstack/openstack/common/rpc/impl_fake.py @@ -120,6 +120,9 @@ def close(self): def consume_in_thread(self): pass + def consume_in_thread_group(self, thread_group): + pass + def create_connection(conf, new=True): """Create a connection""" diff --git a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py index af59bae..b3c2024 100644 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ b/billingstack/openstack/common/rpc/impl_kombu.py @@ -721,17 +721,25 @@ def consume(self, limit=None): except StopIteration: return + def _consumer_thread_callback(self): + """ Consumer thread callback used by consume_in_* """ + try: + self.consume() + except greenlet.GreenletExit: + return + def consume_in_thread(self): """Consumer from all queues/consumers in a greenthread""" - def _consumer_thread(): - try: - self.consume() - except greenlet.GreenletExit: - return + if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn(_consumer_thread) + self.consumer_thread = eventlet.spawn( + self._consumer_thread_callback) return self.consumer_thread + def consume_in_thread_group(self, thread_group): + """ Consume from all queues/consumers in the supplied ThreadGroup""" + thread_group.add_thread(self._consumer_thread_callback) + def create_consumer(self, topic, proxy, fanout=False): """Create a consumer that calls a method in a proxy object""" proxy_cb = rpc_amqp.ProxyCallback( diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py index 6c4c1c9..356886a 100644 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ 
b/billingstack/openstack/common/rpc/impl_qpid.py @@ -510,6 +510,13 @@ def notify_send(self, topic, msg, **kwargs): """Send a notify message on a topic""" self.publisher_send(NotifyPublisher, topic, msg) + def _consumer_thread_callback(self): + """ Consumer thread callback used by consume_in_* """ + try: + self.consume() + except greenlet.GreenletExit: + return + def consume(self, limit=None): """Consume from all queues/consumers""" it = self.iterconsume(limit=limit) @@ -521,15 +528,16 @@ def consume(self, limit=None): def consume_in_thread(self): """Consumer from all queues/consumers in a greenthread""" - def _consumer_thread(): - try: - self.consume() - except greenlet.GreenletExit: - return + if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn(_consumer_thread) + self.consumer_thread = eventlet.spawn( + self._consumer_thread_callback) return self.consumer_thread + def consume_in_thread_group(self, thread_group): + """ Consume from all queues/consumers in the supplied ThreadGroup""" + thread_group.add_thread(self._consumer_thread_callback) + def create_consumer(self, topic, proxy, fanout=False): """Create a consumer that calls a method in a proxy object""" proxy_cb = rpc_amqp.ProxyCallback( diff --git a/billingstack/openstack/common/rpc/impl_zmq.py b/billingstack/openstack/common/rpc/impl_zmq.py index 2cd8126..0b1c719 100644 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ b/billingstack/openstack/common/rpc/impl_zmq.py @@ -406,17 +406,24 @@ def register(self, proxy, in_addr, zmq_type_in, out_addr=None, LOG.info(_("Out reactor registered")) - def consume_in_thread(self): - def _consume(sock): - LOG.info(_("Consuming socket")) - while True: - self.consume(sock) + def _consumer_thread_callback(self, sock): + """ Consumer thread callback used by consume_in_* """ + + LOG.info(_("Consuming socket")) + while True: + self.consume(sock) + def consume_in_thread(self): for k in self.proxies.keys(): self.threads.append( - self.pool.spawn(_consume, 
k) + self.pool.spawn(self._consumer_thread_callback, k) ) + def consume_in_thread_group(self, thread_group): + """ Consume from all queues/consumers in the supplied ThreadGroup""" + for k in self.proxies.keys(): + thread_group.add_thread(self._consumer_thread_callback, k) + def wait(self): for t in self.threads: t.wait() @@ -654,6 +661,9 @@ def consume_in_thread(self): _get_matchmaker().start_heartbeat() self.reactor.consume_in_thread() + def consume_in_thread_group(self, thread_group): + self.reactor.consume_in_thread_group(thread_group) + def _cast(addr, context, topic, msg, timeout=None, envelope=False, _msg_id=None): diff --git a/billingstack/openstack/common/rpc/service.py b/billingstack/openstack/common/rpc/service.py index 48f9298..c38e3c2 100644 --- a/billingstack/openstack/common/rpc/service.py +++ b/billingstack/openstack/common/rpc/service.py @@ -63,7 +63,7 @@ def start(self): self.manager.initialize_service_hook(self) # Consume from all consumers in a thread - self.conn.consume_in_thread() + self.conn.consume_in_thread_group(self.tg) def stop(self): # Try to shut the connection down, but if we get any sort of From 9b1edc15d68a654a235078d141656bd5260b0b48 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 13 May 2013 07:45:39 +0200 Subject: [PATCH 130/182] Remove usage methods from Central Change-Id: I2a8f7a0b6b3c2738b354cd6275014aee18996483 --- billingstack/central/rpcapi.py | 18 ------------------ billingstack/central/service.py | 15 --------------- 2 files changed, 33 deletions(-) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 67c8fb0..461dd96 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -306,23 +306,5 @@ def update_subscription(self, ctxt, id_, values): def delete_subscription(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_subscription', id_=id_)) - # Subscriptions - def create_usage(self, ctxt, values): - return self.call(ctxt, 
self.make_msg('create_usage', values=values)) - - def list_usages(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_usages', - criterion=criterion)) - - def get_usage(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_usage', id_=id_)) - - def update_usage(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_usage', id_=id_, - values=values)) - - def delete_usage(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_usage', id_=id_)) - central_api = CentralAPI() diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 4b22cfd..f18c855 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -280,18 +280,3 @@ def update_subscription(self, ctxt, id_, values): def delete_subscription(self, ctxt, id_): return self.storage_conn.delete_subscription(ctxt, id_) - - def create_usage(self, ctxt, values): - return self.storage_conn.create_usage(ctxt, values) - - def list_usages(self, ctxt, **kw): - return self.storage_conn.list_usages(ctxt, **kw) - - def get_usage(self, ctxt, id_): - return self.storage_conn.get_usage(ctxt, id_) - - def update_usage(self, ctxt, id_, values): - return self.storage_conn.update_usage(ctxt, id_, values) - - def delete_usage(self, ctxt, id_): - return self.storage_conn.delete_usage(ctxt, id_) From 488ec9036339005c485acf02508aa6abb1637523 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 13 May 2013 09:29:40 +0200 Subject: [PATCH 131/182] Fixup storage parts of tests Change-Id: I1488ee2f430b705329b8ff76e344fdbe489ca290 --- billingstack/tests/api/base.py | 4 ++- billingstack/tests/base.py | 47 +++++++++++++++++++++------------- 2 files changed, 32 insertions(+), 19 deletions(-) diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index 72455df..a516b38 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -155,7 +155,9 @@ def setUp(self): super(FunctionalTest, 
self).setUp() # NOTE: Needs to be started after the db schema is created - self.start_service() + conn = self.get_storage_connection('central') + conn.setup_schema() + self.start_service('central') self.setSamples() diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index b0f52f8..99f4c44 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -112,38 +112,45 @@ def setUp(self): def tearDown(self): # NOTE: Currently disabled - #policy.reset() - storage = self.get_storage_connection() - storage.teardown_schema() + for svc in self.services.values(): + svc.storage_conn.teardown_schema() super(TestCase, self).tearDown() - def get_storage_connection(self, service='central'): + def get_storage_connection(self, service='central', **kw): + """ + Import the storage module for the service that we are going to act on, + then return a connection object for that storage module. + + :param service: The service. + """ storage = importutils.import_module('billingstack.%s.storage' % service) - connection = storage.get_connection() + + driver = kw.get('storage_driver', 'sqlalchemy') + engine = storage.get_engine(driver) + + self.config(storage_driver=driver, group='service:%s' % service) + + db = kw.get('database_connection', 'sqlite://') + self.config(database_connection=db, group='%s:%s' % (service, driver)) + + connection = engine.get_connection() + return connection def get_service(self, service='central'): + """ + Return a service + :param service: The service. 
+ """ svc = importutils.import_class('billingstack.%s.service.Service' % service) return svc() def start_service(self, service='central'): - self.config( - storage_driver='sqlalchemy', - group='service:%s' % service - ) - - self.config( - database_connection='sqlite://', - group='%s:sqlalchemy' % service - ) - - storage = self.get_storage_connection(service=service) - storage.setup_schema() - svc = self.get_service(service=service) + svc.start() self.services[service] = svc @@ -278,3 +285,7 @@ def setUp(self): self.storage_conn = self.get_storage_connection() self.storage_conn.setup_schema() self.setSamples() + + def tearDown(self): + self.storage_conn.teardown_schema() + super(StorageTestCase, self).tearDown() From e26b5b7380d272f6edca90583675089eddafe275 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 13 May 2013 10:36:55 +0200 Subject: [PATCH 132/182] Ignore sqlite and testr Change-Id: I3d7677e70551ff3a00fae9740e91f813ceebf2a4 --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 1717b2f..6dcb3e5 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,7 @@ pip-log.txt .coverage .tox nosetests.xml +.testrepository # Translations *.mo @@ -45,3 +46,4 @@ ChangeLog etc/billingstack/*.ini etc/billingstack/*.conf billingstack/versioninfo +*.sqlite From faa19f7d12c41e3d0ce19732a659e6fe2ee0ba34 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 23 May 2013 21:44:47 +0200 Subject: [PATCH 133/182] Switch to PBR Change-Id: Iaf147c8b282fd17032cc8ea29b2820480790dee5 --- README.md => README.rst | 0 billingstack/openstack/common/setup.py | 367 ----------------------- billingstack/openstack/common/version.py | 94 ------ billingstack/version.py | 5 +- openstack.conf | 2 - setup.cfg | 69 ++++- setup.py | 97 +----- tools/pip-requires | 3 + 8 files changed, 79 insertions(+), 558 deletions(-) rename README.md => README.rst (100%) delete mode 100644 billingstack/openstack/common/setup.py delete mode 100644 
billingstack/openstack/common/version.py diff --git a/README.md b/README.rst similarity index 100% rename from README.md rename to README.rst diff --git a/billingstack/openstack/common/setup.py b/billingstack/openstack/common/setup.py deleted file mode 100644 index ba6b54a..0000000 --- a/billingstack/openstack/common/setup.py +++ /dev/null @@ -1,367 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Utilities with minimum-depends for use in setup.py -""" - -import email -import os -import re -import subprocess -import sys - -from setuptools.command import sdist - - -def parse_mailmap(mailmap='.mailmap'): - mapping = {} - if os.path.exists(mailmap): - with open(mailmap, 'r') as fp: - for l in fp: - try: - canonical_email, alias = re.match( - r'[^#]*?(<.+>).*(<.+>).*', l).groups() - except AttributeError: - continue - mapping[alias] = canonical_email - return mapping - - -def _parse_git_mailmap(git_dir, mailmap='.mailmap'): - mailmap = os.path.join(os.path.dirname(git_dir), mailmap) - return parse_mailmap(mailmap) - - -def canonicalize_emails(changelog, mapping): - """Takes in a string and an email alias mapping and replaces all - instances of the aliases in the string with their real email. 
- """ - for alias, email_address in mapping.iteritems(): - changelog = changelog.replace(alias, email_address) - return changelog - - -# Get requirements from the first file that exists -def get_reqs_from_files(requirements_files): - for requirements_file in requirements_files: - if os.path.exists(requirements_file): - with open(requirements_file, 'r') as fil: - return fil.read().split('\n') - return [] - - -def parse_requirements(requirements_files=['requirements.txt', - 'tools/pip-requires']): - requirements = [] - for line in get_reqs_from_files(requirements_files): - # For the requirements list, we need to inject only the portion - # after egg= so that distutils knows the package it's looking for - # such as: - # -e git://github.com/openstack/nova/master#egg=nova - if re.match(r'\s*-e\s+', line): - requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1', - line)) - # such as: - # http://github.com/openstack/nova/zipball/master#egg=nova - elif re.match(r'\s*https?:', line): - requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1', - line)) - # -f lines are for index locations, and don't get used here - elif re.match(r'\s*-f\s+', line): - pass - # argparse is part of the standard library starting with 2.7 - # adding it to the requirements list screws distro installs - elif line == 'argparse' and sys.version_info >= (2, 7): - pass - else: - requirements.append(line) - - return requirements - - -def parse_dependency_links(requirements_files=['requirements.txt', - 'tools/pip-requires']): - dependency_links = [] - # dependency_links inject alternate locations to find packages listed - # in requirements - for line in get_reqs_from_files(requirements_files): - # skip comments and blank lines - if re.match(r'(\s*#)|(\s*$)', line): - continue - # lines with -e or -f need the whole line, minus the flag - if re.match(r'\s*-[ef]\s+', line): - dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line)) - # lines that are only urls can go in unmolested - elif 
re.match(r'\s*https?:', line): - dependency_links.append(line) - return dependency_links - - -def _run_shell_command(cmd, throw_on_error=False): - if os.name == 'nt': - output = subprocess.Popen(["cmd.exe", "/C", cmd], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - else: - output = subprocess.Popen(["/bin/sh", "-c", cmd], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - out = output.communicate() - if output.returncode and throw_on_error: - raise Exception("%s returned %d" % cmd, output.returncode) - if len(out) == 0: - return None - if len(out[0].strip()) == 0: - return None - return out[0].strip() - - -def _get_git_directory(): - parent_dir = os.path.dirname(__file__) - while True: - git_dir = os.path.join(parent_dir, '.git') - if os.path.exists(git_dir): - return git_dir - parent_dir, child = os.path.split(parent_dir) - if not child: # reached to root dir - return None - - -def write_git_changelog(): - """Write a changelog based on the git changelog.""" - new_changelog = 'ChangeLog' - git_dir = _get_git_directory() - if not os.getenv('SKIP_WRITE_GIT_CHANGELOG'): - if git_dir: - git_log_cmd = 'git --git-dir=%s log' % git_dir - changelog = _run_shell_command(git_log_cmd) - mailmap = _parse_git_mailmap(git_dir) - with open(new_changelog, "w") as changelog_file: - changelog_file.write(canonicalize_emails(changelog, mailmap)) - else: - open(new_changelog, 'w').close() - - -def generate_authors(): - """Create AUTHORS file using git commits.""" - jenkins_email = 'jenkins@review.(openstack|stackforge).org' - old_authors = 'AUTHORS.in' - new_authors = 'AUTHORS' - git_dir = _get_git_directory() - if not os.getenv('SKIP_GENERATE_AUTHORS'): - if git_dir: - # don't include jenkins email address in AUTHORS file - git_log_cmd = ("git --git-dir=" + git_dir + - " log --format='%aN <%aE>' | sort -u | " - "egrep -v '" + jenkins_email + "'") - changelog = _run_shell_command(git_log_cmd) - signed_cmd = ("git --git-dir=" + git_dir + - " log | grep -i Co-authored-by: | 
sort -u") - signed_entries = _run_shell_command(signed_cmd) - if signed_entries: - new_entries = "\n".join( - [signed.split(":", 1)[1].strip() - for signed in signed_entries.split("\n") if signed]) - changelog = "\n".join((changelog, new_entries)) - mailmap = _parse_git_mailmap(git_dir) - with open(new_authors, 'w') as new_authors_fh: - new_authors_fh.write(canonicalize_emails(changelog, mailmap)) - if os.path.exists(old_authors): - with open(old_authors, "r") as old_authors_fh: - new_authors_fh.write('\n' + old_authors_fh.read()) - else: - open(new_authors, 'w').close() - - -_rst_template = """%(heading)s -%(underline)s - -.. automodule:: %(module)s - :members: - :undoc-members: - :show-inheritance: -""" - - -def get_cmdclass(): - """Return dict of commands to run from setup.py.""" - - cmdclass = dict() - - def _find_modules(arg, dirname, files): - for filename in files: - if filename.endswith('.py') and filename != '__init__.py': - arg["%s.%s" % (dirname.replace('/', '.'), - filename[:-3])] = True - - class LocalSDist(sdist.sdist): - """Builds the ChangeLog and Authors files from VC first.""" - - def run(self): - write_git_changelog() - generate_authors() - # sdist.sdist is an old style class, can't use super() - sdist.sdist.run(self) - - cmdclass['sdist'] = LocalSDist - - # If Sphinx is installed on the box running setup.py, - # enable setup.py to build the documentation, otherwise, - # just ignore it - try: - from sphinx.setup_command import BuildDoc - - class LocalBuildDoc(BuildDoc): - - builders = ['html', 'man'] - - def generate_autoindex(self): - print "**Autodocumenting from %s" % os.path.abspath(os.curdir) - modules = {} - option_dict = self.distribution.get_option_dict('build_sphinx') - source_dir = os.path.join(option_dict['source_dir'][1], 'api') - if not os.path.exists(source_dir): - os.makedirs(source_dir) - for pkg in self.distribution.packages: - if '.' 
not in pkg: - os.path.walk(pkg, _find_modules, modules) - module_list = modules.keys() - module_list.sort() - autoindex_filename = os.path.join(source_dir, 'autoindex.rst') - with open(autoindex_filename, 'w') as autoindex: - autoindex.write(""".. toctree:: - :maxdepth: 1 - -""") - for module in module_list: - output_filename = os.path.join(source_dir, - "%s.rst" % module) - heading = "The :mod:`%s` Module" % module - underline = "=" * len(heading) - values = dict(module=module, heading=heading, - underline=underline) - - print "Generating %s" % output_filename - with open(output_filename, 'w') as output_file: - output_file.write(_rst_template % values) - autoindex.write(" %s.rst\n" % module) - - def run(self): - if not os.getenv('SPHINX_DEBUG'): - self.generate_autoindex() - - for builder in self.builders: - self.builder = builder - self.finalize_options() - self.project = self.distribution.get_name() - self.version = self.distribution.get_version() - self.release = self.distribution.get_version() - BuildDoc.run(self) - - class LocalBuildLatex(LocalBuildDoc): - builders = ['latex'] - - cmdclass['build_sphinx'] = LocalBuildDoc - cmdclass['build_sphinx_latex'] = LocalBuildLatex - except ImportError: - pass - - return cmdclass - - -def _get_revno(git_dir): - """Return the number of commits since the most recent tag. - - We use git-describe to find this out, but if there are no - tags then we fall back to counting commits since the beginning - of time. 
- """ - describe = _run_shell_command( - "git --git-dir=%s describe --always" % git_dir) - if "-" in describe: - return describe.rsplit("-", 2)[-2] - - # no tags found - revlist = _run_shell_command( - "git --git-dir=%s rev-list --abbrev-commit HEAD" % git_dir) - return len(revlist.splitlines()) - - -def _get_version_from_git(pre_version): - """Return a version which is equal to the tag that's on the current - revision if there is one, or tag plus number of additional revisions - if the current revision has no tag.""" - - git_dir = _get_git_directory() - if git_dir: - if pre_version: - try: - return _run_shell_command( - "git --git-dir=" + git_dir + " describe --exact-match", - throw_on_error=True).replace('-', '.') - except Exception: - sha = _run_shell_command( - "git --git-dir=" + git_dir + " log -n1 --pretty=format:%h") - return "%s.a%s.g%s" % (pre_version, _get_revno(git_dir), sha) - else: - return _run_shell_command( - "git --git-dir=" + git_dir + " describe --always").replace( - '-', '.') - return None - - -def _get_version_from_pkg_info(package_name): - """Get the version from PKG-INFO file if we can.""" - try: - pkg_info_file = open('PKG-INFO', 'r') - except (IOError, OSError): - return None - try: - pkg_info = email.message_from_file(pkg_info_file) - except email.MessageError: - return None - # Check to make sure we're in our own dir - if pkg_info.get('Name', None) != package_name: - return None - return pkg_info.get('Version', None) - - -def get_version(package_name, pre_version=None): - """Get the version of the project. First, try getting it from PKG-INFO, if - it exists. If it does, that means we're in a distribution tarball or that - install has happened. Otherwise, if there is no PKG-INFO file, pull the - version from git. - - We do not support setup.py version sanity in git archive tarballs, nor do - we support packagers directly sucking our git repo into theirs. 
We expect - that a source tarball be made from our git repo - or that if someone wants - to make a source tarball from a fork of our repo with additional tags in it - that they understand and desire the results of doing that. - """ - version = os.environ.get("OSLO_PACKAGE_VERSION", None) - if version: - return version - version = _get_version_from_pkg_info(package_name) - if version: - return version - version = _get_version_from_git(pre_version) - if version: - return version - raise Exception("Versioning for this project requires either an sdist" - " tarball, or access to an upstream git repository.") diff --git a/billingstack/openstack/common/version.py b/billingstack/openstack/common/version.py deleted file mode 100644 index a593ae3..0000000 --- a/billingstack/openstack/common/version.py +++ /dev/null @@ -1,94 +0,0 @@ - -# Copyright 2012 OpenStack Foundation -# Copyright 2012-2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Utilities for consuming the version from pkg_resources. 
-""" - -import pkg_resources - - -class VersionInfo(object): - - def __init__(self, package): - """Object that understands versioning for a package - :param package: name of the python package, such as glance, or - python-glanceclient - """ - self.package = package - self.release = None - self.version = None - self._cached_version = None - - def __str__(self): - """Make the VersionInfo object behave like a string.""" - return self.version_string() - - def __repr__(self): - """Include the name.""" - return "VersionInfo(%s:%s)" % (self.package, self.version_string()) - - def _get_version_from_pkg_resources(self): - """Get the version of the package from the pkg_resources record - associated with the package.""" - try: - requirement = pkg_resources.Requirement.parse(self.package) - provider = pkg_resources.get_provider(requirement) - return provider.version - except pkg_resources.DistributionNotFound: - # The most likely cause for this is running tests in a tree - # produced from a tarball where the package itself has not been - # installed into anything. Revert to setup-time logic. - from billingstack.openstack.common import setup - return setup.get_version(self.package) - - def release_string(self): - """Return the full version of the package including suffixes indicating - VCS status. - """ - if self.release is None: - self.release = self._get_version_from_pkg_resources() - - return self.release - - def version_string(self): - """Return the short version minus any alpha/beta tags.""" - if self.version is None: - parts = [] - for part in self.release_string().split('.'): - if part[0].isdigit(): - parts.append(part) - else: - break - self.version = ".".join(parts) - - return self.version - - # Compatibility functions - canonical_version_string = version_string - version_string_with_vcs = release_string - - def cached_version_string(self, prefix=""): - """Generate an object which will expand in a string context to - the results of version_string(). 
We do this so that don't - call into pkg_resources every time we start up a program when - passing version information into the CONF constructor, but - rather only do the calculation when and if a version is requested - """ - if not self._cached_version: - self._cached_version = "%s%s" % (prefix, - self.version_string()) - return self._cached_version diff --git a/billingstack/version.py b/billingstack/version.py index e7924d7..5341162 100644 --- a/billingstack/version.py +++ b/billingstack/version.py @@ -15,6 +15,5 @@ # under the License. # # Copied: Moniker -from billingstack.openstack.common import version as common_version - -version_info = common_version.VersionInfo('billingstack') +import pbr.version +version_info = pbr.version.VersionInfo('billingstack') diff --git a/openstack.conf b/openstack.conf index 0f654b2..0c83a12 100644 --- a/openstack.conf +++ b/openstack.conf @@ -21,12 +21,10 @@ module=notifier module=processutils module=rpc module=service -module=setup module=threadgroup module=timeutils module=utils module=uuidutils -module=version # Base base=billingstack diff --git a/setup.cfg b/setup.cfg index a3bac0c..019b3bc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,12 +1,54 @@ -[nosetests] -cover-package=billingstack -cover-html=true -cover-html-dir=../../cover -cover-erase=true -cover-inclusive=true -verbosity=2 -detailed-errors=1 -where=billingstack/tests +[metadata] +name = billingstack +summary = Subscription based Billing in Python +description-file = + README.rst +author = Endre Karlson +author-email = dev@billingstack.org +home-page = http://www.billingstack.org/ +classifier = + Environment :: Any + Intended Audience :: Information Technology + Intended Audience :: Financial People + License :: OSI Approved :: Apache Software License + Operating System :: POSIX :: Linux + Programming Language :: Python + Programming Language :: Python :: 2 + Programming Language :: Python :: 2.7 + Programming Language :: Python :: 2.6 + +[global] +setup-hooks = + 
pbr.hooks.setup_hook + +[files] +packages = + billingstack +scripts = + bin/billingstack-db-manage + bin/billingstack-manage + bin/billingstack-api + bin/billingstack-central + bin/billingstack-biller + bin/billingstack-collector + bin/billingstack-rater + +[entry_points] +billingstack.central.storage = + sqlalchemy = billingstack.central.storage.impl_sqlalchemy:SQLAlchemyEngine + +billingstack.biller.storage = + sqlalchemy = billingstack.biller.storage.impl_sqlalchemy:SQLAlchemyEngine + +billingstack.rater.storage = + sqlalchemy = billingstack.rater.storage.impl_sqlalchemy:SQLAlchemyEngine + +billingstack.payment_gateway = + dummy = billingstack.payment_gateway.dummy:DummyProvider + +billingstack.manage = + pg-register = billingstack.manage.provider:ProvidersRegister + pg-list = billingstack.manage.provider:ProvidersList [build_sphinx] source-dir = doc/source @@ -15,3 +57,12 @@ all_files = 1 [upload_docs] upload-dir = doc/build/html + +[nosetests] +cover-package = billingstack +cover-html = true +cover-erase = true +cover-inclusive = true +verbosity=2 +detailed-errors=1 +where=billingstack.tests diff --git a/setup.py b/setup.py index 8e0d01b..1e9882d 100644 --- a/setup.py +++ b/setup.py @@ -1,90 +1,21 @@ #!/usr/bin/env python +# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # -# Author: Endre Karlson +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from setuptools import setup, find_packages -import textwrap -from billingstack.openstack.common import setup as common_setup - -install_requires = common_setup.parse_requirements(['tools/pip-requires']) -install_options = common_setup.parse_requirements(['tools/pip-options']) -tests_require = common_setup.parse_requirements(['tools/test-requires']) -setup_require = common_setup.parse_requirements(['tools/setup-requires']) -dependency_links = common_setup.parse_dependency_links([ - 'tools/pip-requires', - 'tools/pip-options', - 'tools/test-requires', - 'tools/setup-requires' -]) - -setup( - name='billingstack', - version=common_setup.get_version('billingstack'), - description='Subscription based Billing in Python', - author='Endre Karlson', - author_email='endre.karlson@gmail.com', - url='https://github/billingstack/billingstack', - packages=find_packages(exclude=['bin']), - include_package_data=True, - test_suite='nose.collector', - setup_requires=setup_require, - install_requires=install_requires, - tests_require=tests_require, - extras_require={ - 'test': tests_require, - 'optional': install_options, - }, - dependency_links=dependency_links, - scripts=[ - 'bin/billingstack-api', - 'bin/billingstack-db-manage', - 'bin/billingstack-manage', - 'bin/billingstack-central', - 'bin/billingstack-biller', - 'bin/billingstack-collector', - 'bin/billingstack-rater' - ], - cmdclass=common_setup.get_cmdclass(), - entry_points=textwrap.dedent(""" - [billingstack.central.storage] - sqlalchemy = 
billingstack.central.storage.impl_sqlalchemy\ - :SQLAlchemyEngine - - [billingstack.biller.storage] - sqlalchemy = billingstack.biller.storage.impl_sqlalchemy\ - :SQLAlchemyEngine - - [billingstack.rater.storage] - sqlalchemy = billingstack.rater.storage.impl_sqlalchemy\ - :SQLAlchemyEngine - - [billingstack.payment_gateway] - dummy = billingstack.payment_gateway.dummy:DummyProvider +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. - [billingstack.manage] - pg-register = billingstack.manage.provider:ProvidersRegister - pg-list = billingstack.manage.provider:ProvidersList +import setuptools - [billingstack.api.v1.extensions] - """), - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Topic :: Finance :: Subscription Billing', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Environment :: OpenStack', - ], -) +setuptools.setup( + setup_requires=['d2to1>=0.2.10,<0.3', 'pbr>=0.5,<0.6'], + d2to1=True) diff --git a/tools/pip-requires b/tools/pip-requires index d5ccae9..6d8fb35 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -1,3 +1,6 @@ +d2to1>=0.2.10,<0.3 +pbr>=0.5,<0.6 + # This file is managed by openstack-depends argparse cliff From 76fb9f7aacb0b8f4bd42150005b25552011838b1 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 28 May 2013 19:59:35 +0200 Subject: [PATCH 134/182] Add colorizer and run_tests Change-Id: Ifb9bd753c0b8097b8bef6316001861d904ffd444 --- run_tests.sh | 237 ++++++++++++++++++++++++++++++++ tools/colorizer.py | 333 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 570 insertions(+) create mode 100755 run_tests.sh create mode 100755 tools/colorizer.py diff --git a/run_tests.sh 
b/run_tests.sh new file mode 100755 index 0000000..5f3d2eb --- /dev/null +++ b/run_tests.sh @@ -0,0 +1,237 @@ +#!/bin/bash + +set -eu + +function usage { + echo "Usage: $0 [OPTION]..." + echo "Run Nova's test suite(s)" + echo "" + echo " -V, --virtual-env Always use virtualenv. Install automatically if not present" + echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local environment" + echo " -s, --no-site-packages Isolate the virtualenv from the global Python environment" + echo " -r, --recreate-db Recreate the test database (deprecated, as this is now the default)." + echo " -n, --no-recreate-db Don't recreate the test database." + echo " -f, --force Force a clean re-build of the virtual environment. Useful when dependencies have been added." + echo " -u, --update Update the virtual environment with any newer package versions" + echo " -p, --pep8 Just run PEP8 and HACKING compliance check" + echo " -P, --no-pep8 Don't run static code checks" + echo " -c, --coverage Generate coverage report" + echo " -d, --debug Run tests with testtools instead of testr. This allows you to use the debugger." + echo " -h, --help Print this usage message" + echo " --hide-elapsed Don't print the elapsed time for each test along with slow test list" + echo " --virtual-env-path Location of the virtualenv directory" + echo " Default: \$(pwd)" + echo " --virtual-env-name Name of the virtualenv directory" + echo " Default: .venv" + echo " --tools-path Location of the tools directory" + echo " Default: \$(pwd)" + echo "" + echo "Note: with no options specified, the script will try to run the tests in a virtual environment," + echo " If no virtualenv is found, the script will ask if you would like to create one. If you " + echo " prefer to run tests NOT in a virtual environment, simply pass the -N option." 
+ exit +} + +function process_options { + i=1 + while [ $i -le $# ]; do + case "${!i}" in + -h|--help) usage;; + -V|--virtual-env) always_venv=1; never_venv=0;; + -N|--no-virtual-env) always_venv=0; never_venv=1;; + -s|--no-site-packages) no_site_packages=1;; + -r|--recreate-db) recreate_db=1;; + -n|--no-recreate-db) recreate_db=0;; + -f|--force) force=1;; + -u|--update) update=1;; + -p|--pep8) just_pep8=1;; + -P|--no-pep8) no_pep8=1;; + -c|--coverage) coverage=1;; + -d|--debug) debug=1;; + --virtual-env-path) + (( i++ )) + venv_path=${!i} + ;; + --virtual-env-name) + (( i++ )) + venv_dir=${!i} + ;; + --tools-path) + (( i++ )) + tools_path=${!i} + ;; + -*) testropts="$testropts ${!i}";; + *) testrargs="$testrargs ${!i}" + esac + (( i++ )) + done +} + +tool_path=${tools_path:-$(pwd)} +venv_path=${venv_path:-$(pwd)} +venv_dir=${venv_name:-.venv} +with_venv=tools/with_venv.sh +always_venv=0 +never_venv=0 +force=0 +no_site_packages=0 +installvenvopts= +testrargs= +testropts= +wrapper="" +just_pep8=0 +no_pep8=0 +coverage=0 +debug=0 +recreate_db=1 +update=0 + +LANG=en_US.UTF-8 +LANGUAGE=en_US:en +LC_ALL=C + +process_options $@ +# Make our paths available to other scripts we call +export venv_path +export venv_dir +export venv_name +export tools_dir +export venv=${venv_path}/${venv_dir} + +if [ $no_site_packages -eq 1 ]; then + installvenvopts="--no-site-packages" +fi + +function init_testr { + if [ ! -d .testrepository ]; then + ${wrapper} testr init + fi +} + +function run_tests { + # Cleanup *pyc + ${wrapper} find . -type f -name "*.pyc" -delete + + if [ $debug -eq 1 ]; then + if [ "$testropts" = "" ] && [ "$testrargs" = "" ]; then + # Default to running all tests if specific test is not + # provided. + testrargs="discover ./billingstack/tests" + fi + ${wrapper} python -m testtools.run $testropts $testrargs + + # Short circuit because all of the testr and coverage stuff + # below does not make sense when running testtools.run for + # debugging purposes. + return $? 
+ fi + + if [ $coverage -eq 1 ]; then + TESTRTESTS="$TESTRTESTS --coverage" + else + TESTRTESTS="$TESTRTESTS" + fi + + # Just run the test suites in current environment + set +e + testrargs=`echo "$testrargs" | sed -e's/^\s*\(.*\)\s*$/\1/'` + TESTRTESTS="$TESTRTESTS --testr-args='--subunit $testropts $testrargs'" + if [ setup.cfg -nt billingstack.egg-info/entry_points.txt ] + then + ${wrapper} python setup.py egg_info + fi + echo "Running \`${wrapper} $TESTRTESTS\`" + if ${wrapper} which subunit-2to1 2>&1 > /dev/null + then + # subunit-2to1 is present, testr subunit stream should be in version 2 + # format. Convert to version one before colorizing. + bash -c "${wrapper} $TESTRTESTS | ${wrapper} subunit-2to1 | ${wrapper} tools/colorizer.py" + else + bash -c "${wrapper} $TESTRTESTS | ${wrapper} tools/colorizer.py" + fi + RESULT=$? + set -e + + copy_subunit_log + + if [ $coverage -eq 1 ]; then + echo "Generating coverage report in covhtml/" + # Don't compute coverage for common code, which is tested elsewhere + ${wrapper} coverage combine + ${wrapper} coverage html --include='billingstack/*' --omit='billingstack/openstack/common/*' -d covhtml -i + fi + + return $RESULT +} + +function copy_subunit_log { + LOGNAME=`cat .testrepository/next-stream` + LOGNAME=$(($LOGNAME - 1)) + LOGNAME=".testrepository/${LOGNAME}" + cp $LOGNAME subunit.log +} + +function run_pep8 { + echo "Running flake8 ..." + bash -c "${wrapper} flake8" +} + + +TESTRTESTS="python setup.py testr" + +if [ $never_venv -eq 0 ] +then + # Remove the virtual environment if --force used + if [ $force -eq 1 ]; then + echo "Cleaning virtualenv..." + rm -rf ${venv} + fi + if [ $update -eq 1 ]; then + echo "Updating virtualenv..." 
+ python tools/install_venv.py $installvenvopts + fi + if [ -e ${venv} ]; then + wrapper="${with_venv}" + else + if [ $always_venv -eq 1 ]; then + # Automatically install the virtualenv + python tools/install_venv.py $installvenvopts + wrapper="${with_venv}" + else + echo -e "No virtual environment found...create one? (Y/n) \c" + read use_ve + if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then + # Install the virtualenv and run the test suite in it + python tools/install_venv.py $installvenvopts + wrapper=${with_venv} + fi + fi + fi +fi + +# Delete old coverage data from previous runs +if [ $coverage -eq 1 ]; then + ${wrapper} coverage erase +fi + +if [ $just_pep8 -eq 1 ]; then + run_pep8 + exit +fi + +if [ $recreate_db -eq 1 ]; then + rm -f tests.sqlite +fi + +init_testr +run_tests + +# NOTE(sirp): we only want to run pep8 when we're running the full-test suite, +# not when we're running tests individually. To handle this, we need to +# distinguish between options (testropts), which begin with a '-', and +# arguments (testrargs). +if [ -z "$testrargs" ]; then + if [ $no_pep8 -eq 0 ]; then + run_pep8 + fi +fi diff --git a/tools/colorizer.py b/tools/colorizer.py new file mode 100755 index 0000000..aa7427e --- /dev/null +++ b/tools/colorizer.py @@ -0,0 +1,333 @@ +#!/usr/bin/env python +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (c) 2013, Nebula, Inc. +# Copyright 2010 United States Government as represented by the +# Administrator of the National Aeronautics and Space Administration. +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# Colorizer Code is borrowed from Twisted: +# Copyright (c) 2001-2010 Twisted Matrix Laboratories. +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +"""Display a subunit stream through a colorized unittest test runner.""" + +import heapq +import subunit +import sys +import unittest + +import testtools + + +class _AnsiColorizer(object): + """ + A colorizer is an object that loosely wraps around a stream, allowing + callers to write text to the stream in a particular color. 
+ + Colorizer classes must implement C{supported()} and C{write(text, color)}. + """ + _colors = dict(black=30, red=31, green=32, yellow=33, + blue=34, magenta=35, cyan=36, white=37) + + def __init__(self, stream): + self.stream = stream + + def supported(cls, stream=sys.stdout): + """ + A class method that returns True if the current platform supports + coloring terminal output using this method. Returns False otherwise. + """ + if not stream.isatty(): + return False # auto color only on TTYs + try: + import curses + except ImportError: + return False + else: + try: + try: + return curses.tigetnum("colors") > 2 + except curses.error: + curses.setupterm() + return curses.tigetnum("colors") > 2 + except Exception: + # guess false in case of error + return False + supported = classmethod(supported) + + def write(self, text, color): + """ + Write the given text to the stream in the given color. + + @param text: Text to be written to the stream. + + @param color: A string label for a color. e.g. 'red', 'white'. + """ + color = self._colors[color] + self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text)) + + +class _Win32Colorizer(object): + """ + See _AnsiColorizer docstring. 
+ """ + def __init__(self, stream): + import win32console + red, green, blue, bold = (win32console.FOREGROUND_RED, + win32console.FOREGROUND_GREEN, + win32console.FOREGROUND_BLUE, + win32console.FOREGROUND_INTENSITY) + self.stream = stream + self.screenBuffer = win32console.GetStdHandle( + win32console.STD_OUT_HANDLE) + self._colors = { + 'normal': red | green | blue, + 'red': red | bold, + 'green': green | bold, + 'blue': blue | bold, + 'yellow': red | green | bold, + 'magenta': red | blue | bold, + 'cyan': green | blue | bold, + 'white': red | green | blue | bold + } + + def supported(cls, stream=sys.stdout): + try: + import win32console + screenBuffer = win32console.GetStdHandle( + win32console.STD_OUT_HANDLE) + except ImportError: + return False + import pywintypes + try: + screenBuffer.SetConsoleTextAttribute( + win32console.FOREGROUND_RED | + win32console.FOREGROUND_GREEN | + win32console.FOREGROUND_BLUE) + except pywintypes.error: + return False + else: + return True + supported = classmethod(supported) + + def write(self, text, color): + color = self._colors[color] + self.screenBuffer.SetConsoleTextAttribute(color) + self.stream.write(text) + self.screenBuffer.SetConsoleTextAttribute(self._colors['normal']) + + +class _NullColorizer(object): + """ + See _AnsiColorizer docstring. 
+ """ + def __init__(self, stream): + self.stream = stream + + def supported(cls, stream=sys.stdout): + return True + supported = classmethod(supported) + + def write(self, text, color): + self.stream.write(text) + + +def get_elapsed_time_color(elapsed_time): + if elapsed_time > 1.0: + return 'red' + elif elapsed_time > 0.25: + return 'yellow' + else: + return 'green' + + +class NovaTestResult(testtools.TestResult): + def __init__(self, stream, descriptions, verbosity): + super(NovaTestResult, self).__init__() + self.stream = stream + self.showAll = verbosity > 1 + self.num_slow_tests = 10 + self.slow_tests = [] # this is a fixed-sized heap + self.colorizer = None + # NOTE(vish): reset stdout for the terminal check + stdout = sys.stdout + sys.stdout = sys.__stdout__ + for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]: + if colorizer.supported(): + self.colorizer = colorizer(self.stream) + break + sys.stdout = stdout + self.start_time = None + self.last_time = {} + self.results = {} + self.last_written = None + + def _writeElapsedTime(self, elapsed): + color = get_elapsed_time_color(elapsed) + self.colorizer.write(" %.2f" % elapsed, color) + + def _addResult(self, test, *args): + try: + name = test.id() + except AttributeError: + name = 'Unknown.unknown' + test_class, test_name = name.rsplit('.', 1) + + elapsed = (self._now() - self.start_time).total_seconds() + item = (elapsed, test_class, test_name) + if len(self.slow_tests) >= self.num_slow_tests: + heapq.heappushpop(self.slow_tests, item) + else: + heapq.heappush(self.slow_tests, item) + + self.results.setdefault(test_class, []) + self.results[test_class].append((test_name, elapsed) + args) + self.last_time[test_class] = self._now() + self.writeTests() + + def _writeResult(self, test_name, elapsed, long_result, color, + short_result, success): + if self.showAll: + self.stream.write(' %s' % str(test_name).ljust(66)) + self.colorizer.write(long_result, color) + if success: + 
self._writeElapsedTime(elapsed) + self.stream.writeln() + else: + self.colorizer.write(short_result, color) + + def addSuccess(self, test): + super(NovaTestResult, self).addSuccess(test) + self._addResult(test, 'OK', 'green', '.', True) + + def addFailure(self, test, err): + super(NovaTestResult, self).addFailure(test, err) + self._addResult(test, 'FAIL', 'red', 'F', False) + + def addError(self, test, err): + super(NovaTestResult, self).addFailure(test, err) + self._addResult(test, 'ERROR', 'red', 'E', False) + + def addSkip(self, test, reason=None, details=None): + super(NovaTestResult, self).addSkip(test, reason, details) + self._addResult(test, 'SKIP', 'blue', 'S', True) + + def startTest(self, test): + self.start_time = self._now() + super(NovaTestResult, self).startTest(test) + + def writeTestCase(self, cls): + if not self.results.get(cls): + return + if cls != self.last_written: + self.colorizer.write(cls, 'white') + self.stream.writeln() + for result in self.results[cls]: + self._writeResult(*result) + del self.results[cls] + self.stream.flush() + self.last_written = cls + + def writeTests(self): + time = self.last_time.get(self.last_written, self._now()) + if not self.last_written or (self._now() - time).total_seconds() > 2.0: + diff = 3.0 + while diff > 2.0: + classes = self.results.keys() + oldest = min(classes, key=lambda x: self.last_time[x]) + diff = (self._now() - self.last_time[oldest]).total_seconds() + self.writeTestCase(oldest) + else: + self.writeTestCase(self.last_written) + + def done(self): + self.stopTestRun() + + def stopTestRun(self): + for cls in list(self.results.iterkeys()): + self.writeTestCase(cls) + self.stream.writeln() + self.writeSlowTests() + + def writeSlowTests(self): + # Pare out 'fast' tests + slow_tests = [item for item in self.slow_tests + if get_elapsed_time_color(item[0]) != 'green'] + if slow_tests: + slow_total_time = sum(item[0] for item in slow_tests) + slow = ("Slowest %i tests took %.2f secs:" + % (len(slow_tests), 
slow_total_time)) + self.colorizer.write(slow, 'yellow') + self.stream.writeln() + last_cls = None + # sort by name + for elapsed, cls, name in sorted(slow_tests, + key=lambda x: x[1] + x[2]): + if cls != last_cls: + self.colorizer.write(cls, 'white') + self.stream.writeln() + last_cls = cls + self.stream.write(' %s' % str(name).ljust(68)) + self._writeElapsedTime(elapsed) + self.stream.writeln() + + def printErrors(self): + if self.showAll: + self.stream.writeln() + self.printErrorList('ERROR', self.errors) + self.printErrorList('FAIL', self.failures) + + def printErrorList(self, flavor, errors): + for test, err in errors: + self.colorizer.write("=" * 70, 'red') + self.stream.writeln() + self.colorizer.write(flavor, 'red') + self.stream.writeln(": %s" % test.id()) + self.colorizer.write("-" * 70, 'red') + self.stream.writeln() + self.stream.writeln("%s" % err) + + +test = subunit.ProtocolTestCase(sys.stdin, passthrough=None) + +if sys.version_info[0:2] <= (2, 6): + runner = unittest.TextTestRunner(verbosity=2) +else: + runner = unittest.TextTestRunner(verbosity=2, resultclass=NovaTestResult) + +if runner.run(test).wasSuccessful(): + exit_code = 0 +else: + exit_code = 1 +sys.exit(exit_code) From 7f773fa10b2579ebd637e5e8fa0cfbb52f41d1bd Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 28 May 2013 21:42:26 +0200 Subject: [PATCH 135/182] Fixturize Change-Id: Ie2319d74181f9f1d3496602b923b583fa942c25c --- .../biller/storage/impl_sqlalchemy.py | 1 + billingstack/tests/api/base.py | 8 +- billingstack/tests/base.py | 343 ++++++++++++++---- billingstack/tests/central/__init__.py | 8 - billingstack/tests/central/storage/base.py | 5 +- .../tests/central/storage/test_sqlalchemy.py | 12 +- 6 files changed, 274 insertions(+), 103 deletions(-) diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py index 82205ea..394adf5 100644 --- a/billingstack/biller/storage/impl_sqlalchemy.py +++ 
b/billingstack/biller/storage/impl_sqlalchemy.py @@ -14,6 +14,7 @@ """ A Usage plugin using sqlalchemy... """ + from oslo.config import cfg from sqlalchemy.ext.declarative import declarative_base diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index a516b38..a6e4035 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -155,17 +155,11 @@ def setUp(self): super(FunctionalTest, self).setUp() # NOTE: Needs to be started after the db schema is created - conn = self.get_storage_connection('central') - conn.setup_schema() + self.start_storage('central') self.start_service('central') - self.setSamples() self.app = factory({}) self.app.wsgi_app = FaultWrapperMiddleware(self.app.wsgi_app) self.app.wsgi_app = NoAuthContextMiddleware(self.app.wsgi_app) self.client = self.app.test_client() - - def tearDown(self): - self.services.central.stop() - super(FunctionalTest, self).tearDown() diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index 99f4c44..f726ca9 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -1,10 +1,18 @@ import copy -import unittest2 +import os +import shutil +import uuid + +import fixtures import mox +import stubout +import testtools + from oslo.config import cfg # NOTE: Currently disabled # from billingstack.openstack.common import policy from billingstack import exceptions +from billingstack import paths from billingstack import samples from billingstack.openstack.common.context import RequestContext, \ get_admin_context @@ -16,6 +24,207 @@ 'billingstack.openstack.common.rpc.impl_fake') +CONF = cfg.CONF +CONF.import_opt('host', 'billingstack.netconf') + + +STORAGE_CACHE = {} + + +# Config Methods +def set_config(**kwargs): + group = kwargs.pop('group', None) + + for k, v in kwargs.iteritems(): + cfg.CONF.set_override(k, v, group) + + +class ConfFixture(fixtures.Fixture): + """Fixture to manage global conf settings.""" + + def __init__(self, conf): + 
self.conf = conf + + def setUp(self): + super(ConfFixture, self).setUp() + self.conf.set_default('host', 'fake-mini') + self.conf.set_default('fake_rabbit', True) + self.conf.set_default('rpc_backend', + 'billingstack.openstack.common.rpc.impl_fake') + self.conf.set_default('rpc_cast_timeout', 5) + self.conf.set_default('rpc_response_timeout', 5) + self.conf.set_default('verbose', True) + self.addCleanup(self.conf.reset) + + +class FixtureHelper(object): + """Underlying helper object for a StorageFixture to hold driver methods""" + + def __init__(self, fixture): + """ + :param fixture: The fixture object + """ + self.fixture = fixture + + def setUp(self): + """Runs pr test, typically a db reset or similar""" + + def pre_migrate(self): + """Run before migrations""" + + def migrate(self): + """Migrate the storage""" + + def post_migrate(self): + """This is executed after migrations""" + + def post_init(self): + """Runs at the end of the object initialization""" + + +class SQLAlchemyHelper(FixtureHelper): + def __init__(self, fixture): + super(SQLAlchemyHelper, self).__init__(fixture) + + self.sqlite_db = fixture.kw.get('sqlite_db') + self.sqlite_clean_db = fixture.kw.get('sqlite_clean_db') + self.testdb = None + + def setUp(self): + if self.fixture.database_connection == "sqlite://": + conn = self.fixture.connection.engine.connect() + conn.connection.executescript(self._as_string) + self.fixture.addCleanup(self.fixture.connection.engine.dispose) + else: + shutil.copyfile(paths.state_path_rel(self.sqlite_clean_db), + paths.state_path_rel(self.sqlite_db)) + + def pre_migrate(self): + self.fixture.connection.engine.dispose() + self.fixture.connection.engine.connect() + if self.fixture.database_connection == "sqlite://": + #https://github.com/openstack/nova/blob/master/nova/test.py#L82-L84 + pass + else: + testdb = paths.state_path_rel(self.sqlite_db) + if os.path.exists(testdb): + return + + def migrate(self): + self.fixture.connection.setup_schema() + + def 
post_init(self): + if self.fixture.database_connection == "sqlite://": + conn = self.fixture.connection.engine.connect() + self._as_string = "".join( + l for l in conn.connection.iterdump()) + self.fixture.connection.engine.dispose() + else: + cleandb = paths.state_path_rel(self.sqlite_clean_db) + shutil.copyfile(self.testdb, cleandb) + + +class StorageFixture(fixtures.Fixture): + """ + Storage fixture that for now just supports SQLAlchemy + """ + def __init__(self, svc, **kw): + self.svc = svc + self.kw = kw + + self.driver = kw.get('storage_driver', 'sqlalchemy') + self.database_connection = kw.get('database_connection', 'sqlite://') + + self.svc_group = 'service:%s' % self.svc + self.driver_group = '%s:%s' % (self.svc, self.driver) + + cfg.CONF.import_opt('storage_driver', 'billingstack.%s' % self.svc, + group=self.svc_group) + set_config(storage_driver=self.driver, group=self.svc_group) + + # FIXME: Move this to a generic get_storage() method instead? + self.module = importutils.import_module( + 'billingstack.%s.storage' % self.svc) + + # FIXME: Workout a way to support the different storage types + self.helper = SQLAlchemyHelper(self) + + cfg.CONF.import_opt( + 'database_connection', + 'billingstack.%s.storage.impl_%s' % (self.svc, self.driver), + group=self.driver_group) + + set_config(database_connection=self.database_connection, + group=self.driver_group) + + self.connection = self.get_storage_connection(**kw) + + self.helper.pre_migrate() + self.helper.migrate() + self.helper.post_migrate() + self.helper.post_init() + + for hook in kw.get('hooks', []): + hook() + + def setUp(self): + super(StorageFixture, self).setUp() + self.helper.setUp() + + def get_storage_connection(self, **kw): + """ + Import the storage module for the service that we are going to act on, + then return a connection object for that storage module. + + :param service: The service. 
+ """ + engine = self.module.get_engine(self.driver) + return engine.get_connection() + + +class ServiceFixture(fixtures.Fixture): + """Run service as a test fixture, semi-copied from Nova""" + + def __init__(self, name, host=None, **kwargs): + host = host and host or uuid.uuid4().hex + kwargs.setdefault('host', host) + kwargs.setdefault('binary', 'billingstack-%s' % name) + self.name = name + self.kwargs = kwargs + + self.cls = self.get_service(self.name) + + @staticmethod + def get_service(svc): + """ + Return a service + + :param service: The service. + """ + return importutils.import_class('billingstack.%s.service.Service' % + svc) + + def setUp(self): + super(ServiceFixture, self).setUp() + self.service = self.cls() + self.service.start() + + +class MoxStubout(fixtures.Fixture): + """Deal with code around mox and stubout as a fixture.""" + + def setUp(self): + super(MoxStubout, self).setUp() + # emulate some of the mox stuff, we can't use the metaclass + # because it screws with our generators + self.mox = mox.Mox() + self.stubs = stubout.StubOutForTesting() + self.addCleanup(self.stubs.UnsetAll) + self.addCleanup(self.stubs.SmartUnsetAll) + self.addCleanup(self.mox.UnsetStubs) + self.addCleanup(self.mox.VerifyAll) + + class AssertMixin(object): """ Mixin to hold assert helpers. @@ -43,35 +252,56 @@ def assertData(self, expected_data, data): def assertDuplicate(self, func, *args, **kw): exception = kw.pop('exception', exceptions.Duplicate) - with self.assertRaises(exception): + with testtools.ExpectedException(exception): func(*args, **kw) def assertMissing(self, func, *args, **kw): exception = kw.pop('exception', exceptions.NotFound) - with self.assertRaises(exception): + with testtools.ExpectedException(exception): func(*args, **kw) -class BaseTestCase(unittest2.TestCase, AssertMixin): +class BaseTestCase(testtools.TestCase, AssertMixin): """ - A base test class. + A base test class to be used for typically non-service kind of things. 
""" def setUp(self): super(BaseTestCase, self).setUp() - self.mox = mox.Mox() - - def tearDown(self): - cfg.CONF.reset() - self.mox.UnsetStubs() - self.mox.VerifyAll() - super(BaseTestCase, self).tearDown() - - # Config Methods - def config(self, **kwargs): - group = kwargs.pop('group', None) - for k, v in kwargs.iteritems(): - cfg.CONF.set_override(k, v, group) + test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) + try: + test_timeout = int(test_timeout) + except ValueError: + # If timeout value is invalid do not set a timeout. + test_timeout = 0 + if test_timeout > 0: + self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) + + if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or + os.environ.get('OS_STDOUT_CAPTURE') == '1'): + stdout = self.useFixture(fixtures.StringStream('stdout')).stream + self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) + if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or + os.environ.get('OS_STDERR_CAPTURE') == '1'): + stderr = self.useFixture(fixtures.StringStream('stderr')).stream + self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) + + self.log_fixture = self.useFixture(fixtures.FakeLogger()) + self.useFixture(ConfFixture(cfg.CONF)) + + mox_fixture = self.useFixture(MoxStubout()) + self.mox = mox_fixture + self.stubs = mox_fixture.stubs + self.addCleanup(self._clear_attrs) + self.useFixture(fixtures.EnvironmentVariable('http_proxy')) + #self.policy = self.useFixture(policy_fixture.PolicyFixture()) + + def _clear_attrs(self): + # Delete attributes that don't start with _ so they don't pin + # memory around unnecessarily for the duration of the test + # suite + for key in [k for k in self.__dict__.keys() if k[0] != '_']: + del self.__dict__[key] def get_fixture(self, name, fixture=0, values={}): """ @@ -81,12 +311,6 @@ def get_fixture(self, name, fixture=0, values={}): _values.update(values) return _values - def get_admin_context(self): - return get_admin_context() - - def get_context(self, **kw): - return 
RequestContext(**kw) - class Services(dict): def __getattr__(self, name): @@ -99,60 +323,34 @@ def __setattr__(self, name, value): class TestCase(BaseTestCase): + """Base test case for services etc""" def setUp(self): super(TestCase, self).setUp() self.samples = samples.get_samples() self.admin_ctxt = self.get_admin_context() - self.config(rpc_backend='billingstack.openstack.common.rpc.impl_fake') - # NOTE: No services up by default self.services = Services() - def tearDown(self): - # NOTE: Currently disabled - for svc in self.services.values(): - svc.storage_conn.teardown_schema() - super(TestCase, self).tearDown() - - def get_storage_connection(self, service='central', **kw): - """ - Import the storage module for the service that we are going to act on, - then return a connection object for that storage module. - - :param service: The service. - """ - storage = importutils.import_module('billingstack.%s.storage' % - service) - - driver = kw.get('storage_driver', 'sqlalchemy') - engine = storage.get_engine(driver) - - self.config(storage_driver=driver, group='service:%s' % service) - - db = kw.get('database_connection', 'sqlite://') - self.config(database_connection=db, group='%s:%s' % (service, driver)) - - connection = engine.get_connection() - - return connection - - def get_service(self, service='central'): - """ - Return a service + def get_admin_context(self): + return get_admin_context() - :param service: The service. 
- """ - svc = importutils.import_class('billingstack.%s.service.Service' % - service) - return svc() + def get_context(self, **kw): + return RequestContext(**kw) - def start_service(self, service='central'): - svc = self.get_service(service=service) + def start_service(self, name, host=None, **kwargs): + fixture = self.useFixture(ServiceFixture(name, host, **kwargs)) + self.services[name] = fixture.service + return fixture - svc.start() - self.services[service] = svc + def start_storage(self, name, **kw): + fixture = StorageFixture(name, **kw) + global STORAGE_CACHE + if not name in STORAGE_CACHE: + STORAGE_CACHE[name] = fixture + self.useFixture(STORAGE_CACHE[name]) + return fixture def setSamples(self): _, self.currency = self.create_currency() @@ -203,6 +401,7 @@ def create_plan(self, merchant_id, fixture=0, values={}, **kw): class ServiceTestCase(TestCase): + """Testcase with some base methods when running in Service ish mode""" def create_language(self, fixture=0, values={}, **kw): fixture = self.get_fixture('language', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) @@ -277,15 +476,3 @@ def create_plan(self, merchant_id, fixture=0, values={}, **kw): ctxt = kw.pop('context', self.admin_ctxt) return fixture, self.services.central.create_plan( ctxt, merchant_id, fixture, **kw) - - -class StorageTestCase(TestCase): - def setUp(self): - super(StorageTestCase, self).setUp() - self.storage_conn = self.get_storage_connection() - self.storage_conn.setup_schema() - self.setSamples() - - def tearDown(self): - self.storage_conn.teardown_schema() - super(StorageTestCase, self).tearDown() diff --git a/billingstack/tests/central/__init__.py b/billingstack/tests/central/__init__.py index 9163550..e69de29 100644 --- a/billingstack/tests/central/__init__.py +++ b/billingstack/tests/central/__init__.py @@ -1,8 +0,0 @@ -from oslo.config import cfg - - -cfg.CONF.import_opt('storage_driver', 'billingstack.central', - group='service:central') 
-cfg.CONF.import_opt('database_connection', - 'billingstack.central.storage.impl_sqlalchemy', - group='central:sqlalchemy') diff --git a/billingstack/tests/central/storage/base.py b/billingstack/tests/central/storage/base.py index 716aecf..1a4c3b6 100644 --- a/billingstack/tests/central/storage/base.py +++ b/billingstack/tests/central/storage/base.py @@ -15,7 +15,6 @@ # under the License. from billingstack.openstack.common import log as logging from billingstack.central.storage.impl_sqlalchemy import models -from billingstack.tests.base import StorageTestCase LOG = logging.getLogger(__name__) @@ -24,9 +23,7 @@ UUID = 'caf771fc-6b05-4891-bee1-c2a48621f57b' -class StorageDriverTestCase(StorageTestCase): - __test__ = False - +class DriverMixin(object): def create_language(self, fixture=0, values={}, **kw): fixture = self.get_fixture('language', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) diff --git a/billingstack/tests/central/storage/test_sqlalchemy.py b/billingstack/tests/central/storage/test_sqlalchemy.py index dc846b6..c9e59cf 100644 --- a/billingstack/tests/central/storage/test_sqlalchemy.py +++ b/billingstack/tests/central/storage/test_sqlalchemy.py @@ -16,15 +16,15 @@ # # Copied: billingstack from billingstack.openstack.common import log as logging -from billingstack.tests.central.storage.base import StorageDriverTestCase +from billingstack.tests.base import TestCase +from billingstack.tests.central.storage.base import DriverMixin LOG = logging.getLogger(__name__) -class SqlalchemyStorageTest(StorageDriverTestCase): - __test__ = True - +class SqlalchemyStorageTest(DriverMixin, TestCase): def setUp(self): - self.config(database_connection='sqlite://', - group='central:sqlalchemy') super(SqlalchemyStorageTest, self).setUp() + fixture = self.start_storage('central') + self.storage_conn = fixture.connection + self.setSamples() From 61fb5dcdd27b66d0e2d05ba51bcb93613814f41b Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 1 Jun 2013 19:55:56 
+0200 Subject: [PATCH 136/182] License headers Change-Id: I1d36a8f20480ebee716d9ca75e8baa62fe9a64e1 --- billingstack/__init__.py | 15 +++++++++++++++ billingstack/api/base.py | 16 ++++++++++++++++ billingstack/api/utils.py | 4 +++- billingstack/biller/__init__.py | 15 +++++++++++++++ billingstack/biller/rpcapi.py | 15 +++++++++++++++ billingstack/biller/service.py | 15 +++++++++++++++ billingstack/biller/storage/__init__.py | 16 ++++++++++++++++ billingstack/biller/storage/impl_sqlalchemy.py | 2 ++ billingstack/central/__init__.py | 15 +++++++++++++++ billingstack/central/rpcapi.py | 15 +++++++++++++++ billingstack/central/service.py | 15 +++++++++++++++ billingstack/central/storage/__init__.py | 1 + .../central/storage/impl_sqlalchemy/__init__.py | 2 ++ .../central/storage/impl_sqlalchemy/models.py | 2 ++ billingstack/collector/__init__.py | 15 +++++++++++++++ billingstack/collector/rpcapi.py | 15 +++++++++++++++ billingstack/collector/service.py | 15 +++++++++++++++ billingstack/conf.py | 15 +++++++++++++++ billingstack/exceptions.py | 3 +++ billingstack/manage/__init__.py | 2 ++ billingstack/manage/base.py | 2 ++ billingstack/manage/database.py | 4 ++-- billingstack/manage/provider.py | 15 +++++++++++++++ billingstack/payment_gateway/__init__.py | 15 +++++++++++++++ billingstack/payment_gateway/base.py | 15 +++++++++++++++ billingstack/payment_gateway/dummy.py | 15 +++++++++++++++ billingstack/plugin.py | 4 ++-- billingstack/rater/__init__.py | 15 +++++++++++++++ billingstack/rater/rpcapi.py | 15 +++++++++++++++ billingstack/rater/service.py | 15 +++++++++++++++ billingstack/rater/storage/__init__.py | 15 +++++++++++++++ billingstack/rater/storage/impl_sqlalchemy.py | 2 ++ billingstack/samples.py | 15 +++++++++++++++ billingstack/service.py | 2 -- billingstack/sqlalchemy/__init__.py | 15 +++++++++++++++ billingstack/sqlalchemy/api.py | 5 +++-- billingstack/sqlalchemy/utils.py | 15 +++++++++++++++ billingstack/storage/__init__.py | 15 +++++++++++++++ 
billingstack/storage/filterer.py | 15 +++++++++++++++ billingstack/utils.py | 13 +++++++++++++ 40 files changed, 431 insertions(+), 9 deletions(-) diff --git a/billingstack/__init__.py b/billingstack/__init__.py index e69de29..f7ed5c6 100644 --- a/billingstack/__init__.py +++ b/billingstack/__init__.py @@ -0,0 +1,15 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/billingstack/api/base.py b/billingstack/api/base.py index 9f8d036..8ba2c79 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -1,3 +1,19 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ import functools import mimetypes diff --git a/billingstack/api/utils.py b/billingstack/api/utils.py index 85346fe..d0bc991 100644 --- a/billingstack/api/utils.py +++ b/billingstack/api/utils.py @@ -1,5 +1,7 @@ # -*- encoding: utf-8 -*- -## +# +# Author: Endre Karlson +# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at diff --git a/billingstack/biller/__init__.py b/billingstack/biller/__init__.py index 6bbc360..7c6e629 100644 --- a/billingstack/biller/__init__.py +++ b/billingstack/biller/__init__.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg cfg.CONF.register_group(cfg.OptGroup( diff --git a/billingstack/biller/rpcapi.py b/billingstack/biller/rpcapi.py index 85f69e4..30e5b63 100644 --- a/billingstack/biller/rpcapi.py +++ b/billingstack/biller/rpcapi.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg from billingstack.openstack.common.rpc import proxy diff --git a/billingstack/biller/service.py b/billingstack/biller/service.py index 9ff366a..71cf27f 100644 --- a/billingstack/biller/service.py +++ b/billingstack/biller/service.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service diff --git a/billingstack/biller/storage/__init__.py b/billingstack/biller/storage/__init__.py index 0ad0cfc..5aef594 100644 --- a/billingstack/biller/storage/__init__.py +++ b/billingstack/biller/storage/__init__.py @@ -1,3 +1,19 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + from oslo.config import cfg from billingstack.storage import base diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py index 394adf5..aaf25c7 100644 --- a/billingstack/biller/storage/impl_sqlalchemy.py +++ b/billingstack/biller/storage/impl_sqlalchemy.py @@ -1,3 +1,5 @@ +# -*- encoding: utf-8 -*- +# # Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/central/__init__.py b/billingstack/central/__init__.py index ecdf98b..e7b0032 100644 --- a/billingstack/central/__init__.py +++ b/billingstack/central/__init__.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
from oslo.config import cfg cfg.CONF.register_group(cfg.OptGroup( diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 461dd96..604fdc8 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg from billingstack.openstack.common.rpc import proxy diff --git a/billingstack/central/service.py b/billingstack/central/service.py index f18c855..e055b37 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
import functools from oslo.config import cfg from billingstack.openstack.common import log as logging diff --git a/billingstack/central/storage/__init__.py b/billingstack/central/storage/__init__.py index c62cee2..d58b705 100644 --- a/billingstack/central/storage/__init__.py +++ b/billingstack/central/storage/__init__.py @@ -13,6 +13,7 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +# # Copied: Moniker from oslo.config import cfg from billingstack.openstack.common import log as logging diff --git a/billingstack/central/storage/impl_sqlalchemy/__init__.py b/billingstack/central/storage/impl_sqlalchemy/__init__.py index a73ef88..49819f8 100644 --- a/billingstack/central/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/central/storage/impl_sqlalchemy/__init__.py @@ -1,3 +1,5 @@ +# -*- encoding: utf-8 -*- +# # Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/central/storage/impl_sqlalchemy/models.py b/billingstack/central/storage/impl_sqlalchemy/models.py index f745f95..5252331 100644 --- a/billingstack/central/storage/impl_sqlalchemy/models.py +++ b/billingstack/central/storage/impl_sqlalchemy/models.py @@ -1,3 +1,5 @@ +# -*- encoding: utf-8 -*- +# # Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/collector/__init__.py b/billingstack/collector/__init__.py index 322c56b..c3aaa39 100644 --- a/billingstack/collector/__init__.py +++ b/billingstack/collector/__init__.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg cfg.CONF.register_group(cfg.OptGroup( diff --git a/billingstack/collector/rpcapi.py b/billingstack/collector/rpcapi.py index 3d4d99e..f678f6e 100644 --- a/billingstack/collector/rpcapi.py +++ b/billingstack/collector/rpcapi.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg from billingstack.openstack.common.rpc import proxy diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py index b302d30..906d6ac 100644 --- a/billingstack/collector/service.py +++ b/billingstack/collector/service.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. """ A service that does calls towards the PGP web endpoint or so """ diff --git a/billingstack/conf.py b/billingstack/conf.py index fd88d31..0e56443 100644 --- a/billingstack/conf.py +++ b/billingstack/conf.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. import os from oslo.config import cfg diff --git a/billingstack/exceptions.py b/billingstack/exceptions.py index e229b90..3ca9d50 100644 --- a/billingstack/exceptions.py +++ b/billingstack/exceptions.py @@ -1,6 +1,9 @@ +# -*- encoding: utf-8 -*- +# # Copyright 2012 Managed I.T. # # Author: Kiall Mac Innes +# Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain diff --git a/billingstack/manage/__init__.py b/billingstack/manage/__init__.py index 06f4f6b..92d5c66 100644 --- a/billingstack/manage/__init__.py +++ b/billingstack/manage/__init__.py @@ -13,6 +13,8 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +# +# Copied: Moniker from oslo.config import cfg from cliff.app import App from cliff.commandmanager import CommandManager diff --git a/billingstack/manage/base.py b/billingstack/manage/base.py index 76402e7..e28e566 100644 --- a/billingstack/manage/base.py +++ b/billingstack/manage/base.py @@ -13,6 +13,8 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +# +# Copied: Moniker from cliff.command import Command as CliffCommand from cliff.lister import Lister from cliff.show import ShowOne diff --git a/billingstack/manage/database.py b/billingstack/manage/database.py index 06c00cf..667240e 100644 --- a/billingstack/manage/database.py +++ b/billingstack/manage/database.py @@ -1,3 +1,5 @@ +# -*- encoding: utf-8 -*- +# # Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -11,8 +13,6 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
- - from oslo.config import cfg from billingstack.openstack.common import log from billingstack.manage.base import Command diff --git a/billingstack/manage/provider.py b/billingstack/manage/provider.py index 8251fb6..ac31ad8 100644 --- a/billingstack/manage/provider.py +++ b/billingstack/manage/provider.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from billingstack.openstack.common.context import get_admin_context from billingstack.payment_gateway import register_providers from billingstack.manage.base import ListCommand diff --git a/billingstack/payment_gateway/__init__.py b/billingstack/payment_gateway/__init__.py index 3e47b87..b9f13ac 100644 --- a/billingstack/payment_gateway/__init__.py +++ b/billingstack/payment_gateway/__init__.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
from stevedore.extension import ExtensionManager from billingstack import exceptions diff --git a/billingstack/payment_gateway/base.py b/billingstack/payment_gateway/base.py index f60ce2f..5a9f229 100644 --- a/billingstack/payment_gateway/base.py +++ b/billingstack/payment_gateway/base.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from billingstack.central.storage import get_connection from billingstack.plugin import Plugin diff --git a/billingstack/payment_gateway/dummy.py b/billingstack/payment_gateway/dummy.py index 3cc2997..7cd373e 100644 --- a/billingstack/payment_gateway/dummy.py +++ b/billingstack/payment_gateway/dummy.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
from billingstack.payment_gateway.base import Provider diff --git a/billingstack/plugin.py b/billingstack/plugin.py index 31560ab..ee92afb 100644 --- a/billingstack/plugin.py +++ b/billingstack/plugin.py @@ -1,6 +1,6 @@ -# Copyright 2012 Bouvet ASA +# -*- encoding: utf-8 -*- # -# Author: Endre Karlson +# Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain diff --git a/billingstack/rater/__init__.py b/billingstack/rater/__init__.py index 4b07b70..ef1989d 100644 --- a/billingstack/rater/__init__.py +++ b/billingstack/rater/__init__.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg cfg.CONF.register_group(cfg.OptGroup( diff --git a/billingstack/rater/rpcapi.py b/billingstack/rater/rpcapi.py index 87868ae..2e53c78 100644 --- a/billingstack/rater/rpcapi.py +++ b/billingstack/rater/rpcapi.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg from billingstack.openstack.common.rpc import proxy diff --git a/billingstack/rater/service.py b/billingstack/rater/service.py index 0181d93..c450c6f 100644 --- a/billingstack/rater/service.py +++ b/billingstack/rater/service.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service diff --git a/billingstack/rater/storage/__init__.py b/billingstack/rater/storage/__init__.py index 3f01725..078b25c 100644 --- a/billingstack/rater/storage/__init__.py +++ b/billingstack/rater/storage/__init__.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from oslo.config import cfg from billingstack.storage import base diff --git a/billingstack/rater/storage/impl_sqlalchemy.py b/billingstack/rater/storage/impl_sqlalchemy.py index f6a503f..4ebac66 100644 --- a/billingstack/rater/storage/impl_sqlalchemy.py +++ b/billingstack/rater/storage/impl_sqlalchemy.py @@ -1,3 +1,5 @@ +# -*- encoding: utf-8 -*- +# # Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/billingstack/samples.py b/billingstack/samples.py index 333d134..b55fe6c 100644 --- a/billingstack/samples.py +++ b/billingstack/samples.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
import glob import os.path diff --git a/billingstack/service.py b/billingstack/service.py index ce3bfaa..182bdb9 100644 --- a/billingstack/service.py +++ b/billingstack/service.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python # -*- encoding: utf-8 -*- # # Copyright © 2012 eNovance @@ -16,7 +15,6 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. - from oslo.config import cfg from billingstack.openstack.common import rpc from billingstack.openstack.common import context diff --git a/billingstack/sqlalchemy/__init__.py b/billingstack/sqlalchemy/__init__.py index e69de29..f7ed5c6 100644 --- a/billingstack/sqlalchemy/__init__.py +++ b/billingstack/sqlalchemy/__init__.py @@ -0,0 +1,15 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py index b123b71..a9c44be 100644 --- a/billingstack/sqlalchemy/api.py +++ b/billingstack/sqlalchemy/api.py @@ -1,3 +1,5 @@ +# -*- encoding: utf-8 -*- +# # Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may @@ -10,8 +12,7 @@ # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations -# under the License - +# under the License. from sqlalchemy.orm import exc diff --git a/billingstack/sqlalchemy/utils.py b/billingstack/sqlalchemy/utils.py index 1357869..50e0465 100644 --- a/billingstack/sqlalchemy/utils.py +++ b/billingstack/sqlalchemy/utils.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from sqlalchemy.orm.properties import ColumnProperty, RelationshipProperty from billingstack.openstack.common import uuidutils diff --git a/billingstack/storage/__init__.py b/billingstack/storage/__init__.py index e69de29..f7ed5c6 100644 --- a/billingstack/storage/__init__.py +++ b/billingstack/storage/__init__.py @@ -0,0 +1,15 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
diff --git a/billingstack/storage/filterer.py b/billingstack/storage/filterer.py index aecf248..f04b5bc 100644 --- a/billingstack/storage/filterer.py +++ b/billingstack/storage/filterer.py @@ -1,3 +1,18 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. from billingstack import exceptions from billingstack.openstack.common import log diff --git a/billingstack/utils.py b/billingstack/utils.py index dd6bb32..ca429cb 100644 --- a/billingstack/utils.py +++ b/billingstack/utils.py @@ -1,3 +1,16 @@ +# -*- encoding: utf-8 -*- +## +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
import os import pycountry import re From 84450e6d22e57bd63f17e47c7f5a9808d309e610 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 2 Jun 2013 14:00:10 +0200 Subject: [PATCH 137/182] Use console scripts for backends Change-Id: Ib82a81189f7ae43749fd3e7a4171a23f69b7cae6 --- billingstack/central/service.py | 11 +++++++++++ billingstack/collector/service.py | 11 +++++++++++ billingstack/rater/service.py | 11 +++++++++++ billingstack/service.py | 15 ++++++++++++++ bin/billingstack-biller | 32 ------------------------------ bin/billingstack-central | 33 ------------------------------- bin/billingstack-collector | 32 ------------------------------ bin/billingstack-rater | 33 ------------------------------- setup.cfg | 10 ++++++---- 9 files changed, 54 insertions(+), 134 deletions(-) delete mode 100644 bin/billingstack-biller delete mode 100644 bin/billingstack-central delete mode 100644 bin/billingstack-collector delete mode 100644 bin/billingstack-rater diff --git a/billingstack/central/service.py b/billingstack/central/service.py index e055b37..e03785f 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -14,10 +14,14 @@ # License for the specific language governing permissions and limitations # under the License. 
import functools +import sys + from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service +from billingstack.openstack.common import service as os_service from billingstack.central import storage +from billingstack import service as bs_service cfg.CONF.import_opt('central_topic', 'billingstack.central.rpcapi') @@ -295,3 +299,10 @@ def update_subscription(self, ctxt, id_, values): def delete_subscription(self, ctxt, id_): return self.storage_conn.delete_subscription(ctxt, id_) + + +def launch(): + bs_service.prepare_service(sys.argv) + launcher = os_service.launch(Service(), + cfg.CONF['service:central'].workers) + launcher.wait() diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py index 906d6ac..d8d962a 100644 --- a/billingstack/collector/service.py +++ b/billingstack/collector/service.py @@ -18,10 +18,14 @@ """ import functools +import sys + from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service +from billingstack.openstack.common import service as os_service from billingstack.central.rpcapi import CentralAPI +from billingstack import service as bs_service cfg.CONF.import_opt('host', 'billingstack.netconf') @@ -79,3 +83,10 @@ def _wrapper(*args, **kw): return f(*args, **kw) setattr(self, name, _wrapper) return _wrapper + + +def launch(): + bs_service.prepare_service(sys.argv) + launcher = os_service.launch(Service(), + cfg.CONF['service:collector'].workers) + launcher.wait() diff --git a/billingstack/rater/service.py b/billingstack/rater/service.py index c450c6f..dbc4c63 100644 --- a/billingstack/rater/service.py +++ b/billingstack/rater/service.py @@ -13,10 +13,14 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
+import sys + from oslo.config import cfg from billingstack.openstack.common import log as logging +from billingstack.openstack.common import service as os_service from billingstack.openstack.common.rpc import service as rpc_service from billingstack.rater import storage +from billingstack import service as bs_service cfg.CONF.import_opt('rater_topic', 'billingstack.rater.rpcapi') @@ -60,3 +64,10 @@ def update_usage(self, ctxt, id_, values): def delete_usage(self, ctxt, id_): return self.storage_conn.delete_usage(ctxt, id_) + + +def launch(): + bs_service.prepare_service(sys.argv) + launcher = os_service.launch(Service(), + cfg.CONF['service:rater'].workers) + launcher.wait() diff --git a/billingstack/service.py b/billingstack/service.py index 182bdb9..f728a7a 100644 --- a/billingstack/service.py +++ b/billingstack/service.py @@ -15,11 +15,15 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
+import eventlet +import sys + from oslo.config import cfg from billingstack.openstack.common import rpc from billingstack.openstack.common import context from billingstack.openstack.common import log from billingstack.openstack.common.rpc import service as rpc_service +from billingstack import utils cfg.CONF.register_opts([ @@ -42,6 +46,17 @@ def start(self): def prepare_service(argv=[]): + eventlet.monkey_patch() + utils.read_config('billingstack', sys.argv) + rpc.set_defaults(control_exchange='billingstack') + cfg.set_defaults(log.log_opts, + default_log_levels=['amqplib=WARN', + 'qpid.messaging=INFO', + 'sqlalchemy=WARN', + 'keystoneclient=INFO', + 'stevedore=INFO', + 'eventlet.wsgi.server=WARN' + ]) cfg.CONF(argv[1:], project='billingstack') log.setup('billingstack') diff --git a/bin/billingstack-biller b/bin/billingstack-biller deleted file mode 100644 index 0478d16..0000000 --- a/bin/billingstack-biller +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import sys -import eventlet -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service -from billingstack import utils -from billingstack.biller import service as biller_service - -eventlet.monkey_patch() - -utils.read_config('billingstack', sys.argv) - -logging.setup('billingstack') - -launcher = service.launch(biller_service.Service(), - cfg.CONF['service:biller'].workers) -launcher.wait() diff --git a/bin/billingstack-central b/bin/billingstack-central deleted file mode 100644 index 2f4eed1..0000000 --- a/bin/billingstack-central +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import sys -import eventlet -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service -from billingstack import utils -from billingstack.central import service as central_service - -eventlet.monkey_patch() - -utils.read_config('billingstack', sys.argv) - -logging.setup('billingstack') - -launcher = service.launch(central_service.Service(), - cfg.CONF['service:central'].workers) -launcher.wait() diff --git a/bin/billingstack-collector b/bin/billingstack-collector deleted file mode 100644 index 4a73d2c..0000000 --- a/bin/billingstack-collector +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import sys -import eventlet -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service -from billingstack import utils -from billingstack.collector import service as collector_service - -eventlet.monkey_patch() - -utils.read_config('billingstack', sys.argv) - -logging.setup('billingstack') - -launcher = service.launch(collector_service.Service(), - cfg.CONF['service:collector'].workers) -launcher.wait() diff --git a/bin/billingstack-rater b/bin/billingstack-rater deleted file mode 100644 index 86df9b1..0000000 --- a/bin/billingstack-rater +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# Copyright 2012 Managed I.T. 
-# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import sys -import eventlet -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service -from billingstack import utils -from billingstack.rater import service as rater_service - -eventlet.monkey_patch() - -utils.read_config('billingstack', sys.argv) - -logging.setup('billingstack') - -launcher = service.launch(rater_service.Service(), - cfg.CONF['service:rater'].workers) -launcher.wait() diff --git a/setup.cfg b/setup.cfg index 019b3bc..9158949 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,12 +28,14 @@ scripts = bin/billingstack-db-manage bin/billingstack-manage bin/billingstack-api - bin/billingstack-central - bin/billingstack-biller - bin/billingstack-collector - bin/billingstack-rater [entry_points] +console_scripts = + billingstack-biller = billingstack.biller.service:launch + billingstack-central = billingstack.central.service:launch + billingstack-collector = billingstack.collector.service:launch + billingstack-rater = billingstack.rater.service:launch + billingstack.central.storage = sqlalchemy = billingstack.central.storage.impl_sqlalchemy:SQLAlchemyEngine From facdc71b2701c10380a05c3e8cd686767fab4b2b Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 2 Jun 2013 23:51:33 +0200 Subject: [PATCH 138/182] Run helper Change-Id: I77ae32408a3241f224b73dfb8f1d6402334d95c9 --- .gitignore | 4 + tools/control.sh 
| 217 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 221 insertions(+) create mode 100755 tools/control.sh diff --git a/.gitignore b/.gitignore index 6dcb3e5..d04a09b 100644 --- a/.gitignore +++ b/.gitignore @@ -47,3 +47,7 @@ etc/billingstack/*.ini etc/billingstack/*.conf billingstack/versioninfo *.sqlite + + +billingstack-screenrc +run/ diff --git a/tools/control.sh b/tools/control.sh new file mode 100755 index 0000000..5771162 --- /dev/null +++ b/tools/control.sh @@ -0,0 +1,217 @@ +#!/usr/bin/env bash + +# script to help with BS + +# Dependencies: +# - functions + +# Save trace setting +XTRACE=$(set +o | grep xtrace) +set -x + +# Keep track of this directory +TOOL_DIR=$(cd $(dirname "$0") && pwd) +TOP_DIR=$TOOL_DIR/.. + +RUN_DIR=$TOP_DIR/run + +SCREEN_NAME=billingstack +SCREEN_LOGDIR=$RUN_DIR + +CONF_DIR=$TOP_DIR/etc/billingstack +CONFIG=${CONFIG:-$CONF_DIR/billingstack.conf} + + +function ensure_dir() { + local dir=$1 + [ ! -d "$dir" ] && { + echo "Attempting to create $dir" + mkdir -p $dir + } +} + + +# Normalize config values to True or False +# Accepts as False: 0 no false False FALSE +# Accepts as True: 1 yes true True TRUE +# VAR=$(trueorfalse default-value test-value) +function trueorfalse() { + local default=$1 + local testval=$2 + + [[ -z "$testval" ]] && { echo "$default"; return; } + [[ "0 no false False FALSE" =~ "$testval" ]] && { echo "False"; return; } + [[ "1 yes true True TRUE" =~ "$testval" ]] && { echo "True"; return; } + echo "$default" +} + + +# _run_process() is designed to be backgrounded by run_process() to simulate a +# fork. It includes the dirty work of closing extra filehandles and preparing log +# files to produce the same logs as screen_it(). 
The log filename is derived +# from the service name and global-and-now-misnamed SCREEN_LOGDIR +# _run_process service "command-line" +function _run_process() { + local service=$1 + local command="$2" + + # Undo logging redirections and close the extra descriptors + exec 1>&3 + exec 2>&3 + exec 3>&- + exec 6>&- + + if [[ -n ${SCREEN_LOGDIR} ]]; then + exec 1>&${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log 2>&1 + ln -sf ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log ${SCREEN_LOGDIR}/screen-${1}.log + + # TODO(dtroyer): Hack to get stdout from the Python interpreter for the logs. + export PYTHONUNBUFFERED=1 + fi + + exec /bin/bash -c "$command" + die "$service exec failure: $command" +} + + +# run_process() launches a child process that closes all file descriptors and +# then exec's the passed in command. This is meant to duplicate the semantics +# of screen_it() without screen. PIDs are written to +# $SERVICE_DIR/$SCREEN_NAME/$service.pid +# run_process service "command-line" +function run_process() { + local service=$1 + local command="$2" + + # Spawn the child process + _run_process "$service" "$command" & + echo $! 
+} + + + +# Helper to launch a service in a named screen +# screen_it service "command-line" +function screen_it { + SCREEN_NAME=${SCREEN_NAME:-stack} + SERVICE_DIR=${SERVICE_DIR:-${RUN_DIR}/status} + USE_SCREEN=$(trueorfalse True $USE_SCREEN) + + if is_service_enabled $1; then + # Append the service to the screen rc file + screen_rc "$1" "$2" + + if [[ "$USE_SCREEN" = "True" ]]; then + screen -S $SCREEN_NAME -X screen -t $1 + + if [[ -n ${SCREEN_LOGDIR} ]]; then + screen -S $SCREEN_NAME -p $1 -X logfile ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log + screen -S $SCREEN_NAME -p $1 -X log on + ln -sf ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log ${SCREEN_LOGDIR}/screen-${1}.log + fi + + # sleep to allow bash to be ready to be send the command - we are + # creating a new window in screen and then sends characters, so if + # bash isn't running by the time we send the command, nothing happens + sleep 1.5 + + NL=`echo -ne '\015'` + screen -S $SCREEN_NAME -p $1 -X stuff "$2 || touch \"$SERVICE_DIR/$SCREEN_NAME/$1.failure\"$NL" + else + # Spawn directly without screen + run_process "$1" "$2" >$SERVICE_DIR/$SCREEN_NAME/$service.pid + fi + fi +} + + +# Screen rc file builder +# screen_rc service "command-line" +function screen_rc { + SCREEN_NAME=${SCREEN_NAME:-stack} + SCREENRC=$TOP_DIR/$SCREEN_NAME-screenrc + if [[ ! -e $SCREENRC ]]; then + # Name the screen session + echo "sessionname $SCREEN_NAME" > $SCREENRC + # Set a reasonable statusbar + echo "hardstatus alwayslastline '$SCREEN_HARDSTATUS'" >> $SCREENRC + echo "screen -t shell bash" >> $SCREENRC + fi + # If this service doesn't already exist in the screenrc file + if ! grep $1 $SCREENRC 2>&1 > /dev/null; then + NL=`echo -ne '\015'` + echo "screen -t $1 bash" >> $SCREENRC + echo "stuff \"$2$NL\"" >> $SCREENRC + fi +} + +# Uses global ``ENABLED_SERVICES`` +# is_service_enabled service [service ...] 
+function is_service_enabled() { + services=$@ + return 0 +} + + +function screen_setup() { + ensure_dir $SCREEN_LOGDIR + + # Check to see if we are already running DevStack + # Note that this may fail if USE_SCREEN=False + if type -p screen >/dev/null && screen -ls | egrep -q "[0-9].$SCREEN_NAME"; then + echo "You are already running a stack.sh session." + echo "To rejoin this session type 'screen -x stack'." + echo "To destroy this session, type './unstack.sh'." + exit 1 + fi + + USE_SCREEN=$(trueorfalse True $USE_SCREEN) + echo $USE_SCREEN + if [[ "$USE_SCREEN" == "True" ]]; then + # Create a new named screen to run processes in + screen -d -m -S $SCREEN_NAME -t shell -s /bin/bash + sleep 1 + + # Set a reasonable status bar + if [ -z "$SCREEN_HARDSTATUS" ]; then + SCREEN_HARDSTATUS='%{= .} %-Lw%{= .}%> %n%f %t*%{= .}%+Lw%< %-=%{g}(%{d}%H/%l%{g})' + fi + screen -r $SCREEN_NAME -X hardstatus alwayslastline "$SCREEN_HARDSTATUS" + fi + + # Clear screen rc file + SCREENRC=$TOP_DIR/$SCREEN_NAME-screenrc + if [[ -e $SCREENRC ]]; then + echo -n > $SCREENRC + fi +} + + +function screen_destroy() { + SCREEN=$(which screen) + if [[ -n "$SCREEN" ]]; then + SESSION=$(screen -ls | awk '/[0-9].billingstack/ { print $1 }') + if [[ -n "$SESSION" ]]; then + screen -X -S $SESSION quit + fi + fi +} + + +function prereq_setup() { + ensure_dir $RUN_DIR + ensure_dir $SCREEN_DIR +} + + +case $1 in + start) + prereq_setup + screen_setup + screen_it bs-central "$TOOL_DIR/with_venv.sh billingstack-central --config-file $CONFIG" + screen_it bs-api "$TOOL_DIR/with_venv.sh billingstack-api --config-file $CONFIG" + ;; + stop) + screen_destroy + ;; +esac \ No newline at end of file From 11cd25a051de3695d242439c21d56cec6c6eba2d Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 8 Jun 2013 15:31:20 +0200 Subject: [PATCH 139/182] Use Oslo JSON Change-Id: If98cabd34a6c68173ac59a784325fe4ab431a347 --- billingstack/samples.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/billingstack/samples.py b/billingstack/samples.py index b55fe6c..3e18e57 100644 --- a/billingstack/samples.py +++ b/billingstack/samples.py @@ -16,7 +16,7 @@ import glob import os.path -import anyjson as json +from billingstack.openstack.common import jsonutils as json DIR = os.path.join(os.path.dirname(__file__), 'samples_data') From 2dc89d3997d338a8cff9b4c43e2cbc7c95e0c46b Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 8 Jun 2013 15:33:36 +0200 Subject: [PATCH 140/182] Make it executable Change-Id: Icd3d72badbc78e525cb0da1dc616626ce4ae2af7 --- bin/billingstack-manage | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 bin/billingstack-manage diff --git a/bin/billingstack-manage b/bin/billingstack-manage old mode 100644 new mode 100755 From 048f6a08412e18d8b91b28ee56c9727b99da1780 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 8 Jun 2013 16:32:57 +0200 Subject: [PATCH 141/182] Better functionality for start Change-Id: I9651efe4362d51632193fa046644a53077d8a5d0 --- tools/control.sh | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/tools/control.sh b/tools/control.sh index 5771162..f55376f 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -21,6 +21,7 @@ SCREEN_LOGDIR=$RUN_DIR CONF_DIR=$TOP_DIR/etc/billingstack CONFIG=${CONFIG:-$CONF_DIR/billingstack.conf} +SERVICES="api,central,rater,biller,collector" function ensure_dir() { local dir=$1 @@ -204,14 +205,32 @@ function prereq_setup() { } +function start_svc() { + svc="$(echo "$svc" | sed 's/bs-//')" + screen_it bs-$svc "billingstack-$svc --config-file $CONFIG" +} + + +function start() { + local svc=$1 + [ "$svc" == 'all' ] && { + for s in $SERVICES; do + start_svc $s + done + return + } + start_svc $svc +} + + case $1 in start) prereq_setup screen_setup - screen_it bs-central "$TOOL_DIR/with_venv.sh billingstack-central --config-file $CONFIG" - screen_it bs-api "$TOOL_DIR/with_venv.sh billingstack-api --config-file 
$CONFIG" + + start $2 ;; stop) screen_destroy ;; -esac \ No newline at end of file +esac From 850feaf0199126d37fc5d273bb74cb783e50ae1a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 8 Jun 2013 19:14:01 +0200 Subject: [PATCH 142/182] Start all by default Change-Id: Ieb2d140ead15da75f7009a0355e4758fdfd54ebb --- tools/control.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/control.sh b/tools/control.sh index f55376f..ce16a31 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -228,6 +228,8 @@ case $1 in prereq_setup screen_setup + svc=$2 + [ -z "$svc" ] && svc=all start $2 ;; stop) From 5c3a6efa3ca7eec18535f715b0a230e39eefc4d4 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 8 Jun 2013 21:32:51 +0200 Subject: [PATCH 143/182] Echo out service name and remove unused ensure Change-Id: I9a041e0558b6bfbc94d3b500b49a08878d95157c --- tools/control.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/control.sh b/tools/control.sh index ce16a31..e03949d 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -201,12 +201,12 @@ function screen_destroy() { function prereq_setup() { ensure_dir $RUN_DIR - ensure_dir $SCREEN_DIR } function start_svc() { svc="$(echo "$svc" | sed 's/bs-//')" + echo "Starting service: $svc" screen_it bs-$svc "billingstack-$svc --config-file $CONFIG" } @@ -230,6 +230,7 @@ case $1 in svc=$2 [ -z "$svc" ] && svc=all + echo "Starting service(s): $svc" start $2 ;; stop) From 5d61c711a53cab8fc88e614b819e70d8689b7bad Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 8 Jun 2013 22:05:19 +0200 Subject: [PATCH 144/182] Fix bugs with start. 
Change-Id: I4e3ad7443f07de390c368e483d1e5e4f754857dd --- tools/control.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/control.sh b/tools/control.sh index e03949d..5268a2b 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -205,7 +205,7 @@ function prereq_setup() { function start_svc() { - svc="$(echo "$svc" | sed 's/bs-//')" + svc="$(echo "$1" | sed 's/bs-//')" echo "Starting service: $svc" screen_it bs-$svc "billingstack-$svc --config-file $CONFIG" } @@ -214,7 +214,7 @@ function start_svc() { function start() { local svc=$1 [ "$svc" == 'all' ] && { - for s in $SERVICES; do + for s in $(echo "$SERVICES" | tr ',' ' '); do start_svc $s done return @@ -231,7 +231,7 @@ case $1 in svc=$2 [ -z "$svc" ] && svc=all echo "Starting service(s): $svc" - start $2 + start $svc ;; stop) screen_destroy From 1d5467962d8137c15b153016dff36a4c036d6c81 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 9 Jun 2013 00:25:22 +0200 Subject: [PATCH 145/182] Cutdown on settings Change-Id: I5c062fcf9fef5847a4ff693c598308decb58c857 --- tools/control.sh | 20 ++++++-------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/tools/control.sh b/tools/control.sh index 5268a2b..9fc53e6 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -10,16 +10,13 @@ XTRACE=$(set +o | grep xtrace) set -x # Keep track of this directory -TOOL_DIR=$(cd $(dirname "$0") && pwd) -TOP_DIR=$TOOL_DIR/.. 
+SCRIPT_DIR=$(cd $(dirname "$0") && pwd) +BASE_DIR=${BASE_DIR:-$SCRIPT_DIR/..} -RUN_DIR=$TOP_DIR/run +SCREEN_NAME=${SCREEN_NAME:-billingstack} +SCREEN_LOGDIR=${SCREEN_LOGDIR:-$BASE_DIR/logs} -SCREEN_NAME=billingstack -SCREEN_LOGDIR=$RUN_DIR - -CONF_DIR=$TOP_DIR/etc/billingstack -CONFIG=${CONFIG:-$CONF_DIR/billingstack.conf} +CONFIG=${CONFIG:-$BASE_DIR/etc/billingstack.conf} SERVICES="api,central,rater,biller,collector" @@ -95,7 +92,7 @@ function run_process() { # screen_it service "command-line" function screen_it { SCREEN_NAME=${SCREEN_NAME:-stack} - SERVICE_DIR=${SERVICE_DIR:-${RUN_DIR}/status} + SERVICE_DIR=${SERVICE_DIR:-$BASE_DIR/status} USE_SCREEN=$(trueorfalse True $USE_SCREEN) if is_service_enabled $1; then @@ -199,11 +196,6 @@ function screen_destroy() { } -function prereq_setup() { - ensure_dir $RUN_DIR -} - - function start_svc() { svc="$(echo "$1" | sed 's/bs-//')" echo "Starting service: $svc" From 78837d96eeb2e0d6ce2822cfff7ac1a8619f555a Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 9 Jun 2013 00:44:11 +0200 Subject: [PATCH 146/182] TOP_DIR > BASE_DIR Change-Id: I3a0b412e77d85e3d120d7a8276ff172c3aac47ca --- tools/control.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/control.sh b/tools/control.sh index 9fc53e6..e0cb9cc 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -127,7 +127,7 @@ function screen_it { # screen_rc service "command-line" function screen_rc { SCREEN_NAME=${SCREEN_NAME:-stack} - SCREENRC=$TOP_DIR/$SCREEN_NAME-screenrc + SCREENRC=$BASE_DIR/$SCREEN_NAME-screenrc if [[ ! 
-e $SCREENRC ]]; then # Name the screen session echo "sessionname $SCREEN_NAME" > $SCREENRC @@ -178,7 +178,7 @@ function screen_setup() { fi # Clear screen rc file - SCREENRC=$TOP_DIR/$SCREEN_NAME-screenrc + SCREENRC=$BASE_DIR/$SCREEN_NAME-screenrc if [[ -e $SCREENRC ]]; then echo -n > $SCREENRC fi From bbbb1d0b0d19c476b1649b3672f69e8a0a612af3 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 9 Jun 2013 03:23:21 +0200 Subject: [PATCH 147/182] Use prepare_service() Change-Id: If979dd573e924d85d7aa86674b54f5b937992b53 --- bin/billingstack-api | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/bin/billingstack-api b/bin/billingstack-api index 639349d..c80bb1c 100644 --- a/bin/billingstack-api +++ b/bin/billingstack-api @@ -24,12 +24,11 @@ from billingstack.openstack.common import log as logging from billingstack.openstack.common import service from billingstack import utils from billingstack.api import service as api_service +from billingstack.service import prepare_service -eventlet.monkey_patch() -utils.read_config('billingstack', sys.argv) +prepare_service(sys.argv) -logging.setup('billingstack') logging.setup('wsme') launcher = service.launch(api_service.Service(), From 5bf614d33020eb19f087f9b3088d1407353fa298 Mon Sep 17 00:00:00 2001 From: Luis Gervaso Date: Sun, 9 Jun 2013 04:12:00 +0200 Subject: [PATCH 148/182] added mac osx installation details Change-Id: Ib5b41a219b7fc69ddfc52edca691f403c15ca7d6 --- doc/source/install/index.rst | 1 + doc/source/install/macos.rst | 144 +++++++++++++++++++++++++++++++++++ 2 files changed, 145 insertions(+) create mode 100644 doc/source/install/macos.rst diff --git a/doc/source/install/index.rst b/doc/source/install/index.rst index 18f4d97..29673b6 100644 --- a/doc/source/install/index.rst +++ b/doc/source/install/index.rst @@ -24,4 +24,5 @@ common manual + macos pgp diff --git a/doc/source/install/macos.rst b/doc/source/install/macos.rst new file mode 100644 index 0000000..13c3a76 --- /dev/null +++ 
b/doc/source/install/macos.rst @@ -0,0 +1,144 @@ +.. + Copyright 2013 Luis Gervaso + + Licensed under the Apache License, Version 2.0 (the "License"); you may + not use this file except in compliance with the License. You may obtain + a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations + under the License. + + + +============================= + Installing Manually (Mac OS) +============================= + +Common Steps +============ + +.. index:: + double: installing; common_steps + +.. note:: + The below operations should take place underneath your /etc folder. + +0. Install Homebrew + + Please, follow the steps described `here `_ + +1. Install system package dependencies:: + + $ brew install python --framework + $ brew install rabbitmq + +.. note:: + To have launchd start rabbitmq at login: + ln -sfv /usr/local/opt/rabbitmq/*.plist ~/Library/LaunchAgents + Then to load rabbitmq now: + launchctl load ~/Library/LaunchAgents/homebrew.mxcl.rabbitmq.plist + Or, if you don't want/need launchctl, you can just run: + rabbitmq-server + + $ rabbitmq-server + + RabbitMQ 3.1.1. Copyright (C) 2007-2013 VMware, Inc. + ## ## Licensed under the MPL. See http://www.rabbitmq.com/ + ## ## + ########## Logs: /usr/local/var/log/rabbitmq/rabbit@localhost.log + ###### ## /usr/local/var/log/rabbitmq/rabbit@localhost-sasl.log + ########## + Starting broker... completed with 7 plugins. + + $ brew install mysql + +2. Clone the BillingStack repo off of Github:: + + $ git clone https://github.com/billingstack/billingstack.git + $ cd billingstack + +3. Setup virtualenv and Install BillingStack and it's dependencies:: + +.. note:: + This is to not interfere with system packages etc. 
+ + $ pip install virtualenv + $ python tools/install_venv.py + $ . .venv/bin/activate + $ python setup.py develop + +.. note:: + ValueError: unknown locale: UTF-8. To fix it you will have to set these environment variables in your ~/.profile or ~/.bashrc manually: + .profile + export LANG=en_US.UTF-8 + export LC_ALL=en_US.UTF-8 + + Copy sample configs to usable ones, inside the `etc` folder do:: + + $ sudo cp -r etc/billingstack /etc + $ cd /etc/billingstack + $ sudo ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done + +.. note:: + Change the wanted configuration settings to match your environment, the file + is in the `/etc/billingstack` folder:: + + $ vi /etc/billingstack/billingstack.conf + + +Installing Central +================== + +.. index:: + double: installing; central + +.. note:: + This is needed because it is the service that the API and others uses to + communicate with to do stuff in the Database. + +1. See `Common Steps`_ before proceeding. + +2. Create the DB for :term:`central`:: + + $ python tools/resync_billingstack.py + +3. Now you might want to load sample data for the time being:: + + $ python tools/load_samples.py + +4. Start the central service:: + + $ billingstack-central + + ... + + 2013-06-09 03:51:22 DEBUG [amqp] Open OK! + 2013-06-09 03:51:22 DEBUG [amqp] using channel_id: 1 + 2013-06-09 03:51:22 DEBUG [amqp] Channel open + 2013-06-09 03:51:22 INFO [billingstack.openstack.common.rpc.common] Connected to AMQP server on localhost:5672 + 2013-06-09 03:51:22 DEBUG [billingstack.openstack.common.rpc.service] Creating Consumer connection for Service central + + +Installing the API +==================== + +.. index:: + double: installing; api + +.. note:: + The API Server needs to able to talk via MQ to other services. + +1. See `Common Steps`_ before proceeding. + +2. Start the API service:: + + $ billingstack-api + + ... 
+ + 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ \ No newline at end of file From e2b6c7c096af3a4267315e3f646824fea1ababbc Mon Sep 17 00:00:00 2001 From: Luis Gervaso Date: Mon, 10 Jun 2013 02:17:57 +0200 Subject: [PATCH 149/182] sync both linux and mac documentation Change-Id: Id93d2035f72da036316733924f6f8b0d34ff66a9 --- doc/source/install/macos.rst | 117 ++++++++++++++++++++-------------- doc/source/install/manual.rst | 90 +++++++++++++++----------- 2 files changed, 124 insertions(+), 83 deletions(-) diff --git a/doc/source/install/macos.rst b/doc/source/install/macos.rst index 13c3a76..23b98e8 100644 --- a/doc/source/install/macos.rst +++ b/doc/source/install/macos.rst @@ -34,60 +34,75 @@ Common Steps 1. Install system package dependencies:: - $ brew install python --framework - $ brew install rabbitmq + $ brew install python --framework + $ brew install rabbitmq -.. note:: - To have launchd start rabbitmq at login: + .. note:: + + To have launchd start rabbitmq at login: ln -sfv /usr/local/opt/rabbitmq/*.plist ~/Library/LaunchAgents - Then to load rabbitmq now: + Then to load rabbitmq now: launchctl load ~/Library/LaunchAgents/homebrew.mxcl.rabbitmq.plist - Or, if you don't want/need launchctl, you can just run: + Or, if you don't want/need launchctl, you can just run: rabbitmq-server - $ rabbitmq-server + Start RabbitMQ:: + + $ rabbitmq-server - RabbitMQ 3.1.1. Copyright (C) 2007-2013 VMware, Inc. - ## ## Licensed under the MPL. See http://www.rabbitmq.com/ - ## ## - ########## Logs: /usr/local/var/log/rabbitmq/rabbit@localhost.log - ###### ## /usr/local/var/log/rabbitmq/rabbit@localhost-sasl.log - ########## - Starting broker... completed with 7 plugins. + RabbitMQ 3.1.1. Copyright (C) 2007-2013 VMware, Inc. - $ brew install mysql + ## ## Licensed under the MPL. 
See http://www.rabbitmq.com/ + ## ## + ########## Logs: /usr/local/var/log/rabbitmq/rabbit@localhost.log + ###### ## /usr/local/var/log/rabbitmq/rabbit@localhost-sasl.log + ########## + + Starting broker... completed with 7 plugins. 2. Clone the BillingStack repo off of Github:: $ git clone https://github.com/billingstack/billingstack.git $ cd billingstack -3. Setup virtualenv and Install BillingStack and it's dependencies:: +3. Setup virtualenv and Install BillingStack and it's dependencies + + .. note:: -.. note:: This is to not interfere with system packages etc. - $ pip install virtualenv - $ python tools/install_venv.py - $ . .venv/bin/activate - $ python setup.py develop + :: -.. note:: - ValueError: unknown locale: UTF-8. To fix it you will have to set these environment variables in your ~/.profile or ~/.bashrc manually: - .profile - export LANG=en_US.UTF-8 - export LC_ALL=en_US.UTF-8 + $ pip install virtualenv + $ python tools/install_venv.py + $ . .venv/bin/activate + $ python setup.py develop - Copy sample configs to usable ones, inside the `etc` folder do:: + .. warning:: - $ sudo cp -r etc/billingstack /etc - $ cd /etc/billingstack - $ sudo ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done + ValueError: unknown locale: UTF-8. + + To fix it you will have to set these environment variables in your ~/.profile or ~/.bashrc manually: + + export LANG=en_US.UTF-8 + export LC_ALL=en_US.UTF-8 + + Copy sample configs to usable ones, inside the `etc` folder do + + + :: + + $ sudo cp -r etc/billingstack /etc + $ cd /etc/billingstack + $ sudo ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done + + .. note:: -.. note:: Change the wanted configuration settings to match your environment, the file is in the `/etc/billingstack` folder:: + :: + $ vi /etc/billingstack/billingstack.conf @@ -103,25 +118,31 @@ Installing Central 1. See `Common Steps`_ before proceeding. -2. Create the DB for :term:`central`:: +2. 
Create the DB for :term:`central` + + :: + + $ python tools/resync_billingstack.py - $ python tools/resync_billingstack.py +3. Now you might want to load sample data for the time being -3. Now you might want to load sample data for the time being:: + :: - $ python tools/load_samples.py + $ python tools/load_samples.py -4. Start the central service:: +4. Start the central service - $ billingstack-central + :: - ... + $ billingstack-central - 2013-06-09 03:51:22 DEBUG [amqp] Open OK! - 2013-06-09 03:51:22 DEBUG [amqp] using channel_id: 1 - 2013-06-09 03:51:22 DEBUG [amqp] Channel open - 2013-06-09 03:51:22 INFO [billingstack.openstack.common.rpc.common] Connected to AMQP server on localhost:5672 - 2013-06-09 03:51:22 DEBUG [billingstack.openstack.common.rpc.service] Creating Consumer connection for Service central + ... + + 2013-06-09 03:51:22 DEBUG [amqp] Open OK! + 2013-06-09 03:51:22 DEBUG [amqp] using channel_id: 1 + 2013-06-09 03:51:22 DEBUG [amqp] Channel open + 2013-06-09 03:51:22 INFO [...] Connected to AMQP server on localhost:5672 + 2013-06-09 03:51:22 DEBUG [...] Creating Consumer connection for Service central Installing the API @@ -135,10 +156,12 @@ Installing the API 1. See `Common Steps`_ before proceeding. -2. Start the API service:: +2. Start the API service + + :: - $ billingstack-api + $ billingstack-api - ... + ... - 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ \ No newline at end of file + 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ \ No newline at end of file diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst index 0b52285..0346de7 100644 --- a/doc/source/install/manual.rst +++ b/doc/source/install/manual.rst @@ -28,33 +28,49 @@ Common Steps .. note:: The below operations should take place underneath your /etc folder. -1. Install system package dependencies (Ubuntu):: +1. 
Install system package dependencies (Ubuntu) - $ apt-get install python-pip python-virtualenv - $ apt-get install rabbitmq-server mysql-server - $ apt-get build-dep python-lxml + :: -2. Clone the BillingStack repo off of Github:: + $ apt-get install python-pip + $ apt-get install rabbitmq-server - $ git clone https://github.com/billingstack/billingstack.git - $ cd billingstack +2. Clone the BillingStack repo off of Github -3. Setup virtualenv:: + :: -.. note:: - This is to not interfere with system packages etc. + $ git clone https://github.com/billingstack/billingstack.git + $ cd billingstack + +3. Setup virtualenv and Install BillingStack and it's dependencies + + .. note:: + + This is to not interfere with system packages etc. + :: + + $ pip install virtualenv + $ python tools/install_venv.py + $ . .venv/bin/activate + $ python setup.py develop - $ virtualenv --no-site-packages .venv - $ . .venv/bin/activate -4. Install BillingStack and it's dependencies:: + Copy sample configs to usable ones, inside the `etc` folder do - $ pip install -rtools/setup-requires -rtools/pip-requires -rtools/pip-options - $ python setup.py develop + :: - Copy sample configs to usable ones, inside the `etc` folder do:: + $ sudo cp -r etc/billingstack /etc + $ cd /etc/billingstack + $ sudo ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done - $ ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done + .. note:: + + Change the wanted configuration settings to match your environment, the file + is in the `/etc/billingstack` folder + + :: + + $ vi /etc/billingstack/billingstack.conf Installing Central @@ -69,26 +85,31 @@ Installing Central 1. See `Common Steps`_ before proceeding. -2. Configure the :term:`central` service:: +2. 
Create the DB for :term:`central` - Change the wanted configuration settings to match your environment, the file - is in the `etc` folder:: + :: - $ vi etc/billingstack.conf + $ python tools/resync_billingstack.py - Refer to the configuration file for details on configuring the service. +3. Now you might want to load sample data for the time being -3. Create the DB for :term:`central`:: + :: - $ python tools/resync_billingstack.py + $ python tools/load_samples.py -4. Now you might want to load sample data for the time being:: +4. Start the central service - $ python tools/dev_samples.py + :: -5. Start the central service:: + $ billingstack-central - $ billingstack-central + ... + + 2013-06-09 03:51:22 DEBUG [amqp] Open OK! + 2013-06-09 03:51:22 DEBUG [amqp] using channel_id: 1 + 2013-06-09 03:51:22 DEBUG [amqp] Channel open + 2013-06-09 03:51:22 INFO [...] Connected to AMQP server on localhost:5672 + 2013-06-09 03:51:22 DEBUG [...] Creating Consumer connection for Service central Installing the API @@ -102,15 +123,12 @@ Installing the API 1. See `Common Steps`_ before proceeding. -2. Configure the :term:`api` service:: - - Change the wanted configuration settings to match your environment, the file - is in the `etc` folder:: +2. Start the API service - $ vi billingstack.conf + :: - Refer to the configuration file for details on configuring the service. + $ billingstack-api -3. Start the API service:: + ... 
- $ billingstack-api \ No newline at end of file + 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ \ No newline at end of file From a6d2cacd6daa905e7bbde834df06fb09f4b58b9c Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 24 Jun 2013 15:36:51 +0200 Subject: [PATCH 150/182] Move to billingstack.storage Change-Id: I64979139fc99bc1e6c7c27e64e4ecdddf169eb26 --- billingstack/biller/service.py | 4 +-- billingstack/biller/storage/__init__.py | 7 ----- billingstack/central/service.py | 4 +-- billingstack/central/storage/__init__.py | 27 ----------------- billingstack/rater/service.py | 4 +-- billingstack/storage/utils.py | 38 ++++++++++++++++++++++++ billingstack/tests/base.py | 10 ++----- 7 files changed, 46 insertions(+), 48 deletions(-) create mode 100644 billingstack/storage/utils.py diff --git a/billingstack/biller/service.py b/billingstack/biller/service.py index 71cf27f..707bc14 100644 --- a/billingstack/biller/service.py +++ b/billingstack/biller/service.py @@ -16,7 +16,7 @@ from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.biller import storage +from billingstack.storage.utils import get_connection cfg.CONF.import_opt('biller_topic', 'billingstack.biller.rpcapi') @@ -39,5 +39,5 @@ def __init__(self, *args, **kwargs): super(Service, self).__init__(*args, **kwargs) def start(self): - self.storage_conn = storage.get_connection() + self.storage_conn = get_connection('biller') super(Service, self).start() diff --git a/billingstack/biller/storage/__init__.py b/billingstack/biller/storage/__init__.py index 5aef594..f9024d0 100644 --- a/billingstack/biller/storage/__init__.py +++ b/billingstack/biller/storage/__init__.py @@ -14,7 +14,6 @@ # License for the specific language governing permissions and limitations # under the License. 
-from oslo.config import cfg from billingstack.storage import base @@ -25,9 +24,3 @@ class StorageEngine(base.StorageEngine): class Connection(base.Connection): """Define the base API for biller storage""" - - -def get_connection(): - name = cfg.CONF['service:biller'].storage_driver - plugin = StorageEngine.get_plugin(name, invoke_on_load=True) - return plugin.get_connection() diff --git a/billingstack/central/service.py b/billingstack/central/service.py index e03785f..8d97480 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -20,7 +20,7 @@ from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service from billingstack.openstack.common import service as os_service -from billingstack.central import storage +from billingstack.storage.utils import get_connection from billingstack import service as bs_service @@ -41,7 +41,7 @@ def __init__(self, *args, **kwargs): super(Service, self).__init__(*args, **kwargs) def start(self): - self.storage_conn = storage.get_connection() + self.storage_conn = get_connection('central') super(Service, self).start() def __getattr__(self, name): diff --git a/billingstack/central/storage/__init__.py b/billingstack/central/storage/__init__.py index d58b705..1ebda20 100644 --- a/billingstack/central/storage/__init__.py +++ b/billingstack/central/storage/__init__.py @@ -15,7 +15,6 @@ # under the License. 
# # Copied: Moniker -from oslo.config import cfg from billingstack.openstack.common import log as logging from billingstack.storage import base @@ -30,29 +29,3 @@ class StorageEngine(base.StorageEngine): class Connection(base.Connection): pass - - -def get_engine(engine_name): - """ - Return the engine class from the provided engine name - """ - return StorageEngine.get_plugin(engine_name, invoke_on_load=True) - - -def get_connection(): - engine = get_engine(cfg.CONF['service:central'].storage_driver) - return engine.get_connection() - - -def setup_schema(): - """ Create the DB - Used for testing purposes """ - LOG.debug("Setting up Schema") - connection = get_connection() - connection.setup_schema() - - -def teardown_schema(): - """ Reset the DB to default - Used for testing purposes """ - LOG.debug("Tearing down Schema") - connection = get_connection() - connection.teardown_schema() diff --git a/billingstack/rater/service.py b/billingstack/rater/service.py index dbc4c63..351e80d 100644 --- a/billingstack/rater/service.py +++ b/billingstack/rater/service.py @@ -19,7 +19,7 @@ from billingstack.openstack.common import log as logging from billingstack.openstack.common import service as os_service from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.rater import storage +from billingstack.storage.utils import get_connection from billingstack import service as bs_service @@ -47,7 +47,7 @@ def __init__(self, *args, **kwargs): super(Service, self).__init__(*args, **kwargs) def start(self): - self.storage_conn = storage.get_connection() + self.storage_conn = get_connection('rater') super(Service, self).start() def create_usage(self, ctxt, values): diff --git a/billingstack/storage/utils.py b/billingstack/storage/utils.py new file mode 100644 index 0000000..992d892 --- /dev/null +++ b/billingstack/storage/utils.py @@ -0,0 +1,38 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 
(the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +from oslo.config import cfg +from billingstack.openstack.common import importutils + + +def get_engine(service_name, driver_name): + """ + Return the engine class from the provided engine name + """ + path = 'billingstack.%s.storage.StorageEngine' % service_name + base = importutils.import_object(path) + return base.get_plugin(driver_name, invoke_on_load=True) + + +def get_connection(service_name, driver_name=None): + """ + Return a instance of a storage connection + """ + driver_name = driver_name or \ + cfg.CONF['service:%s' % service_name].storage_driver + engine = get_engine(service_name, driver_name) + return engine.get_connection() diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index f726ca9..83abc9f 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -14,6 +14,7 @@ from billingstack import exceptions from billingstack import paths from billingstack import samples +from billingstack.storage import utils as storage_utils from billingstack.openstack.common.context import RequestContext, \ get_admin_context from billingstack.openstack.common import importutils @@ -142,10 +143,6 @@ def __init__(self, svc, **kw): group=self.svc_group) set_config(storage_driver=self.driver, group=self.svc_group) - # FIXME: Move this to a generic get_storage() method instead? 
- self.module = importutils.import_module( - 'billingstack.%s.storage' % self.svc) - # FIXME: Workout a way to support the different storage types self.helper = SQLAlchemyHelper(self) @@ -175,11 +172,8 @@ def get_storage_connection(self, **kw): """ Import the storage module for the service that we are going to act on, then return a connection object for that storage module. - - :param service: The service. """ - engine = self.module.get_engine(self.driver) - return engine.get_connection() + return storage_utils.get_connection(self.svc, self.driver) class ServiceFixture(fixtures.Fixture): From 1f0ba1c79c1d00ee0d056c222dee65e490d48759 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 24 Jun 2013 16:08:57 +0200 Subject: [PATCH 151/182] Move invoice code to Biller Change-Id: I580efc85c6df9b46191fa986899fe2432b9ebeac --- billingstack/api/v1/resources.py | 37 ++-- billingstack/biller/rpcapi.py | 57 +++++ billingstack/biller/service.py | 47 +++++ .../biller/storage/impl_sqlalchemy.py | 196 +++++++++++++++++- billingstack/central/rpcapi.py | 57 ----- billingstack/central/service.py | 47 ----- .../storage/impl_sqlalchemy/__init__.py | 138 ------------ .../central/storage/impl_sqlalchemy/models.py | 59 +----- .../tests/api/v1/test_invoice_state.py | 7 +- billingstack/tests/base.py | 2 +- 10 files changed, 325 insertions(+), 322 deletions(-) diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py index 87a4317..f29af74 100644 --- a/billingstack/api/v1/resources.py +++ b/billingstack/api/v1/resources.py @@ -19,6 +19,7 @@ from billingstack.api.base import Rest, Query from billingstack.api.v1 import models +from billingstack.biller.rpcapi import biller_api from billingstack.central.rpcapi import central_api from billingstack.rater.rpcapi import rater_api @@ -158,7 +159,7 @@ def list_pg_providers(q=[]): @bp.post('/invoice-states') @signature(models.InvoiceState, body=models.InvoiceState) def create_invoice_state(body): - row = 
central_api.create_invoice_state( + row = biller_api.create_invoice_state( request.environ['context'], body.to_db()) return models.InvoiceState.from_db(row) @@ -169,7 +170,7 @@ def create_invoice_state(body): def list_invoice_states(q=[]): criterion = _query_to_criterion(q) - rows = central_api.list_invoice_states( + rows = biller_api.list_invoice_states( request.environ['context'], criterion=criterion) return map(models.InvoiceState.from_db, rows) @@ -178,8 +179,8 @@ def list_invoice_states(q=[]): @bp.get('/invoice-states/') @signature(models.InvoiceState, str,) def get_invoice_state(state_id): - row = central_api.get_invoice_state(request.environ['context'], - state_id) + row = biller_api.get_invoice_state(request.environ['context'], + state_id) return models.InvoiceState.from_db(row) @@ -187,7 +188,7 @@ def get_invoice_state(state_id): @bp.put('/invoice-states/') @signature(models.InvoiceState, str, body=models.InvoiceState) def update_invoice_state(state_id, body): - row = central_api.update_invoice_state( + row = biller_api.update_invoice_state( request.environ['context'], state_id, body.to_db()) @@ -197,7 +198,7 @@ def update_invoice_state(state_id, body): @bp.delete('/invoice-states/') def delete_invoice_state(state_id): - central_api.delete_invoice_state( + biller_api.delete_invoice_state( request.environ['context'], state_id) return Response(status=204) @@ -531,7 +532,7 @@ def delete_product(merchant_id, product_id): @bp.post('/merchants//invoices') @signature(models.Invoice, str, body=models.Invoice) def create_invoice(merchant_id, body): - row = central_api.create_invoice( + row = biller_api.create_invoice( request.environ['context'], merchant_id, body.to_db()) @@ -544,7 +545,7 @@ def create_invoice(merchant_id, body): def list_invoices(merchant_id, q=[]): criterion = _query_to_criterion(q, merchant_id=merchant_id) - rows = central_api.list_invoices( + rows = biller_api.list_invoices( request.environ['context'], criterion=criterion) return 
map(models.Invoice.from_db, rows) @@ -553,8 +554,8 @@ def list_invoices(merchant_id, q=[]): @bp.get('/merchants//invoices/') @signature(models.Invoice, str, str) def get_invoice(merchant_id, invoice_id): - row = central_api.get_invoice(request.environ['context'], - invoice_id) + row = biller_api.get_invoice(request.environ['context'], + invoice_id) return models.Invoice.from_db(row) @@ -562,7 +563,7 @@ def get_invoice(merchant_id, invoice_id): @bp.put('/merchants//invoices/') @signature(models.Invoice, str, str, body=models.Invoice) def update_invoice(merchant_id, invoice_id, body): - row = central_api.update_invoice( + row = biller_api.update_invoice( request.environ['context'], invoice_id, body.to_db()) @@ -572,7 +573,7 @@ def update_invoice(merchant_id, invoice_id, body): @bp.delete('/merchants//invoices/') def delete_invoice(merchant_id, invoice_id): - central_api.delete_invoice(request.environ['context'], invoice_id) + biller_api.delete_invoice(request.environ['context'], invoice_id) return Response(status=204) @@ -580,7 +581,7 @@ def delete_invoice(merchant_id, invoice_id): @bp.post('/merchants//invoices//lines') @signature(models.InvoiceLine, str, str, body=models.InvoiceLine) def create_invoice_line(merchant_id, invoice_id, body): - row = central_api.create_invoice_line( + row = biller_api.create_invoice_line( request.environ['context'], invoice_id, body.to_db()) @@ -594,7 +595,7 @@ def list_invoice_lines(merchant_id, invoice_id, q=[]): criterion = _query_to_criterion(q, merchant_id=merchant_id, invoice_id=invoice_id) - rows = central_api.list_invoice_lines( + rows = biller_api.list_invoice_lines( request.environ['context'], criterion=criterion) return map(models.Product.from_db, rows) @@ -603,8 +604,8 @@ def list_invoice_lines(merchant_id, invoice_id, q=[]): @bp.get('/merchants//invoices//lines/') @signature(models.InvoiceLine, str, str, str) def get_invoice_line(merchant_id, invoice_id, line_id): - row = 
central_api.get_invoice_line(request.environ['context'], - line_id) + row = biller_api.get_invoice_line(request.environ['context'], + line_id) return models.Product.from_db(row) @@ -612,7 +613,7 @@ def get_invoice_line(merchant_id, invoice_id, line_id): @bp.put('/merchants//invoices//lines/') @signature(models.InvoiceLine, str, str, str, body=models.InvoiceLine) def update_invoice_line(merchant_id, invoice_id, line_id, body): - row = central_api.update_invoice_line( + row = biller_api.update_invoice_line( request.environ['context'], line_id, body.as_dict()) @@ -622,7 +623,7 @@ def update_invoice_line(merchant_id, invoice_id, line_id, body): @bp.delete('/merchants//invoices//lines/') def delete_invoice_line(merchant_id, invoice_id, line_id): - central_api.delete_invoice_line(request.environ['context'], line_id) + biller_api.delete_invoice_line(request.environ['context'], line_id) return Response(status=204) diff --git a/billingstack/biller/rpcapi.py b/billingstack/biller/rpcapi.py index 30e5b63..faa0f68 100644 --- a/billingstack/biller/rpcapi.py +++ b/billingstack/biller/rpcapi.py @@ -33,5 +33,62 @@ def __init__(self): topic=cfg.CONF.biller_topic, default_version=self.BASE_RPC_VERSION) + # Invoice States + def create_invoice_state(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_invoice_state', + values=values)) + + def list_invoice_states(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_invoice_states', + criterion=criterion)) + + def get_invoice_state(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_invoice_state', id_=id_)) + + def update_invoice_state(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_invoice_state', + id_=id_, values=values)) + + def delete_invoice_state(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_invoice_state', id_=id_)) + + # Invoices + def create_invoice(self, ctxt, merchant_id, values): + return self.call(ctxt, 
self.make_msg('create_invoice', + merchant_id=merchant_id, values=values)) + + def list_invoices(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_invoices', + criterion=criterion)) + + def get_invoice(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_invoice', id_=id_)) + + def update_invoice(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_invoice', id_=id_, + values=values)) + + def delete_invoice(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_invoice', id_=id_)) + + # Invoice lines + def create_invoice_line(self, ctxt, invoice_id, values): + return self.call(ctxt, self.make_msg('create_invoice_line', + invoice_id=invoice_id, values=values)) + + def list_invoice_lines(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_invoice_lines', + criterion=criterion)) + + def get_invoice_line(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_invoice_line', id_=id_)) + + def update_invoice_line(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_invoice_line', id_=id_, + values=values)) + + def delete_invoice_line(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_invoice_line', id_=id_)) + biller_api = BillerAPI() diff --git a/billingstack/biller/service.py b/billingstack/biller/service.py index 707bc14..5c54c22 100644 --- a/billingstack/biller/service.py +++ b/billingstack/biller/service.py @@ -41,3 +41,50 @@ def __init__(self, *args, **kwargs): def start(self): self.storage_conn = get_connection('biller') super(Service, self).start() + + def create_invoice_state(self, ctxt, values): + return self.storage_conn.create_invoice_state(ctxt, values) + + def list_invoice_states(self, ctxt, **kw): + return self.storage_conn.list_invoice_states(ctxt, **kw) + + def get_invoice_state(self, ctxt, id_): + return self.storage_conn.get_invoice_state(ctxt, id_) + + def update_invoice_state(self, ctxt, id_, values): + return 
self.storage_conn.update_invoice_state(ctxt, id_, values) + + def delete_invoice_state(self, ctxt, id_): + return self.storage_conn.delete_invoice_state(ctxt, id_) + + def create_invoice(self, ctxt, merchant_id, values): + return self.storage_conn.create_invoice_state( + ctxt, merchant_id, values) + + def list_invoices(self, ctxt, **kw): + return self.storage_conn.list_invoices(ctxt, **kw) + + def get_invoice(self, ctxt, id_): + return self.storage_conn.get_invoice(ctxt, id_) + + def update_invoice(self, ctxt, id_, values): + return self.storage_conn.update_invoice(ctxt, id_, values) + + def delete_invoice(self, ctxt, id_): + return self.storage_conn.delete_invoice(ctxt, id_) + + def create_invoice_line(self, ctxt, invoice_id, values): + return self.storage_conn.create_invoice_line_state( + ctxt, invoice_id, values) + + def list_invoice_lines(self, ctxt, **kw): + return self.storage_conn.list_invoice_lines(ctxt, **kw) + + def get_invoice_line(self, ctxt, id_): + return self.storage_conn.get_invoice_line(ctxt, id_) + + def update_invoice_line(self, ctxt, id_, values): + return self.storage_conn.update_invoice_line(ctxt, id_, values) + + def delete_invoice_line(self, ctxt, id_): + return self.storage_conn.delete_invoice_line(ctxt, id_) diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py index aaf25c7..aeef60e 100644 --- a/billingstack/biller/storage/impl_sqlalchemy.py +++ b/billingstack/biller/storage/impl_sqlalchemy.py @@ -19,11 +19,16 @@ from oslo.config import cfg from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import Column, ForeignKey +from sqlalchemy import DateTime, Float, Unicode +from sqlalchemy.orm import relationship from billingstack.openstack.common import log as logging -from billingstack.biller.storage import Connection, StorageEngine +from billingstack.sqlalchemy.types import UUID from billingstack.sqlalchemy import api, model_base, session +from 
billingstack.biller.storage import Connection, StorageEngine +from billingstack.central import rpcapi as central_api # DB SCHEMA BASE = declarative_base(cls=model_base.ModelBase) @@ -38,11 +43,60 @@ cfg.CONF.register_opts(session.SQLOPTS, group='biller:sqlalchemy') +class InvoiceState(BASE): + """ + A State representing the currented state a Invoice is in + + Example: + Completed, Failed + """ + name = Column(Unicode(60), nullable=False, primary_key=True) + title = Column(Unicode(100), nullable=False) + description = Column(Unicode(255)) + + +class Invoice(BASE, model_base.BaseMixin): + """ + An invoice + """ + identifier = Column(Unicode(255), nullable=False) + due = Column(DateTime, ) + + sub_total = Column(Float) + tax_percentage = Column(Float) + tax_total = Column(Float) + total = Column(Float) + + customer_id = Column(UUID, nullable=False) + + line_items = relationship('InvoiceLine', backref='invoice_lines') + + state = relationship('InvoiceState', backref='invoices') + state_id = Column(Unicode(60), ForeignKey('invoice_state.name'), + nullable=False) + + # Keep track of the currency and merchant + currency_name = Column(Unicode(10), nullable=False) + merchant_id = Column(UUID, nullable=False) + + +class InvoiceLine(BASE, model_base.BaseMixin): + """ + A Line item in which makes up the Invoice + """ + description = Column(Unicode(255)) + price = Column(Float) + quantity = Column(Float) + sub_total = Column(Float) + + invoice_id = Column(UUID, ForeignKey('invoice.id', ondelete='CASCADE', + onupdate='CASCADE'), nullable=False) + + class SQLAlchemyEngine(StorageEngine): __plugin_name__ = 'sqlalchemy' def get_connection(self): - return Connection() @@ -52,3 +106,141 @@ def __init__(self): def base(self): return BASE + + # Invoice States + def create_invoice_state(self, ctxt, values): + """ + Add a supported invoice_state to the database + """ + row = InvoiceState(**values) + self._save(row) + return dict(row) + + def list_invoice_states(self, ctxt, **kw): + rows 
= self._list(InvoiceState, **kw) + return map(dict, rows) + + def get_invoice_state(self, ctxt, id_): + row = self._get_id_or_name(InvoiceState, id_) + return dict(row) + + def update_invoice_state(self, ctxt, id_, values): + row = self._update(InvoiceState, id_, values, by_name=True) + return dict(row) + + def delete_invoice_state(self, ctxt, id_): + self._delete(InvoiceState, id_, by_name=True) + + # Invoices + def _invoice(self, row): + invoice = dict(row) + return invoice + + def create_invoice(self, ctxt, merchant_id, values): + """ + Add a new Invoice + + :param merchant_id: The Merchant + :param values: Values describing the new Invoice + """ + merchant = central_api.get_merchant(merchant_id) + + invoice = Invoice(**values) + invoice.merchant = merchant + + self._save(invoice) + return self._invoice(invoice) + + def list_invoices(self, ctxt, **kw): + """ + List Invoices + """ + rows = self._list(Invoice, **kw) + return map(self._invoice, rows) + + def get_invoice(self, ctxt, id_): + """ + Get a Invoice + + :param id_: The Invoice ID + """ + row = self._get(Invoice, id_) + return self.invoice(row) + + def update_invoice(self, ctxt, id_, values): + """ + Update a Invoice + + :param id_: The Invoice ID + :param values: Values to update with + """ + row = self._get(Invoice, id_) + row.update(values) + + self._save(row) + return self._invoice(row) + + def delete_invoice(self, ctxt, id_): + """ + Delete a Invoice + + :param id_: Invoice ID + """ + self._delete(Invoice, id_) + + # Invoices Items + def _invoice_line(self, row): + line = dict(row) + return line + + def create_invoice_items(self, ctxt, invoice_id, values): + """ + Add a new Invoice + + :param invoice_id: The Invoice + :param values: Values describing the new Invoice Line + """ + invoice = self._get(Invoice, invoice_id) + + line = InvoiceLine(**values) + line.invoice = invoice + + self._save(line) + return self._invoice_line(line) + + def list_invoice_lines(self, ctxt, **kw): + """ + List Invoice Lines 
+ """ + rows = self._list(InvoiceLine, **kw) + return map(self._invoice_line, rows) + + def get_invoice_line(self, ctxt, id_): + """ + Get a Invoice Line + + :param id_: The Invoice Line ID + """ + row = self._get(InvoiceLine, id_) + return self._invoice_line(row) + + def update_invoice_line(self, ctxt, id_, values): + """ + Update a Invoice Line + + :param id_: The Invoice ID + :param values: Values to update with + """ + row = self._get(InvoiceLine, id_) + row.update(values) + + self._save(row) + return self._invoice_line(row) + + def delete_invoice_line(self, ctxt, id_): + """ + Delete a Invoice Line + + :param id_: Invoice Line ID + """ + self._delete(InvoiceLine, id_) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 604fdc8..173a788 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -71,25 +71,6 @@ def update_language(self, ctxt, id_, values): def delete_language(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_language', id_=id_)) - # Invoice States - def create_invoice_state(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_invoice_state', - values=values)) - - def list_invoice_states(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoice_states', - criterion=criterion)) - - def get_invoice_state(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice_state', id_=id_)) - - def update_invoice_state(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice_state', - id_=id_, values=values)) - - def delete_invoice_state(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice_state', id_=id_)) - # Contact Info def create_contact_info(self, ctxt, id_, values): return self.call(ctxt, self.make_msg('create_contact_info', id_=id_, @@ -264,44 +245,6 @@ def update_product(self, ctxt, id_, values): def delete_product(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_product', 
id_=id_)) - # Invoices - def create_invoice(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_invoice', - merchant_id=merchant_id, values=values)) - - def list_invoices(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoices', - criterion=criterion)) - - def get_invoice(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice', id_=id_)) - - def update_invoice(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice', id_=id_, - values=values)) - - def delete_invoice(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice', id_=id_)) - - # Invoice lines - def create_invoice_line(self, ctxt, invoice_id, values): - return self.call(ctxt, self.make_msg('create_invoice_line', - invoice_id=invoice_id, values=values)) - - def list_invoice_lines(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoice_lines', - criterion=criterion)) - - def get_invoice_line(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice_line', id_=id_)) - - def update_invoice_line(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice_line', id_=id_, - values=values)) - - def delete_invoice_line(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice_line', id_=id_)) - # Subscriptions def create_subscription(self, ctxt, values): return self.call(ctxt, self.make_msg('create_subscription', diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 8d97480..a675874 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -93,21 +93,6 @@ def update_language(self, ctxt, id_, values): def delete_language(self, ctxt, id_): return self.storage_conn.delete_language(ctxt, id_) - def create_invoice_state(self, ctxt, values): - return self.storage_conn.create_invoice_state(ctxt, values) - - def list_invoice_states(self, ctxt, **kw): - return 
self.storage_conn.list_invoice_states(ctxt, **kw) - - def get_invoice_state(self, ctxt, id_): - return self.storage_conn.get_invoice_state(ctxt, id_) - - def update_invoice_state(self, ctxt, id_, values): - return self.storage_conn.update_invoice_state(ctxt, id_, values) - - def delete_invoice_state(self, ctxt, id_): - return self.storage_conn.delete_invoice_state(ctxt, id_) - # TODO Fix def create_contact_info(self, ctxt, obj, values, cls=None, rel_attr='contact_info'): @@ -253,38 +238,6 @@ def update_product(self, ctxt, id_, values): def delete_product(self, ctxt, id_): return self.storage_conn.delete_product(ctxt, id_) - def create_invoice(self, ctxt, merchant_id, values): - return self.storage_conn.create_invoice_state( - ctxt, merchant_id, values) - - def list_invoices(self, ctxt, **kw): - return self.storage_conn.list_invoices(ctxt, **kw) - - def get_invoice(self, ctxt, id_): - return self.storage_conn.get_invoice(ctxt, id_) - - def update_invoice(self, ctxt, id_, values): - return self.storage_conn.update_invoice(ctxt, id_, values) - - def delete_invoice(self, ctxt, id_): - return self.storage_conn.delete_invoice(ctxt, id_) - - def create_invoice_line(self, ctxt, invoice_id, values): - return self.storage_conn.create_invoice_line_state( - ctxt, invoice_id, values) - - def list_invoice_lines(self, ctxt, **kw): - return self.storage_conn.list_invoice_lines(ctxt, **kw) - - def get_invoice_line(self, ctxt, id_): - return self.storage_conn.get_invoice_line(ctxt, id_) - - def update_invoice_line(self, ctxt, id_, values): - return self.storage_conn.update_invoice_line(ctxt, id_, values) - - def delete_invoice_line(self, ctxt, id_): - return self.storage_conn.delete_invoice_line(ctxt, id_) - def create_subscription(self, ctxt, values): return self.storage_conn.create_subscription(ctxt, values) diff --git a/billingstack/central/storage/impl_sqlalchemy/__init__.py b/billingstack/central/storage/impl_sqlalchemy/__init__.py index 49819f8..a5802b9 100644 --- 
a/billingstack/central/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/central/storage/impl_sqlalchemy/__init__.py @@ -139,30 +139,6 @@ def update_language(self, ctxt, id_, values): def delete_language(self, ctxt, id_): self._delete(models.Language, id_, by_name=True) - # Invoice States - def create_invoice_state(self, ctxt, values): - """ - Add a supported invoice_state to the database - """ - row = models.InvoiceState(**values) - self._save(row) - return dict(row) - - def list_invoice_states(self, ctxt, **kw): - rows = self._list(models.InvoiceState, **kw) - return map(dict, rows) - - def get_invoice_state(self, ctxt, id_): - row = self._get_id_or_name(models.InvoiceState, id_) - return dict(row) - - def update_invoice_state(self, ctxt, id_, values): - row = self._update(models.InvoiceState, id_, values, by_name=True) - return dict(row) - - def delete_invoice_state(self, ctxt, id_): - self._delete(models.InvoiceState, id_, by_name=True) - # ContactInfo def create_contact_info(self, ctxt, obj, values, cls=None, rel_attr='contact_info'): @@ -594,120 +570,6 @@ def delete_product(self, ctxt, id_): """ self._delete(models.Product, id_) - # Invoices - def _invoice(self, row): - invoice = dict(row) - return invoice - - def create_invoice(self, ctxt, merchant_id, values): - """ - Add a new Invoice - - :param merchant_id: The Merchant - :param values: Values describing the new Invoice - """ - merchant = self._get(models.Merchant, merchant_id) - - invoice = models.Invoice(**values) - invoice.merchant = merchant - - self._save(invoice) - return self._invoice(invoice) - - def list_invoices(self, ctxt, **kw): - """ - List Invoices - """ - rows = self._list(models.Invoice, **kw) - return map(self._invoice, rows) - - def get_invoice(self, ctxt, id_): - """ - Get a Invoice - - :param id_: The Invoice ID - """ - row = self._get(models.Invoice, id_) - return self.invoice(row) - - def update_invoice(self, ctxt, id_, values): - """ - Update a Invoice - - :param id_: The 
Invoice ID - :param values: Values to update with - """ - row = self._get(models.Invoice, id_) - row.update(values) - - self._save(row) - return self._invoice(row) - - def delete_invoice(self, ctxt, id_): - """ - Delete a Invoice - - :param id_: Invoice ID - """ - self._delete(models.Invoice, id_) - - # Invoices Items - def _invoice_line(self, row): - line = dict(row) - return line - - def create_invoice_items(self, ctxt, invoice_id, values): - """ - Add a new Invoice - - :param invoice_id: The Invoice - :param values: Values describing the new Invoice Line - """ - invoice = self._get(models.Invoice, invoice_id) - - line = models.InvoiceLine(**values) - line.invoice = invoice - - self._save(line) - return self._invoice_line(line) - - def list_invoice_lines(self, ctxt, **kw): - """ - List Invoice Lines - """ - rows = self._list(models.InvoiceLine, **kw) - return map(self._invoice_line, rows) - - def get_invoice_line(self, ctxt, id_): - """ - Get a Invoice Line - - :param id_: The Invoice Line ID - """ - row = self._get(models.InvoiceLine, id_) - return self._invoice_line(row) - - def update_invoice_line(self, ctxt, id_, values): - """ - Update a Invoice Line - - :param id_: The Invoice ID - :param values: Values to update with - """ - row = self._get(models.InvoiceLine, id_) - row.update(values) - - self._save(row) - return self._invoice_line(row) - - def delete_invoice_line(self, ctxt, id_): - """ - Delete a Invoice Line - - :param id_: Invoice Line ID - """ - self._delete(models.InvoiceLine, id_) - # Subscriptions def _subscription(self, row): subscription = dict(row) diff --git a/billingstack/central/storage/impl_sqlalchemy/models.py b/billingstack/central/storage/impl_sqlalchemy/models.py index 5252331..1d9afab 100644 --- a/billingstack/central/storage/impl_sqlalchemy/models.py +++ b/billingstack/central/storage/impl_sqlalchemy/models.py @@ -14,8 +14,7 @@ # License for the specific language governing permissions and limitations # under the License. 
from sqlalchemy import Column, ForeignKey, UniqueConstraint -from sqlalchemy import Integer, Float -from sqlalchemy import DateTime, Unicode +from sqlalchemy import Integer, Unicode from sqlalchemy.orm import relationship from sqlalchemy.ext.declarative import declarative_base, declared_attr @@ -201,7 +200,6 @@ class Customer(BASE, BaseMixin): merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), nullable=False) - invoices = relationship('Invoice', backref='customer') payment_methods = relationship('PaymentMethod', backref='customer') contact_info = relationship( @@ -243,61 +241,6 @@ class PaymentMethod(BASE, BaseMixin): onupdate='CASCADE'), nullable=False) -class InvoiceState(BASE): - """ - A State representing the currented state a Invoice is in - - Example: - Completed, Failed - """ - name = Column(Unicode(60), nullable=False, primary_key=True) - title = Column(Unicode(100), nullable=False) - description = Column(Unicode(255)) - - -class Invoice(BASE, BaseMixin): - """ - An invoice - """ - identifier = Column(Unicode(255), nullable=False) - due = Column(DateTime, ) - - sub_total = Column(Float) - tax_percentage = Column(Float) - tax_total = Column(Float) - total = Column(Float) - - customer_id = Column(UUID, ForeignKey('customer.id', ondelete='CASCADE'), - nullable=False) - - line_items = relationship('InvoiceLine', backref='invoice_lines') - - state = relationship('InvoiceState', backref='invoices') - state_id = Column(Unicode(60), ForeignKey('invoice_state.name'), - nullable=False) - - currency = relationship('Currency', backref='invoices') - currency_name = Column(Unicode(10), ForeignKey('currency.name'), - nullable=False) - - merchant = relationship('Merchant', backref='invoices') - merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), - nullable=False) - - -class InvoiceLine(BASE, BaseMixin): - """ - A Line item in which makes up the Invoice - """ - description = Column(Unicode(255)) - price = Column(Float) - quantity 
= Column(Float) - sub_total = Column(Float) - - invoice_id = Column(UUID, ForeignKey('invoice.id', ondelete='CASCADE', - onupdate='CASCADE'), nullable=False) - - class Plan(BASE, BaseMixin): """ A Product collection like a "Virtual Web Cluster" with 10 servers diff --git a/billingstack/tests/api/v1/test_invoice_state.py b/billingstack/tests/api/v1/test_invoice_state.py index a0f31ad..472ae7c 100644 --- a/billingstack/tests/api/v1/test_invoice_state.py +++ b/billingstack/tests/api/v1/test_invoice_state.py @@ -28,6 +28,11 @@ class TestInvoiceState(FunctionalTest): __test__ = True path = "invoice-states" + def setUp(self): + super(TestInvoiceState, self).setUp() + self.start_storage('biller') + self.start_service('biller') + def test_create_invoice_state(self): fixture = self.get_fixture('invoice_state') @@ -64,5 +69,5 @@ def test_delete_invoice_state(self): url = self.item_path(state['name']) self.delete(url) - data = self.services.central.list_invoice_states(self.admin_ctxt) + data = self.services.biller.list_invoice_states(self.admin_ctxt) self.assertLen(0, data) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index 83abc9f..44cedd9 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -411,7 +411,7 @@ def create_currency(self, fixture=0, values={}, **kw): def create_invoice_state(self, fixture=0, values={}, **kw): fixture = self.get_fixture('invoice_state', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_invoice_state( + return fixture, self.services.biller.create_invoice_state( ctxt, fixture, **kw) def pg_provider_register(self, fixture=0, values={}, **kw): From cfa6aff01a35d40da3a20e4f06084d4b6d971190 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 24 Jun 2013 16:18:57 +0200 Subject: [PATCH 152/182] Add launch to Biller Change-Id: I7fd589ed4fa91d781c0544ef9f45dab6f847c74a --- billingstack/biller/service.py | 11 +++++++++++ 1 file changed, 11 insertions(+) 
diff --git a/billingstack/biller/service.py b/billingstack/biller/service.py index 5c54c22..cac82be 100644 --- a/billingstack/biller/service.py +++ b/billingstack/biller/service.py @@ -13,10 +13,14 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +import sys + from oslo.config import cfg from billingstack.openstack.common import log as logging +from billingstack.openstack.common import service as os_service from billingstack.openstack.common.rpc import service as rpc_service from billingstack.storage.utils import get_connection +from billingstack import service as bs_service cfg.CONF.import_opt('biller_topic', 'billingstack.biller.rpcapi') @@ -88,3 +92,10 @@ def update_invoice_line(self, ctxt, id_, values): def delete_invoice_line(self, ctxt, id_): return self.storage_conn.delete_invoice_line(ctxt, id_) + + +def launch(): + bs_service.prepare_service(sys.argv) + launcher = os_service.launch(Service(), + cfg.CONF['service:biller'].workers) + launcher.wait() From 76848a06fe79cf2a43851824e9dd8060dd8b241e Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 24 Jun 2013 17:14:42 +0200 Subject: [PATCH 153/182] Correct config path Change-Id: I61e13c742c190f4ace03d9b84b2c7b9e045fbca8 --- tools/control.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/control.sh b/tools/control.sh index e0cb9cc..83135f5 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -16,7 +16,7 @@ BASE_DIR=${BASE_DIR:-$SCRIPT_DIR/..} SCREEN_NAME=${SCREEN_NAME:-billingstack} SCREEN_LOGDIR=${SCREEN_LOGDIR:-$BASE_DIR/logs} -CONFIG=${CONFIG:-$BASE_DIR/etc/billingstack.conf} +CONFIG=${CONFIG:-$BASE_DIR/etc/billingstack/billingstack.conf} SERVICES="api,central,rater,biller,collector" From f85deab6c5a8b74f3eb3956c685412accf61d090 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 25 Jun 2013 09:57:20 +0200 Subject: [PATCH 154/182] Remove unused 
function Change-Id: Ia43acf3f0a1121a85a6436de7baa7b53e8233eb8 --- tools/control.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/tools/control.sh b/tools/control.sh index 83135f5..84a4ec3 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -217,7 +217,6 @@ function start() { case $1 in start) - prereq_setup screen_setup svc=$2 From 64c3c789a43bac1b9ee299f4a1c7237c360b4bd5 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 25 Jun 2013 13:29:59 +0200 Subject: [PATCH 155/182] Import class and not object Change-Id: Ib30a7c086e8b86eb22a0ba17e9b569aa137c40ed --- billingstack/storage/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/billingstack/storage/utils.py b/billingstack/storage/utils.py index 992d892..0211cfc 100644 --- a/billingstack/storage/utils.py +++ b/billingstack/storage/utils.py @@ -24,7 +24,7 @@ def get_engine(service_name, driver_name): Return the engine class from the provided engine name """ path = 'billingstack.%s.storage.StorageEngine' % service_name - base = importutils.import_object(path) + base = importutils.import_class(path) return base.get_plugin(driver_name, invoke_on_load=True) From 688bce29243bb295ae386904abbc6384bb507dfb Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 25 Jun 2013 14:09:43 +0200 Subject: [PATCH 156/182] Consolidate resync scripts Change-Id: I4a0dac648259cd1b29344f6e2fc46e5b98e0240f --- tools/resync_biller.py | 32 ---------------------- tools/resync_billingstack.py | 33 ---------------------- tools/resync_rater.py | 32 ---------------------- tools/resync_storage.py | 53 ++++++++++++++++++++++++++++++++++++ 4 files changed, 53 insertions(+), 97 deletions(-) delete mode 100644 tools/resync_biller.py delete mode 100644 tools/resync_billingstack.py delete mode 100644 tools/resync_rater.py create mode 100644 tools/resync_storage.py diff --git a/tools/resync_biller.py b/tools/resync_biller.py deleted file mode 100644 index 4bc76f4..0000000 --- a/tools/resync_biller.py +++ /dev/null @@ 
-1,32 +0,0 @@ -#!/usr/bin/env python - -import sys - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging - -from billingstack import service -from billingstack.biller.storage import get_connection - - -LOG = logging.getLogger(__name__) - - -cfg.CONF.import_opt('storage_driver', 'billingstack.biller.storage', - group='service:biller') - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -cfg.CONF.import_opt('database_connection', - 'billingstack.biller.storage.impl_sqlalchemy', - group='biller:sqlalchemy') - - -if __name__ == '__main__': - service.prepare_service(sys.argv) - connection = get_connection() - - LOG.info("Re-Syncing database") - connection.teardown_schema() - connection.setup_schema() diff --git a/tools/resync_billingstack.py b/tools/resync_billingstack.py deleted file mode 100644 index 2f0cc61..0000000 --- a/tools/resync_billingstack.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging - -from billingstack import service -from billingstack.central.storage import get_connection - - -LOG = logging.getLogger(__name__) - - -cfg.CONF.import_opt('storage_driver', 'billingstack.central', - group='service:central') - - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - - -cfg.CONF.import_opt('database_connection', - 'billingstack.central.storage.impl_sqlalchemy', - group='central:sqlalchemy') - -if __name__ == '__main__': - service.prepare_service(sys.argv) - conn = get_connection() - - LOG.info("Re-Syncing database") - conn.teardown_schema() - conn.setup_schema() diff --git a/tools/resync_rater.py b/tools/resync_rater.py deleted file mode 100644 index d3c43fc..0000000 --- a/tools/resync_rater.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python - -import sys - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging - -from billingstack import service -from 
billingstack.rater.storage import get_connection - - -LOG = logging.getLogger(__name__) - - -cfg.CONF.import_opt('storage_driver', 'billingstack.rater.storage', - group='service:rater') - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -cfg.CONF.import_opt('database_connection', - 'billingstack.rater.storage.impl_sqlalchemy', - group='rater:sqlalchemy') - - -if __name__ == '__main__': - service.prepare_service(sys.argv) - connection = get_connection() - - LOG.info("Re-Syncing database") - connection.teardown_schema() - connection.setup_schema() diff --git a/tools/resync_storage.py b/tools/resync_storage.py new file mode 100644 index 0000000..799294e --- /dev/null +++ b/tools/resync_storage.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +import sys + +from oslo.config import cfg + +from billingstack.openstack.common import log as logging +from billingstack import service +from billingstack.storage.utils import get_connection + +# NOTE: make this based on entrypoints ? +SERVICES = ['biller', 'central', 'rater'] + +LOG = logging.getLogger(__name__) + +cfg.CONF.import_opt('state_path', 'billingstack.paths') + +cfg.CONF.register_cli_opt(cfg.StrOpt('services', default=SERVICES)) +cfg.CONF.register_cli_opt(cfg.BoolOpt('resync', default=False)) + + +def import_service_opts(service): + cfg.CONF.import_opt('storage_driver', 'billingstack.%s.storage' % service, + group='service:%s' % service) + cfg.CONF.import_opt('database_connection', + 'billingstack.%s.storage.impl_sqlalchemy' % service, + group='%s:sqlalchemy' % service) + + +def resync_service_storage(service, resync=False): + """ + Resync the storage for a service + """ + connection = get_connection(service) + if resync: + connection.teardown_schema() + connection.setup_schema() + + +if __name__ == '__main__': + service.prepare_service(sys.argv) + + try: + services = cfg.CONF.services + for svc in services: + import_service_opts(svc) + except Exception: + LOG.error('Error importing service options for %s, will 
exit' % svc) + sys.exit(0) + + for svc in services: + LOG.info("Doing storage for %s" % svc) + resync_service_storage(svc) From 938bc4e11eaba85d3975e2c116bfa8736a0b7a69 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 25 Jun 2013 14:13:57 +0200 Subject: [PATCH 157/182] Fix load samples for now Change-Id: Ic3f2f916ca861dd11ebf41a181b978dd8798074e --- tools/load_samples.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/load_samples.py b/tools/load_samples.py index 0c109aa..0d8be1e 100644 --- a/tools/load_samples.py +++ b/tools/load_samples.py @@ -6,7 +6,7 @@ from billingstack import service from billingstack.samples import get_samples -from billingstack.central.storage import get_connection +from billingstack.storage.utils import get_connection from billingstack.openstack.common.context import get_admin_context @@ -32,7 +32,7 @@ def get_fixture(name, fixture=0, values={}): if __name__ == '__main__': service.prepare_service(sys.argv) - conn = get_connection() + conn = get_connection('central') samples = get_samples() From c8cb196bfe95ddc3ac2a7db6f9721029450f78cb Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 25 Jun 2013 22:14:08 +0200 Subject: [PATCH 158/182] Forgotten in the previous change to fix this as well Change-Id: If0edf73c7e35528f0f128ae8c0851f0519af49b7 --- billingstack/payment_gateway/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/billingstack/payment_gateway/__init__.py b/billingstack/payment_gateway/__init__.py index b9f13ac..265a015 100644 --- a/billingstack/payment_gateway/__init__.py +++ b/billingstack/payment_gateway/__init__.py @@ -18,7 +18,7 @@ from billingstack import exceptions from billingstack.openstack.common import log from billingstack.payment_gateway.base import Provider -from billingstack.central.storage import get_connection +from billingstack.storage.utils import get_connection LOG = log.getLogger(__name__) @@ -47,7 +47,7 @@ def _register(ep, context, conn): 
def register_providers(context): - conn = get_connection() + conn = get_connection('central') em = ExtensionManager(Provider.__plugin_ns__) em.map(_register, context, conn) From 8db476fa3e1353333e3e91c485f7e3dfef7b77aa Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 25 Jun 2013 22:22:46 +0200 Subject: [PATCH 159/182] Remove this here Change-Id: I076cd797c85ed743665f5bbb74111db48912950e --- billingstack/payment_gateway/base.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/billingstack/payment_gateway/base.py b/billingstack/payment_gateway/base.py index 5a9f229..3a3f110 100644 --- a/billingstack/payment_gateway/base.py +++ b/billingstack/payment_gateway/base.py @@ -13,7 +13,6 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -from billingstack.central.storage import get_connection from billingstack.plugin import Plugin @@ -57,12 +56,6 @@ def get_client(self): """ raise NotImplementedError - def get_connection(self): - """ - Helper to get a storage conncection in BS - """ - return get_connection() - @classmethod def create_account(self, values): """ From aaf69c7cf35d6fc146bb0f5356bf2cb983721138 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 25 Jun 2013 23:13:15 +0200 Subject: [PATCH 160/182] Import opts and fix command Change-Id: I61595beb5430d10b943aa36919e9f551d10f7134 --- billingstack/manage/database.py | 12 +++++------- billingstack/manage/provider.py | 4 +++- billingstack/storage/utils.py | 13 ++++++++++++- 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/billingstack/manage/database.py b/billingstack/manage/database.py index 667240e..c2147cb 100644 --- a/billingstack/manage/database.py +++ b/billingstack/manage/database.py @@ -14,23 +14,21 @@ # License for the specific language governing permissions and limitations # under the License. 
from oslo.config import cfg + from billingstack.openstack.common import log from billingstack.manage.base import Command -from billingstack.central.storage import get_connection +from billingstack.storage.utils import get_connection LOG = log.getLogger(__name__) -cfg.CONF.import_opt( - 'storage_driver', - 'billingstack.central', - group='service:central') +cfg.CONF.import_opt('state_path', 'billingstack.paths') class DatabaseCommand(Command): """ A Command that uses a storage connection to do some stuff """ - def setup(self, parsed_args): - self.conn = get_connection() + def get_connection(self, service): + return get_connection(service) diff --git a/billingstack/manage/provider.py b/billingstack/manage/provider.py index ac31ad8..faa5ecb 100644 --- a/billingstack/manage/provider.py +++ b/billingstack/manage/provider.py @@ -31,7 +31,9 @@ def execute(self, parsed_args): class ProvidersList(DatabaseCommand, ListCommand): def execute(self, parsed_args): context = get_admin_context() - data = self.conn.list_pg_providers(context) + conn = self.get_connection('central') + + data = conn.list_pg_providers(context) for p in data: keys = ['type', 'name'] diff --git a/billingstack/storage/utils.py b/billingstack/storage/utils.py index 0211cfc..4f55333 100644 --- a/billingstack/storage/utils.py +++ b/billingstack/storage/utils.py @@ -19,6 +19,14 @@ from billingstack.openstack.common import importutils +def import_service_opts(service): + cfg.CONF.import_opt('storage_driver', 'billingstack.%s.storage' % service, + group='service:%s' % service) + cfg.CONF.import_opt('database_connection', + 'billingstack.%s.storage.impl_sqlalchemy' % service, + group='%s:sqlalchemy' % service) + + def get_engine(service_name, driver_name): """ Return the engine class from the provided engine name @@ -28,10 +36,13 @@ def get_engine(service_name, driver_name): return base.get_plugin(driver_name, invoke_on_load=True) -def get_connection(service_name, driver_name=None): +def 
get_connection(service_name, driver_name=None, import_opts=True): """ Return a instance of a storage connection """ + if import_opts: + import_service_opts(service_name) + driver_name = driver_name or \ cfg.CONF['service:%s' % service_name].storage_driver engine = get_engine(service_name, driver_name) From 82c0a77f1d82ba7c5af1c28fdc12504c6dc5b3a3 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 25 Jun 2013 23:46:10 +0200 Subject: [PATCH 161/182] Make it resync and remove import_opts Change-Id: If40c429e80e0308e61c38d1aa2abfaba5d20dba6 --- tools/resync_storage.py | 19 ++----------------- 1 file changed, 2 insertions(+), 17 deletions(-) diff --git a/tools/resync_storage.py b/tools/resync_storage.py index 799294e..cb53509 100644 --- a/tools/resync_storage.py +++ b/tools/resync_storage.py @@ -19,14 +19,6 @@ cfg.CONF.register_cli_opt(cfg.BoolOpt('resync', default=False)) -def import_service_opts(service): - cfg.CONF.import_opt('storage_driver', 'billingstack.%s.storage' % service, - group='service:%s' % service) - cfg.CONF.import_opt('database_connection', - 'billingstack.%s.storage.impl_sqlalchemy' % service, - group='%s:sqlalchemy' % service) - - def resync_service_storage(service, resync=False): """ Resync the storage for a service @@ -40,14 +32,7 @@ def resync_service_storage(service, resync=False): if __name__ == '__main__': service.prepare_service(sys.argv) - try: - services = cfg.CONF.services - for svc in services: - import_service_opts(svc) - except Exception: - LOG.error('Error importing service options for %s, will exit' % svc) - sys.exit(0) - + services = cfg.CONF.services for svc in services: LOG.info("Doing storage for %s" % svc) - resync_service_storage(svc) + resync_service_storage(svc, resync=cfg.CONF.resync) From 64247eaa35bdb7887d533d9b1b826dee11a88434 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 26 Jun 2013 18:47:56 +0200 Subject: [PATCH 162/182] Fixup screen Change-Id: I655530c7f27f730d18e8e9d36e819e93ffd5a530 --- tools/control.sh 
| 55 +++++++++++++++++++++++++++++++++++------------- 1 file changed, 40 insertions(+), 15 deletions(-) diff --git a/tools/control.sh b/tools/control.sh index 84a4ec3..33c9bf7 100755 --- a/tools/control.sh +++ b/tools/control.sh @@ -12,11 +12,14 @@ set -x # Keep track of this directory SCRIPT_DIR=$(cd $(dirname "$0") && pwd) BASE_DIR=${BASE_DIR:-$SCRIPT_DIR/..} +CONFIG=${CONFIG:-$BASE_DIR/etc/billingstack/billingstack.conf} SCREEN_NAME=${SCREEN_NAME:-billingstack} SCREEN_LOGDIR=${SCREEN_LOGDIR:-$BASE_DIR/logs} +SCREENRC=$BASE_DIR/$SCREEN_NAME-screenrc +USE_SCREEN=$(trueorfalse True $USE_SCREEN) -CONFIG=${CONFIG:-$BASE_DIR/etc/billingstack/billingstack.conf} +SERVICE_DIR=${SERVICE_DIR:-$BASE_DIR/status} SERVICES="api,central,rater,biller,collector" @@ -91,9 +94,6 @@ function run_process() { # Helper to launch a service in a named screen # screen_it service "command-line" function screen_it { - SCREEN_NAME=${SCREEN_NAME:-stack} - SERVICE_DIR=${SERVICE_DIR:-$BASE_DIR/status} - USE_SCREEN=$(trueorfalse True $USE_SCREEN) if is_service_enabled $1; then # Append the service to the screen rc file @@ -126,8 +126,6 @@ function screen_it { # Screen rc file builder # screen_rc service "command-line" function screen_rc { - SCREEN_NAME=${SCREEN_NAME:-stack} - SCREENRC=$BASE_DIR/$SCREEN_NAME-screenrc if [[ ! -e $SCREENRC ]]; then # Name the screen session echo "sessionname $SCREEN_NAME" > $SCREENRC @@ -152,19 +150,29 @@ function is_service_enabled() { function screen_setup() { - ensure_dir $SCREEN_LOGDIR - # Check to see if we are already running DevStack - # Note that this may fail if USE_SCREEN=False - if type -p screen >/dev/null && screen -ls | egrep -q "[0-9].$SCREEN_NAME"; then - echo "You are already running a stack.sh session." - echo "To rejoin this session type 'screen -x stack'." - echo "To destroy this session, type './unstack.sh'." 
- exit 1 + # Set up logging of screen windows + # Set ``SCREEN_LOGDIR`` to turn on logging of screen windows to the + # directory specified in ``SCREEN_LOGDIR``, we will log to the the file + # ``screen-$SERVICE_NAME-$TIMESTAMP.log`` in that dir and have a link + # ``screen-$SERVICE_NAME.log`` to the latest log file. + # Logs are kept for as long specified in ``LOGDAYS``. + if [[ -n "$SCREEN_LOGDIR" ]]; then + + # We make sure the directory is created. + if [[ -d "$SCREEN_LOGDIR" ]]; then + # We cleanup the old logs + find $SCREEN_LOGDIR -maxdepth 1 -name screen-\*.log -mtime +$LOGDAYS -exec rm {} \; + else + ensure_dir $SCREEN_LOGDIR + fi + fi + + if [[ ! -d "$SERVICE_DIR/$SCREEN_NAME" ]]; then + mkdir -p "$SERVICE_DIR/$SCREEN_NAME" fi USE_SCREEN=$(trueorfalse True $USE_SCREEN) - echo $USE_SCREEN if [[ "$USE_SCREEN" == "True" ]]; then # Create a new named screen to run processes in screen -d -m -S $SCREEN_NAME -t shell -s /bin/bash @@ -185,6 +193,18 @@ function screen_setup() { } +screen_is_running() { + # Check to see if we are already running DevStack + # Note that this may fail if USE_SCREEN=False + if type -p screen >/dev/null && screen -ls | egrep -q "[0-9].$SCREEN_NAME"; then + echo "Already running a session." + echo "To rejoin this session type 'screen -x $SCREEN_NAME'." + echo "To destroy this session, type './$0 stop'." 
+ exit 1 + fi +} + + function screen_destroy() { SCREEN=$(which screen) if [[ -n "$SCREEN" ]]; then @@ -193,6 +213,8 @@ function screen_destroy() { screen -X -S $SESSION quit fi fi + + rm -f "$SERVICE_DIR/$SCREEN_NAME"/*.failure } @@ -203,8 +225,10 @@ function start_svc() { } + function start() { local svc=$1 + [ "$svc" == 'all' ] && { for s in $(echo "$SERVICES" | tr ',' ' '); do start_svc $s @@ -217,6 +241,7 @@ function start() { case $1 in start) + screen_is_running screen_setup svc=$2 From 7329c0312949913de2a301d0448c93c6752b8421 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 2 Jul 2013 13:22:49 +0200 Subject: [PATCH 163/182] Sync Oslo and Requirements Change-Id: I648a81c9ed6f611383d196316291504588279b78 --- billingstack/openstack/common/context.py | 8 +- billingstack/openstack/common/db/api.py | 19 +- .../openstack/common/eventlet_backdoor.py | 75 ++++++- billingstack/openstack/common/exception.py | 9 +- billingstack/openstack/common/excutils.py | 31 +++ billingstack/openstack/common/fileutils.py | 75 +++++++ billingstack/openstack/common/gettextutils.py | 176 +++++++++++++++ billingstack/openstack/common/importutils.py | 7 +- billingstack/openstack/common/jsonutils.py | 11 +- billingstack/openstack/common/lockutils.py | 50 ++++- billingstack/openstack/common/log.py | 88 +++++--- billingstack/openstack/common/loopingcall.py | 4 +- .../openstack/common/network_utils.py | 23 +- billingstack/openstack/common/notifier/api.py | 2 +- .../openstack/common/notifier/log_notifier.py | 4 +- .../common/notifier/no_op_notifier.py | 2 +- .../openstack/common/notifier/rpc_notifier.py | 2 +- .../common/notifier/rpc_notifier2.py | 2 +- billingstack/openstack/common/processutils.py | 145 ++++++++++-- billingstack/openstack/common/rpc/__init__.py | 2 +- billingstack/openstack/common/rpc/amqp.py | 164 ++++---------- billingstack/openstack/common/rpc/common.py | 67 +++--- .../openstack/common/rpc/dispatcher.py | 29 ++- .../openstack/common/rpc/impl_fake.py | 7 +- 
.../openstack/common/rpc/impl_kombu.py | 142 ++++++------ .../openstack/common/rpc/impl_qpid.py | 178 ++++++++++----- billingstack/openstack/common/rpc/impl_zmq.py | 168 ++++++-------- .../openstack/common/rpc/matchmaker.py | 207 +++++------------- .../openstack/common/rpc/matchmaker_redis.py | 12 +- .../openstack/common/rpc/matchmaker_ring.py | 110 ++++++++++ billingstack/openstack/common/rpc/proxy.py | 63 +++++- .../openstack/common/rpc/serializer.py | 52 +++++ billingstack/openstack/common/rpc/service.py | 5 +- billingstack/openstack/common/service.py | 5 +- billingstack/openstack/common/threadgroup.py | 13 +- billingstack/openstack/common/timeutils.py | 28 +-- tools/pip-requires | 20 +- tools/test-requires | 19 +- 38 files changed, 1331 insertions(+), 693 deletions(-) create mode 100644 billingstack/openstack/common/rpc/matchmaker_ring.py create mode 100644 billingstack/openstack/common/rpc/serializer.py diff --git a/billingstack/openstack/common/context.py b/billingstack/openstack/common/context.py index e9cfd73..a236bdd 100644 --- a/billingstack/openstack/common/context.py +++ b/billingstack/openstack/common/context.py @@ -23,16 +23,18 @@ """ import itertools -import uuid + +from billingstack.openstack.common import uuidutils def generate_request_id(): - return 'req-' + str(uuid.uuid4()) + return 'req-%s' % uuidutils.generate_uuid() class RequestContext(object): - """ + """Helper class to represent useful information about a request context. + Stores information about the security context under which the user accesses the system, as well as additional request information. """ diff --git a/billingstack/openstack/common/db/api.py b/billingstack/openstack/common/db/api.py index 703527c..9505ea8 100644 --- a/billingstack/openstack/common/db/api.py +++ b/billingstack/openstack/common/db/api.py @@ -19,8 +19,9 @@ Supported configuration options: -`db_backend`: DB backend name or full module path to DB backend module. 
-`dbapi_use_tpool`: Enable thread pooling of DB API calls. +The following two parameters are in the 'database' group: +`backend`: DB backend name or full module path to DB backend module. +`use_tpool`: Enable thread pooling of DB API calls. A DB backend module should implement a method named 'get_backend' which takes no arguments. The method can return any object that implements DB @@ -44,17 +45,21 @@ db_opts = [ - cfg.StrOpt('db_backend', + cfg.StrOpt('backend', default='sqlalchemy', + deprecated_name='db_backend', + deprecated_group='DEFAULT', help='The backend to use for db'), - cfg.BoolOpt('dbapi_use_tpool', + cfg.BoolOpt('use_tpool', default=False, + deprecated_name='dbapi_use_tpool', + deprecated_group='DEFAULT', help='Enable the experimental use of thread pooling for ' 'all DB API calls') ] CONF = cfg.CONF -CONF.register_opts(db_opts) +CONF.register_opts(db_opts, 'database') class DBAPI(object): @@ -75,8 +80,8 @@ def __get_backend(self): if self.__backend: # Another thread assigned it return self.__backend - backend_name = CONF.db_backend - self.__use_tpool = CONF.dbapi_use_tpool + backend_name = CONF.database.backend + self.__use_tpool = CONF.database.use_tpool if self.__use_tpool: from eventlet import tpool self.__tpool = tpool diff --git a/billingstack/openstack/common/eventlet_backdoor.py b/billingstack/openstack/common/eventlet_backdoor.py index c0ad460..01bc984 100644 --- a/billingstack/openstack/common/eventlet_backdoor.py +++ b/billingstack/openstack/common/eventlet_backdoor.py @@ -16,8 +16,13 @@ # License for the specific language governing permissions and limitations # under the License. 
+from __future__ import print_function + +import errno import gc +import os import pprint +import socket import sys import traceback @@ -26,18 +31,38 @@ import greenlet from oslo.config import cfg +from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common import log as logging + +help_for_backdoor_port = 'Acceptable ' + \ + 'values are 0, and :, where 0 results in ' + \ + 'listening on a random tcp port number, results in ' + \ + 'listening on the specified port number and not enabling backdoor' + \ + 'if it is in use and : results in listening on the ' + \ + 'smallest unused port number within the specified range of port ' + \ + 'numbers. The chosen port is displayed in the service\'s log file.' eventlet_backdoor_opts = [ - cfg.IntOpt('backdoor_port', + cfg.StrOpt('backdoor_port', default=None, - help='port for eventlet backdoor to listen') + help='Enable eventlet backdoor. %s' % help_for_backdoor_port) ] CONF = cfg.CONF CONF.register_opts(eventlet_backdoor_opts) +LOG = logging.getLogger(__name__) + + +class EventletBackdoorConfigValueError(Exception): + def __init__(self, port_range, help_msg, ex): + msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. 
' + '%(help)s' % + {'range': port_range, 'ex': ex, 'help': help_msg}) + super(EventletBackdoorConfigValueError, self).__init__(msg) + self.port_range = port_range def _dont_use_this(): - print "Don't use this, just disconnect instead" + print("Don't use this, just disconnect instead") def _find_objects(t): @@ -46,16 +71,43 @@ def _find_objects(t): def _print_greenthreads(): for i, gt in enumerate(_find_objects(greenlet.greenlet)): - print i, gt + print(i, gt) traceback.print_stack(gt.gr_frame) - print + print() def _print_nativethreads(): for threadId, stack in sys._current_frames().items(): - print threadId + print(threadId) traceback.print_stack(stack) - print + print() + + +def _parse_port_range(port_range): + if ':' not in port_range: + start, end = port_range, port_range + else: + start, end = port_range.split(':', 1) + try: + start, end = int(start), int(end) + if end < start: + raise ValueError + return start, end + except ValueError as ex: + raise EventletBackdoorConfigValueError(port_range, ex, + help_for_backdoor_port) + + +def _listen(host, start_port, end_port, listen_func): + try_port = start_port + while True: + try: + return listen_func((host, try_port)) + except socket.error as exc: + if (exc.errno != errno.EADDRINUSE or + try_port >= end_port): + raise + try_port += 1 def initialize_if_enabled(): @@ -70,6 +122,8 @@ def initialize_if_enabled(): if CONF.backdoor_port is None: return None + start_port, end_port = _parse_port_range(str(CONF.backdoor_port)) + # NOTE(johannes): The standard sys.displayhook will print the value of # the last expression and set it to __builtin__._, which overwrites # the __builtin__._ that gettext sets. 
Let's switch to using pprint @@ -80,8 +134,13 @@ def displayhook(val): pprint.pprint(val) sys.displayhook = displayhook - sock = eventlet.listen(('localhost', CONF.backdoor_port)) + sock = _listen('localhost', start_port, end_port, eventlet.listen) + + # In the case of backdoor port being zero, a port number is assigned by + # listen(). In any case, pull the port number out here. port = sock.getsockname()[1] + LOG.info(_('Eventlet backdoor listening on %(port)s for process %(pid)d') % + {'port': port, 'pid': os.getpid()}) eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock, locals=backdoor_locals) return port diff --git a/billingstack/openstack/common/exception.py b/billingstack/openstack/common/exception.py index a2fdb66..96463a1 100644 --- a/billingstack/openstack/common/exception.py +++ b/billingstack/openstack/common/exception.py @@ -98,7 +98,7 @@ def wrap_exception(f): def _wrap(*args, **kw): try: return f(*args, **kw) - except Exception, e: + except Exception as e: if not isinstance(e, Error): #exc_type, exc_value, exc_traceback = sys.exc_info() logging.exception(_('Uncaught exception')) @@ -110,8 +110,7 @@ def _wrap(*args, **kw): class OpenstackException(Exception): - """ - Base Exception + """Base Exception class. To correctly use this class, inherit from it and define a 'message' property. 
That message will get printf'd @@ -123,9 +122,9 @@ def __init__(self, **kwargs): try: self._error_string = self.message % kwargs - except Exception as e: + except Exception: if _FATAL_EXCEPTION_FORMAT_ERRORS: - raise e + raise else: # at least get the core message out if something happened self._error_string = self.message diff --git a/billingstack/openstack/common/excutils.py b/billingstack/openstack/common/excutils.py index 4d00903..d61a8c8 100644 --- a/billingstack/openstack/common/excutils.py +++ b/billingstack/openstack/common/excutils.py @@ -22,6 +22,7 @@ import contextlib import logging import sys +import time import traceback from billingstack.openstack.common.gettextutils import _ @@ -49,3 +50,33 @@ def save_and_reraise_exception(): traceback.format_exception(type_, value, tb)) raise raise type_, value, tb + + +def forever_retry_uncaught_exceptions(infunc): + def inner_func(*args, **kwargs): + last_log_time = 0 + last_exc_message = None + exc_count = 0 + while True: + try: + return infunc(*args, **kwargs) + except Exception as exc: + if exc.message == last_exc_message: + exc_count += 1 + else: + exc_count = 1 + # Do not log any more frequently than once a minute unless + # the exception message changes + cur_time = int(time.time()) + if (cur_time - last_log_time > 60 or + exc.message != last_exc_message): + logging.exception( + _('Unexpected exception occurred %d time(s)... ' + 'retrying.') % exc_count) + last_log_time = cur_time + last_exc_message = exc.message + exc_count = 0 + # This should be a very rare event. In case it isn't, do + # a sleep. + time.sleep(1) + return inner_func diff --git a/billingstack/openstack/common/fileutils.py b/billingstack/openstack/common/fileutils.py index b988ad0..f17e3f7 100644 --- a/billingstack/openstack/common/fileutils.py +++ b/billingstack/openstack/common/fileutils.py @@ -16,9 +16,18 @@ # under the License. 
+import contextlib import errno import os +from billingstack.openstack.common import excutils +from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common import log as logging + +LOG = logging.getLogger(__name__) + +_FILE_CACHE = {} + def ensure_tree(path): """Create a directory (and any ancestor directories required) @@ -33,3 +42,69 @@ def ensure_tree(path): raise else: raise + + +def read_cached_file(filename, force_reload=False): + """Read from a file if it has been modified. + + :param force_reload: Whether to reload the file. + :returns: A tuple with a boolean specifying if the data is fresh + or not. + """ + global _FILE_CACHE + + if force_reload and filename in _FILE_CACHE: + del _FILE_CACHE[filename] + + reloaded = False + mtime = os.path.getmtime(filename) + cache_info = _FILE_CACHE.setdefault(filename, {}) + + if not cache_info or mtime > cache_info.get('mtime', 0): + LOG.debug(_("Reloading cached file %s") % filename) + with open(filename) as fap: + cache_info['data'] = fap.read() + cache_info['mtime'] = mtime + reloaded = True + return (reloaded, cache_info['data']) + + +def delete_if_exists(path): + """Delete a file, but ignore file not found error. + + :param path: File to delete + """ + + try: + os.unlink(path) + except OSError as e: + if e.errno == errno.ENOENT: + return + else: + raise + + +@contextlib.contextmanager +def remove_path_on_error(path): + """Protect code that wants to operate on PATH atomically. + Any exception will cause PATH to be removed. 
+ + :param path: File to work with + """ + try: + yield + except Exception: + with excutils.save_and_reraise_exception(): + delete_if_exists(path) + + +def file_open(*args, **kwargs): + """Open file + + see built-in file() documentation for more details + + Note: The reason this is kept in a separate module is to easily + be able to provide a stub module that doesn't alter system + state at all (for unit tests) + """ + return file(*args, **kwargs) diff --git a/billingstack/openstack/common/gettextutils.py b/billingstack/openstack/common/gettextutils.py index fd35873..b2bb74d 100644 --- a/billingstack/openstack/common/gettextutils.py +++ b/billingstack/openstack/common/gettextutils.py @@ -2,6 +2,7 @@ # Copyright 2012 Red Hat, Inc. # All Rights Reserved. +# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -23,8 +24,11 @@ from billingstack.openstack.common.gettextutils import _ """ +import copy import gettext +import logging.handlers import os +import UserString _localedir = os.environ.get('billingstack'.upper() + '_LOCALEDIR') _t = gettext.translation('billingstack', localedir=_localedir, fallback=True) @@ -48,3 +52,175 @@ def install(domain): gettext.install(domain, localedir=os.environ.get(domain.upper() + '_LOCALEDIR'), unicode=True) + + +""" +Lazy gettext functionality. + +The following is an attempt to introduce a deferred way +to do translations on messages in OpenStack. We attempt to +override the standard _() function and % (format string) operation +to build Message objects that can later be translated when we have +more information. Also included is an example LogHandler that +translates Messages to an associated locale, effectively allowing +many logs, each with their own locale. +""" + + +def get_lazy_gettext(domain): + """Assemble and return a lazy gettext function for a given domain. 
+ + Factory method for a project/module to get a lazy gettext function + for its own translation domain (i.e. nova, glance, cinder, etc.) + """ + + def _lazy_gettext(msg): + """Create and return a Message object. + + Message encapsulates a string so that we can translate it later when + needed. + """ + return Message(msg, domain) + + return _lazy_gettext + + +class Message(UserString.UserString, object): + """Class used to encapsulate translatable messages.""" + def __init__(self, msg, domain): + # _msg is the gettext msgid and should never change + self._msg = msg + self._left_extra_msg = '' + self._right_extra_msg = '' + self.params = None + self.locale = None + self.domain = domain + + @property + def data(self): + # NOTE(mrodden): this should always resolve to a unicode string + # that best represents the state of the message currently + + localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR') + if self.locale: + lang = gettext.translation(self.domain, + localedir=localedir, + languages=[self.locale], + fallback=True) + else: + # use system locale for translations + lang = gettext.translation(self.domain, + localedir=localedir, + fallback=True) + + full_msg = (self._left_extra_msg + + lang.ugettext(self._msg) + + self._right_extra_msg) + + if self.params is not None: + full_msg = full_msg % self.params + + return unicode(full_msg) + + def _save_parameters(self, other): + # we check for None later to see if + # we actually have parameters to inject, + # so encapsulate if our parameter is actually None + if other is None: + self.params = (other, ) + else: + self.params = copy.deepcopy(other) + + return self + + # overrides to be more string-like + def __unicode__(self): + return self.data + + def __str__(self): + return self.data.encode('utf-8') + + def __getstate__(self): + to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg', + 'domain', 'params', 'locale'] + new_dict = self.__dict__.fromkeys(to_copy) + for attr in to_copy: + new_dict[attr] = 
copy.deepcopy(self.__dict__[attr]) + + return new_dict + + def __setstate__(self, state): + for (k, v) in state.items(): + setattr(self, k, v) + + # operator overloads + def __add__(self, other): + copied = copy.deepcopy(self) + copied._right_extra_msg += other.__str__() + return copied + + def __radd__(self, other): + copied = copy.deepcopy(self) + copied._left_extra_msg += other.__str__() + return copied + + def __mod__(self, other): + # do a format string to catch and raise + # any possible KeyErrors from missing parameters + self.data % other + copied = copy.deepcopy(self) + return copied._save_parameters(other) + + def __mul__(self, other): + return self.data * other + + def __rmul__(self, other): + return other * self.data + + def __getitem__(self, key): + return self.data[key] + + def __getslice__(self, start, end): + return self.data.__getslice__(start, end) + + def __getattribute__(self, name): + # NOTE(mrodden): handle lossy operations that we can't deal with yet + # These override the UserString implementation, since UserString + # uses our __class__ attribute to try and build a new message + # after running the inner data string through the operation. + # At that point, we have lost the gettext message id and can just + # safely resolve to a string instead. + ops = ['capitalize', 'center', 'decode', 'encode', + 'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip', + 'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill'] + if name in ops: + return getattr(self.data, name) + else: + return UserString.UserString.__getattribute__(self, name) + + +class LocaleHandler(logging.Handler): + """Handler that can have a locale associated to translate Messages. + + A quick example of how to utilize the Message class above. + LocaleHandler takes a locale and a target logging.Handler object + to forward LogRecord objects to after translating the internal Message. 
+ """ + + def __init__(self, locale, target): + """Initialize a LocaleHandler + + :param locale: locale to use for translating messages + :param target: logging.Handler object to forward + LogRecord objects to after translation + """ + logging.Handler.__init__(self) + self.locale = locale + self.target = target + + def emit(self, record): + if isinstance(record.msg, Message): + # set the locale and resolve to a string + record.msg.locale = self.locale + + self.target.emit(record) diff --git a/billingstack/openstack/common/importutils.py b/billingstack/openstack/common/importutils.py index 3bd277f..7a303f9 100644 --- a/billingstack/openstack/common/importutils.py +++ b/billingstack/openstack/common/importutils.py @@ -24,7 +24,7 @@ def import_class(import_str): - """Returns a class from a string including module and class""" + """Returns a class from a string including module and class.""" mod_str, _sep, class_str = import_str.rpartition('.') try: __import__(mod_str) @@ -41,8 +41,9 @@ def import_object(import_str, *args, **kwargs): def import_object_ns(name_space, import_str, *args, **kwargs): - """ - Import a class and return an instance of it, first by trying + """Tries to import object from default namespace. + + Imports a class and return an instance of it, first by trying to find the class in a default namespace, then failing back to a full path if not found in the default namespace. """ diff --git a/billingstack/openstack/common/jsonutils.py b/billingstack/openstack/common/jsonutils.py index 189bbbd..493ff87 100644 --- a/billingstack/openstack/common/jsonutils.py +++ b/billingstack/openstack/common/jsonutils.py @@ -41,6 +41,9 @@ import types import xmlrpclib +import netaddr +import six + from billingstack.openstack.common import timeutils @@ -93,7 +96,7 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, # value of itertools.count doesn't get caught by nasty_type_tests # and results in infinite loop when list(value) is called. 
if type(value) == itertools.count: - return unicode(value) + return six.text_type(value) # FIXME(vish): Workaround for LP bug 852095. Without this workaround, # tests that raise an exception in a mocked method that @@ -135,14 +138,16 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, # Likely an instance of something. Watch for cycles. # Ignore class member vars. return recursive(value.__dict__, level=level + 1) + elif isinstance(value, netaddr.IPAddress): + return six.text_type(value) else: if any(test(value) for test in _nasty_type_tests): - return unicode(value) + return six.text_type(value) return value except TypeError: # Class objects are tricky since they may define something like # __iter__ defined but it isn't callable as list(). - return unicode(value) + return six.text_type(value) def dumps(value, default=to_primitive, **kwargs): diff --git a/billingstack/openstack/common/lockutils.py b/billingstack/openstack/common/lockutils.py index 6fe0610..2903167 100644 --- a/billingstack/openstack/common/lockutils.py +++ b/billingstack/openstack/common/lockutils.py @@ -49,6 +49,10 @@ CONF.register_opts(util_opts) +def set_defaults(lock_path): + cfg.set_defaults(util_opts, lock_path=lock_path) + + class _InterProcessLock(object): """Lock implementation which allows multiple locks, working around issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does @@ -82,7 +86,7 @@ def __enter__(self): # to have a laughable 10 attempts "blocking" mechanism. self.trylock() return self - except IOError, e: + except IOError as e: if e.errno in (errno.EACCES, errno.EAGAIN): # external locks synchronise things like iptables # updates - give it some time to prevent busy spinning @@ -154,17 +158,18 @@ def bar(self, *args): This way only one of either foo or bar can be executing at a time. - The lock_file_prefix argument is used to provide lock files on disk with a - meaningful prefix. The prefix should end with a hyphen ('-') if specified. 
+ :param lock_file_prefix: The lock_file_prefix argument is used to provide + lock files on disk with a meaningful prefix. The prefix should end with a + hyphen ('-') if specified. - The external keyword argument denotes whether this lock should work across - multiple processes. This means that if two different workers both run a - a method decorated with @synchronized('mylock', external=True), only one - of them will execute at a time. + :param external: The external keyword argument denotes whether this lock + should work across multiple processes. This means that if two different + workers both run a a method decorated with @synchronized('mylock', + external=True), only one of them will execute at a time. - The lock_path keyword argument is used to specify a special location for - external lock files to live. If nothing is set, then CONF.lock_path is - used as a default. + :param lock_path: The lock_path keyword argument is used to specify a + special location for external lock files to live. If nothing is set, then + CONF.lock_path is used as a default. """ def wrap(f): @@ -247,3 +252,28 @@ def inner(*args, **kwargs): return retval return inner return wrap + + +def synchronized_with_prefix(lock_file_prefix): + """Partial object generator for the synchronization decorator. + + Redefine @synchronized in each project like so:: + + (in nova/utils.py) + from nova.openstack.common import lockutils + + synchronized = lockutils.synchronized_with_prefix('nova-') + + + (in nova/foo.py) + from nova import utils + + @utils.synchronized('mylock') + def bar(self, *args): + ... + + The lock_file_prefix argument is used to provide lock files on disk with a + meaningful prefix. The prefix should end with a hyphen ('-') if specified. 
+ """ + + return functools.partial(synchronized, lock_file_prefix=lock_file_prefix) diff --git a/billingstack/openstack/common/log.py b/billingstack/openstack/common/log.py index 1203ffd..c4f4185 100644 --- a/billingstack/openstack/common/log.py +++ b/billingstack/openstack/common/log.py @@ -37,19 +37,17 @@ import logging.config import logging.handlers import os -import stat import sys import traceback from oslo.config import cfg from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils from billingstack.openstack.common import local -from billingstack.openstack.common import notifier -_DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s" _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" common_cli_opts = [ @@ -74,11 +72,13 @@ 'documentation for details on logging configuration ' 'files.'), cfg.StrOpt('log-format', - default=_DEFAULT_LOG_FORMAT, + default=None, metavar='FORMAT', help='A logging.Formatter log message format string which may ' 'use any of the available logging.LogRecord attributes. ' - 'Default: %(default)s'), + 'This option is deprecated. 
Please use ' + 'logging_context_format_string and ' + 'logging_default_format_string instead.'), cfg.StrOpt('log-date-format', default=_DEFAULT_LOG_DATE_FORMAT, metavar='DATE_FORMAT', @@ -104,10 +104,7 @@ generic_log_opts = [ cfg.BoolOpt('use_stderr', default=True, - help='Log output to standard error'), - cfg.StrOpt('logfile_mode', - default='0644', - help='Default file mode used when creating log files'), + help='Log output to standard error') ] log_opts = [ @@ -211,7 +208,27 @@ def _get_log_file_path(binary=None): return '%s.log' % (os.path.join(logdir, binary),) -class ContextAdapter(logging.LoggerAdapter): +class BaseLoggerAdapter(logging.LoggerAdapter): + + def audit(self, msg, *args, **kwargs): + self.log(logging.AUDIT, msg, *args, **kwargs) + + +class LazyAdapter(BaseLoggerAdapter): + def __init__(self, name='unknown', version='unknown'): + self._logger = None + self.extra = {} + self.name = name + self.version = version + + @property + def logger(self): + if not self._logger: + self._logger = getLogger(self.name, self.version) + return self._logger + + +class ContextAdapter(BaseLoggerAdapter): warn = logging.LoggerAdapter.warning def __init__(self, logger, project_name, version_string): @@ -219,8 +236,9 @@ def __init__(self, logger, project_name, version_string): self.project = project_name self.version = version_string - def audit(self, msg, *args, **kwargs): - self.log(logging.AUDIT, msg, *args, **kwargs) + @property + def handlers(self): + return self.logger.handlers def deprecated(self, msg, *args, **kwargs): stdmsg = _("Deprecated: %s") % msg @@ -304,17 +322,6 @@ def format(self, record): return jsonutils.dumps(message) -class PublishErrorsHandler(logging.Handler): - def emit(self, record): - if ('billingstack.openstack.common.notifier.log_notifier' in - CONF.notification_driver): - return - notifier.api.notify(None, 'error.publisher', - 'error_notification', - notifier.api.ERROR, - dict(error=record.msg)) - - def 
_create_logging_excepthook(product_name): def logging_excepthook(type, value, tb): extra = {} @@ -340,7 +347,7 @@ def __str__(self): def _load_log_config(log_config): try: logging.config.fileConfig(log_config) - except ConfigParser.Error, exc: + except ConfigParser.Error as exc: raise LogConfigError(log_config, str(exc)) @@ -399,11 +406,6 @@ def _setup_logging_from_conf(): filelog = logging.handlers.WatchedFileHandler(logpath) log_root.addHandler(filelog) - mode = int(CONF.logfile_mode, 8) - st = os.stat(logpath) - if st.st_mode != (stat.S_IFREG | mode): - os.chmod(logpath, mode) - if CONF.use_stderr: streamlog = ColorHandler() log_root.addHandler(streamlog) @@ -415,15 +417,22 @@ def _setup_logging_from_conf(): log_root.addHandler(streamlog) if CONF.publish_errors: - log_root.addHandler(PublishErrorsHandler(logging.ERROR)) + handler = importutils.import_object( + "billingstack.openstack.common.log_handler.PublishErrorsHandler", + logging.ERROR) + log_root.addHandler(handler) + datefmt = CONF.log_date_format for handler in log_root.handlers: - datefmt = CONF.log_date_format + # NOTE(alaski): CONF.log_format overrides everything currently. This + # should be deprecated in favor of context aware formatting. 
if CONF.log_format: handler.setFormatter(logging.Formatter(fmt=CONF.log_format, datefmt=datefmt)) + log_root.info('Deprecated: log_format is now deprecated and will ' + 'be removed in the next release') else: - handler.setFormatter(LegacyFormatter(datefmt=datefmt)) + handler.setFormatter(ContextFormatter(datefmt=datefmt)) if CONF.debug: log_root.setLevel(logging.DEBUG) @@ -432,14 +441,11 @@ def _setup_logging_from_conf(): else: log_root.setLevel(logging.WARNING) - level = logging.NOTSET for pair in CONF.default_log_levels: mod, _sep, level_name = pair.partition('=') level = logging.getLevelName(level_name) logger = logging.getLogger(mod) logger.setLevel(level) - for handler in log_root.handlers: - logger.addHandler(handler) _loggers = {} @@ -452,6 +458,16 @@ def getLogger(name='unknown', version='unknown'): return _loggers[name] +def getLazyLogger(name='unknown', version='unknown'): + """Returns lazy logger. + + Creates a pass-through logger that does not create the real logger + until it is really needed and delegates all calls to the real logger + once it is created. + """ + return LazyAdapter(name, version) + + class WritableLogger(object): """A thin wrapper that responds to `write` and logs.""" @@ -463,7 +479,7 @@ def write(self, msg): self.logger.log(self.level, msg) -class LegacyFormatter(logging.Formatter): +class ContextFormatter(logging.Formatter): """A context.RequestContext aware formatter configured through flags. 
The flags used to set format strings are: logging_context_format_string diff --git a/billingstack/openstack/common/loopingcall.py b/billingstack/openstack/common/loopingcall.py index 1b46dbe..1976bf9 100644 --- a/billingstack/openstack/common/loopingcall.py +++ b/billingstack/openstack/common/loopingcall.py @@ -84,7 +84,7 @@ def _inner(): LOG.warn(_('task run outlasted interval by %s sec') % -delay) greenthread.sleep(delay if delay > 0 else 0) - except LoopingCallDone, e: + except LoopingCallDone as e: self.stop() done.send(e.retvalue) except Exception: @@ -131,7 +131,7 @@ def _inner(): LOG.debug(_('Dynamic looping call sleeping for %.02f ' 'seconds'), idle) greenthread.sleep(idle) - except LoopingCallDone, e: + except LoopingCallDone as e: self.stop() done.send(e.retvalue) except Exception: diff --git a/billingstack/openstack/common/network_utils.py b/billingstack/openstack/common/network_utils.py index 5224e01..dbed1ce 100644 --- a/billingstack/openstack/common/network_utils.py +++ b/billingstack/openstack/common/network_utils.py @@ -19,14 +19,12 @@ Network-related utilities and helper functions. """ -import logging - -LOG = logging.getLogger(__name__) +import urlparse def parse_host_port(address, default_port=None): - """ - Interpret a string as a host:port pair. + """Interpret a string as a host:port pair. + An IPv6 address MUST be escaped if accompanied by a port, because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334 means both [2001:db8:85a3::8a2e:370:7334] and @@ -66,3 +64,18 @@ def parse_host_port(address, default_port=None): port = default_port return (host, None if port is None else int(port)) + + +def urlsplit(url, scheme='', allow_fragments=True): + """Parse a URL using urlparse.urlsplit(), splitting query and fragments. + This function papers over Python issue9374 when needed. + + The parameters are the same as urlparse.urlsplit. 
+ """ + scheme, netloc, path, query, fragment = urlparse.urlsplit( + url, scheme, allow_fragments) + if allow_fragments and '#' in path: + path, fragment = path.split('#', 1) + if '?' in path: + path, query = path.split('?', 1) + return urlparse.SplitResult(scheme, netloc, path, query, fragment) diff --git a/billingstack/openstack/common/notifier/api.py b/billingstack/openstack/common/notifier/api.py index c39ae48..565f454 100644 --- a/billingstack/openstack/common/notifier/api.py +++ b/billingstack/openstack/common/notifier/api.py @@ -56,7 +56,7 @@ class BadPriorityException(Exception): def notify_decorator(name, fn): - """ decorator for notify which is used from utils.monkey_patch() + """Decorator for notify which is used from utils.monkey_patch(). :param name: name of the function :param function: - object of the function diff --git a/billingstack/openstack/common/notifier/log_notifier.py b/billingstack/openstack/common/notifier/log_notifier.py index a0fcaf9..e842dbf 100644 --- a/billingstack/openstack/common/notifier/log_notifier.py +++ b/billingstack/openstack/common/notifier/log_notifier.py @@ -24,7 +24,9 @@ def notify(_context, message): """Notifies the recipient of the desired event given the model. - Log notifications using openstack's default logging system""" + + Log notifications using openstack's default logging system. 
+ """ priority = message.get('priority', CONF.default_notification_level) diff --git a/billingstack/openstack/common/notifier/no_op_notifier.py b/billingstack/openstack/common/notifier/no_op_notifier.py index bc7a56c..13d946e 100644 --- a/billingstack/openstack/common/notifier/no_op_notifier.py +++ b/billingstack/openstack/common/notifier/no_op_notifier.py @@ -15,5 +15,5 @@ def notify(_context, message): - """Notifies the recipient of the desired event given the model""" + """Notifies the recipient of the desired event given the model.""" pass diff --git a/billingstack/openstack/common/notifier/rpc_notifier.py b/billingstack/openstack/common/notifier/rpc_notifier.py index ac626e3..3c3e690 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier.py +++ b/billingstack/openstack/common/notifier/rpc_notifier.py @@ -31,7 +31,7 @@ def notify(context, message): - """Sends a notification via RPC""" + """Sends a notification via RPC.""" if not context: context = req_context.get_admin_context() priority = message.get('priority', diff --git a/billingstack/openstack/common/notifier/rpc_notifier2.py b/billingstack/openstack/common/notifier/rpc_notifier2.py index 7261c70..b7bc56e 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier2.py +++ b/billingstack/openstack/common/notifier/rpc_notifier2.py @@ -37,7 +37,7 @@ def notify(context, message): - """Sends a notification via RPC""" + """Sends a notification via RPC.""" if not context: context = req_context.get_admin_context() priority = message.get('priority', diff --git a/billingstack/openstack/common/processutils.py b/billingstack/openstack/common/processutils.py index d1ef569..1fcb22d 100644 --- a/billingstack/openstack/common/processutils.py +++ b/billingstack/openstack/common/processutils.py @@ -19,19 +19,26 @@ System-level utilities and helper functions. 
""" -import logging +import os import random import shlex +import signal from eventlet.green import subprocess from eventlet import greenthread from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common import log as logging LOG = logging.getLogger(__name__) +class InvalidArgumentError(Exception): + def __init__(self, message=None): + super(InvalidArgumentError, self).__init__(message) + + class UnknownArgumentError(Exception): def __init__(self, message=None): super(UnknownArgumentError, self).__init__(message) @@ -40,6 +47,12 @@ def __init__(self, message=None): class ProcessExecutionError(Exception): def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None, description=None): + self.exit_code = exit_code + self.stderr = stderr + self.stdout = stdout + self.cmd = cmd + self.description = description + if description is None: description = "Unexpected error while running command." if exit_code is None: @@ -49,20 +62,31 @@ def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None, super(ProcessExecutionError, self).__init__(message) +class NoRootWrapSpecified(Exception): + def __init__(self, message=None): + super(NoRootWrapSpecified, self).__init__(message) + + +def _subprocess_setup(): + # Python installs a SIGPIPE handler by default. This is usually not what + # non-Python subprocesses expect. + signal.signal(signal.SIGPIPE, signal.SIG_DFL) + + def execute(*cmd, **kwargs): - """ - Helper method to shell out and execute a command through subprocess with - optional retry. + """Helper method to shell out and execute a command through subprocess. + + Allows optional retry. :param cmd: Passed to subprocess.Popen. :type cmd: string :param process_input: Send to opened process. :type proces_input: string - :param check_exit_code: Defaults to 0. 
Will raise - :class:`ProcessExecutionError` - if the command exits without returning this value - as a returncode - :type check_exit_code: int + :param check_exit_code: Single bool, int, or list of allowed exit + codes. Defaults to [0]. Raise + :class:`ProcessExecutionError` unless + program exits with one of these code. + :type check_exit_code: boolean, int, or [int] :param delay_on_retry: True | False. Defaults to True. If set to True, wait a short amount of time before retrying. :type delay_on_retry: boolean @@ -72,8 +96,12 @@ def execute(*cmd, **kwargs): the command is prefixed by the command specified in the root_helper kwarg. :type run_as_root: boolean - :param root_helper: command to prefix all cmd's with + :param root_helper: command to prefix to commands called with + run_as_root=True :type root_helper: string + :param shell: whether or not there should be a shell used to + execute this command. Defaults to false. + :type shell: boolean :returns: (stdout, stderr) from process execution :raises: :class:`UnknownArgumentError` on receiving unknown arguments @@ -81,16 +109,31 @@ def execute(*cmd, **kwargs): """ process_input = kwargs.pop('process_input', None) - check_exit_code = kwargs.pop('check_exit_code', 0) + check_exit_code = kwargs.pop('check_exit_code', [0]) + ignore_exit_code = False delay_on_retry = kwargs.pop('delay_on_retry', True) attempts = kwargs.pop('attempts', 1) run_as_root = kwargs.pop('run_as_root', False) root_helper = kwargs.pop('root_helper', '') - if len(kwargs): + shell = kwargs.pop('shell', False) + + if isinstance(check_exit_code, bool): + ignore_exit_code = not check_exit_code + check_exit_code = [0] + elif isinstance(check_exit_code, int): + check_exit_code = [check_exit_code] + + if kwargs: raise UnknownArgumentError(_('Got unknown keyword args ' 'to utils.execute: %r') % kwargs) - if run_as_root: + + if run_as_root and os.geteuid() != 0: + if not root_helper: + raise NoRootWrapSpecified( + message=('Command requested root, but 
did not specify a root ' + 'helper.')) cmd = shlex.split(root_helper) + list(cmd) + cmd = map(str, cmd) while attempts > 0: @@ -98,11 +141,21 @@ def execute(*cmd, **kwargs): try: LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd)) _PIPE = subprocess.PIPE # pylint: disable=E1101 + + if os.name == 'nt': + preexec_fn = None + close_fds = False + else: + preexec_fn = _subprocess_setup + close_fds = True + obj = subprocess.Popen(cmd, stdin=_PIPE, stdout=_PIPE, stderr=_PIPE, - close_fds=True) + close_fds=close_fds, + preexec_fn=preexec_fn, + shell=shell) result = None if process_input is not None: result = obj.communicate(process_input) @@ -112,9 +165,7 @@ def execute(*cmd, **kwargs): _returncode = obj.returncode # pylint: disable=E1101 if _returncode: LOG.debug(_('Result was %s') % _returncode) - if (isinstance(check_exit_code, int) and - not isinstance(check_exit_code, bool) and - _returncode != check_exit_code): + if not ignore_exit_code and _returncode not in check_exit_code: (stdout, stderr) = result raise ProcessExecutionError(exit_code=_returncode, stdout=stdout, @@ -133,3 +184,63 @@ def execute(*cmd, **kwargs): # call clean something up in between calls, without # it two execute calls in a row hangs the second one greenthread.sleep(0) + + +def trycmd(*args, **kwargs): + """A wrapper around execute() to more easily handle warnings and errors. + + Returns an (out, err) tuple of strings containing the output of + the command's stdout and stderr. If 'err' is not empty then the + command can be considered to have failed. + + :discard_warnings True | False. Defaults to False. 
If set to True, + then for succeeding commands, stderr is cleared + + """ + discard_warnings = kwargs.pop('discard_warnings', False) + + try: + out, err = execute(*args, **kwargs) + failed = False + except ProcessExecutionError as exn: + out, err = '', str(exn) + failed = True + + if not failed and discard_warnings and err: + # Handle commands that output to stderr but otherwise succeed + err = '' + + return out, err + + +def ssh_execute(ssh, cmd, process_input=None, + addl_env=None, check_exit_code=True): + LOG.debug(_('Running cmd (SSH): %s'), cmd) + if addl_env: + raise InvalidArgumentError(_('Environment not supported over SSH')) + + if process_input: + # This is (probably) fixable if we need it... + raise InvalidArgumentError(_('process_input not supported over SSH')) + + stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd) + channel = stdout_stream.channel + + # NOTE(justinsb): This seems suspicious... + # ...other SSH clients have buffering issues with this approach + stdout = stdout_stream.read() + stderr = stderr_stream.read() + stdin_stream.close() + + exit_status = channel.recv_exit_status() + + # exit_status == -1 if no exit code was returned + if exit_status != -1: + LOG.debug(_('Result was %s') % exit_status) + if check_exit_code and exit_status != 0: + raise ProcessExecutionError(exit_code=exit_status, + stdout=stdout, + stderr=stderr, + cmd=cmd) + + return (stdout, stderr) diff --git a/billingstack/openstack/common/rpc/__init__.py b/billingstack/openstack/common/rpc/__init__.py index f178214..45b842e 100644 --- a/billingstack/openstack/common/rpc/__init__.py +++ b/billingstack/openstack/common/rpc/__init__.py @@ -26,13 +26,13 @@ """ import inspect -import logging from oslo.config import cfg from billingstack.openstack.common.gettextutils import _ from billingstack.openstack.common import importutils from billingstack.openstack.common import local +from billingstack.openstack.common import log as logging LOG = 
logging.getLogger(__name__) diff --git a/billingstack/openstack/common/rpc/amqp.py b/billingstack/openstack/common/rpc/amqp.py index 3677c7e..feb164e 100644 --- a/billingstack/openstack/common/rpc/amqp.py +++ b/billingstack/openstack/common/rpc/amqp.py @@ -34,10 +34,6 @@ from eventlet import pools from eventlet import queue from eventlet import semaphore -# TODO(pekowsk): Remove import cfg and below comment in Havana. -# This import should no longer be needed when the amqp_rpc_single_reply_queue -# option is removed. -from oslo.config import cfg from billingstack.openstack.common import excutils from billingstack.openstack.common.gettextutils import _ @@ -46,16 +42,6 @@ from billingstack.openstack.common.rpc import common as rpc_common -# TODO(pekowski): Remove this option in Havana. -amqp_opts = [ - cfg.BoolOpt('amqp_rpc_single_reply_queue', - default=False, - help='Enable a fast single reply queue if using AMQP based ' - 'RPC like RabbitMQ or Qpid.'), -] - -cfg.CONF.register_opts(amqp_opts) - UNIQUE_ID = '_unique_id' LOG = logging.getLogger(__name__) @@ -83,7 +69,7 @@ def empty(self): # is the above "while loop" gets all the cached connections from the # pool and closes them, but never returns them to the pool, a pool # leak. The unit tests hang waiting for an item to be returned to the - # pool. The unit tests get here via the teatDown() method. In the run + # pool. The unit tests get here via the tearDown() method. In the run # time code, it gets here via cleanup() and only appears in service.py # just before doing a sys.exit(), so cleanup() only happens once and # the leakage is not a problem. @@ -102,19 +88,19 @@ def get_connection_pool(conf, connection_cls): class ConnectionContext(rpc_common.Connection): - """The class that is actually returned to the caller of - create_connection(). This is essentially a wrapper around - Connection that supports 'with'. It can also return a new - Connection, or one from a pool. 
The function will also catch - when an instance of this class is to be deleted. With that - we can return Connections to the pool on exceptions and so - forth without making the caller be responsible for catching - them. If possible the function makes sure to return a - connection to the pool. + """The class that is actually returned to the create_connection() caller. + + This is essentially a wrapper around Connection that supports 'with'. + It can also return a new Connection, or one from a pool. + + The function will also catch when an instance of this class is to be + deleted. With that we can return Connections to the pool on exceptions + and so forth without making the caller be responsible for catching them. + If possible the function makes sure to return a connection to the pool. """ def __init__(self, conf, connection_pool, pooled=True, server_params=None): - """Create a new connection, or get one from the pool""" + """Create a new connection, or get one from the pool.""" self.connection = None self.conf = conf self.connection_pool = connection_pool @@ -127,7 +113,7 @@ def __init__(self, conf, connection_pool, pooled=True, server_params=None): self.pooled = pooled def __enter__(self): - """When with ConnectionContext() is used, return self""" + """When with ConnectionContext() is used, return self.""" return self def _done(self): @@ -165,20 +151,19 @@ def create_consumer(self, topic, proxy, fanout=False): def create_worker(self, topic, proxy, pool_name): self.connection.create_worker(topic, proxy, pool_name) - def join_consumer_pool(self, callback, pool_name, topic, exchange_name): + def join_consumer_pool(self, callback, pool_name, topic, exchange_name, + ack_on_error=True): self.connection.join_consumer_pool(callback, pool_name, topic, - exchange_name) + exchange_name, + ack_on_error) def consume_in_thread(self): self.connection.consume_in_thread() - def consume_in_thread_group(self, thread_group): - self.connection.consume_in_thread_group(thread_group) 
- def __getattr__(self, key): - """Proxy all other calls to the Connection instance""" + """Proxy all other calls to the Connection instance.""" if self.connection: return getattr(self.connection, key) else: @@ -186,7 +171,7 @@ def __getattr__(self, key): class ReplyProxy(ConnectionContext): - """ Connection class for RPC replies / callbacks """ + """Connection class for RPC replies / callbacks.""" def __init__(self, conf, connection_pool): self._call_waiters = {} self._num_call_waiters = 0 @@ -200,8 +185,10 @@ def _process_data(self, message_data): msg_id = message_data.pop('_msg_id', None) waiter = self._call_waiters.get(msg_id) if not waiter: - LOG.warn(_('no calling threads waiting for msg_id : %s' - ', message : %s') % (msg_id, message_data)) + LOG.warn(_('No calling threads waiting for msg_id : %(msg_id)s' + ', message : %(data)s'), {'msg_id': msg_id, + 'data': message_data}) + LOG.warn(_('_call_waiters: %s') % str(self._call_waiters)) else: waiter.put(message_data) @@ -234,12 +221,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None, failure = rpc_common.serialize_remote_exception(failure, log_failure) - try: - msg = {'result': reply, 'failure': failure} - except TypeError: - msg = {'result': dict((k, repr(v)) - for k, v in reply.__dict__.iteritems()), - 'failure': failure} + msg = {'result': reply, 'failure': failure} if ending: msg['ending'] = True _add_unique_id(msg) @@ -254,7 +236,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None, class RpcContext(rpc_common.CommonRpcContext): - """Context that supports replying to a rpc.call""" + """Context that supports replying to a rpc.call.""" def __init__(self, **kwargs): self.msg_id = kwargs.pop('msg_id', None) self.reply_q = kwargs.pop('reply_q', None) @@ -341,8 +323,9 @@ def _add_unique_id(msg): class _ThreadPoolWithWait(object): - """Base class for a delayed invocation manager used by - the Connection class to start up green threads + """Base class for a delayed invocation 
manager. + + Used by the Connection class to start up green threads to handle incoming messages. """ @@ -357,12 +340,14 @@ def wait(self): class CallbackWrapper(_ThreadPoolWithWait): - """Wraps a straight callback to allow it to be invoked in a green - thread. + """Wraps a straight callback. + + Allows it to be invoked in a green thread. """ def __init__(self, conf, callback, connection_pool): - """ + """Initiates CallbackWrapper object. + :param conf: cfg.CONF instance :param callback: a callable (probably a function) :param connection_pool: connection pool as returned by @@ -493,7 +478,7 @@ def _process_data(self, data): return result def __iter__(self): - """Return a result until we get a reply with an 'ending" flag""" + """Return a result until we get a reply with an 'ending' flag.""" if self._done: raise StopIteration while True: @@ -515,61 +500,8 @@ def __iter__(self): yield result -#TODO(pekowski): Remove MulticallWaiter() in Havana. -class MulticallWaiter(object): - def __init__(self, conf, connection, timeout): - self._connection = connection - self._iterator = connection.iterconsume(timeout=timeout or - conf.rpc_response_timeout) - self._result = None - self._done = False - self._got_ending = False - self._conf = conf - self.msg_id_cache = _MsgIdCache() - - def done(self): - if self._done: - return - self._done = True - self._iterator.close() - self._iterator = None - self._connection.close() - - def __call__(self, data): - """The consume() callback will call this. 
Store the result.""" - self.msg_id_cache.check_duplicate_message(data) - if data['failure']: - failure = data['failure'] - self._result = rpc_common.deserialize_remote_exception(self._conf, - failure) - - elif data.get('ending', False): - self._got_ending = True - else: - self._result = data['result'] - - def __iter__(self): - """Return a result until we get a 'None' response from consumer""" - if self._done: - raise StopIteration - while True: - try: - self._iterator.next() - except Exception: - with excutils.save_and_reraise_exception(): - self.done() - if self._got_ending: - self.done() - raise StopIteration - result = self._result - if isinstance(result, Exception): - self.done() - raise result - yield result - - def create_connection(conf, new, connection_pool): - """Create a connection""" + """Create a connection.""" return ConnectionContext(conf, connection_pool, pooled=not new) @@ -578,14 +510,6 @@ def create_connection(conf, new, connection_pool): def multicall(conf, context, topic, msg, timeout, connection_pool): """Make a call that returns multiple times.""" - # TODO(pekowski): Remove all these comments in Havana. - # For amqp_rpc_single_reply_queue = False, - # Can't use 'with' for multicall, as it returns an iterator - # that will continue to use the connection. When it's done, - # connection.close() will get called which will put it back into - # the pool - # For amqp_rpc_single_reply_queue = True, - # The 'with' statement is mandatory for closing the connection LOG.debug(_('Making synchronous call on %s ...'), topic) msg_id = uuid.uuid4().hex msg.update({'_msg_id': msg_id}) @@ -593,21 +517,13 @@ def multicall(conf, context, topic, msg, timeout, connection_pool): _add_unique_id(msg) pack_context(msg, context) - # TODO(pekowski): Remove this flag and the code under the if clause - # in Havana. 
- if not conf.amqp_rpc_single_reply_queue: - conn = ConnectionContext(conf, connection_pool) - wait_msg = MulticallWaiter(conf, conn, timeout) - conn.declare_direct_consumer(msg_id, wait_msg) + with _reply_proxy_create_sem: + if not connection_pool.reply_proxy: + connection_pool.reply_proxy = ReplyProxy(conf, connection_pool) + msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()}) + wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool) + with ConnectionContext(conf, connection_pool) as conn: conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout) - else: - with _reply_proxy_create_sem: - if not connection_pool.reply_proxy: - connection_pool.reply_proxy = ReplyProxy(conf, connection_pool) - msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()}) - wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool) - with ConnectionContext(conf, connection_pool) as conn: - conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout) return wait_msg diff --git a/billingstack/openstack/common/rpc/common.py b/billingstack/openstack/common/rpc/common.py index bc1e345..1992401 100644 --- a/billingstack/openstack/common/rpc/common.py +++ b/billingstack/openstack/common/rpc/common.py @@ -22,6 +22,7 @@ import traceback from oslo.config import cfg +import six from billingstack.openstack.common.gettextutils import _ from billingstack.openstack.common import importutils @@ -69,6 +70,8 @@ _VERSION_KEY = 'oslo.version' _MESSAGE_KEY = 'oslo.message' +_REMOTE_POSTFIX = '_Remote' + class RPCException(Exception): message = _("An unknown RPC related exception occurred.") @@ -123,7 +126,8 @@ class Timeout(RPCException): 'info: "%(info)s"') def __init__(self, info=None, topic=None, method=None): - """ + """Initiates Timeout object. 
+ :param info: Extra info to convey to the user :param topic: The topic that the rpc call was sent to :param rpc_method_name: The name of the rpc method being @@ -157,6 +161,10 @@ class UnsupportedRpcEnvelopeVersion(RPCException): "not supported by this endpoint.") +class RpcVersionCapError(RPCException): + message = _("Specified RPC version cap, %(version_cap)s, is too low") + + class Connection(object): """A connection, returned by rpc.create_connection(). @@ -216,9 +224,9 @@ def create_worker(self, topic, proxy, pool_name): raise NotImplementedError() def join_consumer_pool(self, callback, pool_name, topic, exchange_name): - """Register as a member of a group of consumers for a given topic from - the specified exchange. + """Register as a member of a group of consumers. + Uses given topic from the specified exchange. Exactly one member of a given pool will receive each message. A message will be delivered to multiple pools, if more than @@ -250,21 +258,6 @@ def consume_in_thread(self): """ raise NotImplementedError() - def consume_in_thread_group(self, thread_group): - """ - Spawn a thread to handle incoming messages in the supplied - ThreadGroup. - - Spawn a thread that will be responsible for handling all incoming - messages for consumers that were set up on this connection. - - Message dispatching inside of this is expected to be implemented in a - non-blocking manner. An example implementation would be having this - thread pull messages in for all of the consumers, but utilize a thread - pool for dispatching the messages to the proxy objects. - """ - raise NotImplementedError() - def _safe_log(log_func, msg, msg_data): """Sanitizes the msg_data field before logging.""" @@ -291,7 +284,7 @@ def _safe_log(log_func, msg, msg_data): for elem in arg[:-1]: d = d[elem] d[arg[-1]] = '' - except KeyError, e: + except KeyError as e: LOG.info(_('Failed to sanitize %(item)s. 
Key error %(err)s'), {'item': arg, 'err': e}) @@ -314,17 +307,27 @@ def serialize_remote_exception(failure_info, log_failure=True): tb = traceback.format_exception(*failure_info) failure = failure_info[1] if log_failure: - LOG.error(_("Returning exception %s to caller"), unicode(failure)) + LOG.error(_("Returning exception %s to caller"), + six.text_type(failure)) LOG.error(tb) kwargs = {} if hasattr(failure, 'kwargs'): kwargs = failure.kwargs + # NOTE(matiu): With cells, it's possible to re-raise remote, remote + # exceptions. Lets turn it back into the original exception type. + cls_name = str(failure.__class__.__name__) + mod_name = str(failure.__class__.__module__) + if (cls_name.endswith(_REMOTE_POSTFIX) and + mod_name.endswith(_REMOTE_POSTFIX)): + cls_name = cls_name[:-len(_REMOTE_POSTFIX)] + mod_name = mod_name[:-len(_REMOTE_POSTFIX)] + data = { - 'class': str(failure.__class__.__name__), - 'module': str(failure.__class__.__module__), - 'message': unicode(failure), + 'class': cls_name, + 'module': mod_name, + 'message': six.text_type(failure), 'tb': tb, 'args': failure.args, 'kwargs': kwargs @@ -360,8 +363,9 @@ def deserialize_remote_exception(conf, data): ex_type = type(failure) str_override = lambda self: message - new_ex_type = type(ex_type.__name__ + "_Remote", (ex_type,), + new_ex_type = type(ex_type.__name__ + _REMOTE_POSTFIX, (ex_type,), {'__str__': str_override, '__unicode__': str_override}) + new_ex_type.__module__ = '%s%s' % (module, _REMOTE_POSTFIX) try: # NOTE(ameade): Dynamically create a new exception type and swap it in # as the new type for the exception. This only works on user defined @@ -423,10 +427,11 @@ def elevated(self, read_deleted=None, overwrite=False): class ClientException(Exception): - """This encapsulates some actual exception that is expected to be - hit by an RPC proxy object. 
Merely instantiating it records the - current exception information, which will be passed back to the - RPC client without exceptional logging.""" + """Encapsulates actual exception expected to be hit by a RPC proxy object. + + Merely instantiating it records the current exception information, which + will be passed back to the RPC client without exceptional logging. + """ def __init__(self): self._exc_info = sys.exc_info() @@ -434,7 +439,7 @@ def __init__(self): def catch_client_exception(exceptions, func, *args, **kwargs): try: return func(*args, **kwargs) - except Exception, e: + except Exception as e: if type(e) in exceptions: raise ClientException() else: @@ -443,11 +448,13 @@ def catch_client_exception(exceptions, func, *args, **kwargs): def client_exceptions(*exceptions): """Decorator for manager methods that raise expected exceptions. + Marking a Manager method with this decorator allows the declaration of expected exceptions that the RPC layer should not consider fatal, and not log as if they were generated in a real error scenario. Note that this will cause listed exceptions to be wrapped in a - ClientException, which is used internally by the RPC layer.""" + ClientException, which is used internally by the RPC layer. + """ def outer(func): def inner(*args, **kwargs): return catch_client_exception(exceptions, func, *args, **kwargs) diff --git a/billingstack/openstack/common/rpc/dispatcher.py b/billingstack/openstack/common/rpc/dispatcher.py index e3f2067..05ce1d0 100644 --- a/billingstack/openstack/common/rpc/dispatcher.py +++ b/billingstack/openstack/common/rpc/dispatcher.py @@ -84,6 +84,7 @@ def some_remote_method(self, arg1, arg2, newarg=None): """ from billingstack.openstack.common.rpc import common as rpc_common +from billingstack.openstack.common.rpc import serializer as rpc_serializer class RpcDispatcher(object): @@ -93,16 +94,38 @@ class RpcDispatcher(object): contains a list of underlying managers that have an API_VERSION attribute. 
""" - def __init__(self, callbacks): + def __init__(self, callbacks, serializer=None): """Initialize the rpc dispatcher. :param callbacks: List of proxy objects that are an instance of a class with rpc methods exposed. Each proxy object should have an RPC_API_VERSION attribute. + :param serializer: The Serializer object that will be used to + deserialize arguments before the method call and + to serialize the result after it returns. """ self.callbacks = callbacks + if serializer is None: + serializer = rpc_serializer.NoOpSerializer() + self.serializer = serializer super(RpcDispatcher, self).__init__() + def _deserialize_args(self, context, kwargs): + """Helper method called to deserialize args before dispatch. + + This calls our serializer on each argument, returning a new set of + args that have been deserialized. + + :param context: The request context + :param kwargs: The arguments to be deserialized + :returns: A new set of deserialized args + """ + new_kwargs = dict() + for argname, arg in kwargs.iteritems(): + new_kwargs[argname] = self.serializer.deserialize_entity(context, + arg) + return new_kwargs + def dispatch(self, ctxt, version, method, namespace, **kwargs): """Dispatch a message based on a requested version. 
@@ -145,7 +168,9 @@ def dispatch(self, ctxt, version, method, namespace, **kwargs): if not hasattr(proxyobj, method): continue if is_compatible: - return getattr(proxyobj, method)(ctxt, **kwargs) + kwargs = self._deserialize_args(ctxt, kwargs) + result = getattr(proxyobj, method)(ctxt, **kwargs) + return self.serializer.serialize_entity(ctxt, result) if had_compatible: raise AttributeError("No such RPC function '%s'" % method) diff --git a/billingstack/openstack/common/rpc/impl_fake.py b/billingstack/openstack/common/rpc/impl_fake.py index a71c0f5..ef4a39f 100644 --- a/billingstack/openstack/common/rpc/impl_fake.py +++ b/billingstack/openstack/common/rpc/impl_fake.py @@ -120,12 +120,9 @@ def close(self): def consume_in_thread(self): pass - def consume_in_thread_group(self, thread_group): - pass - def create_connection(conf, new=True): - """Create a connection""" + """Create a connection.""" return Connection() @@ -182,7 +179,7 @@ def cleanup(): def fanout_cast(conf, context, topic, msg): - """Cast to all consumers of a topic""" + """Cast to all consumers of a topic.""" check_serialize(msg) method = msg.get('method') if not method: diff --git a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py index b3c2024..b5197be 100644 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ b/billingstack/openstack/common/rpc/impl_kombu.py @@ -30,6 +30,7 @@ import kombu.messaging from oslo.config import cfg +from billingstack.openstack.common import excutils from billingstack.openstack.common.gettextutils import _ from billingstack.openstack.common import network_utils from billingstack.openstack.common.rpc import amqp as rpc_amqp @@ -129,15 +130,46 @@ def __init__(self, channel, callback, tag, **kwargs): self.tag = str(tag) self.kwargs = kwargs self.queue = None + self.ack_on_error = kwargs.get('ack_on_error', True) self.reconnect(channel) def reconnect(self, channel): - """Re-declare the queue after a rabbit reconnect""" + 
"""Re-declare the queue after a rabbit reconnect.""" self.channel = channel self.kwargs['channel'] = channel self.queue = kombu.entity.Queue(**self.kwargs) self.queue.declare() + def _callback_handler(self, message, callback): + """Call callback with deserialized message. + + Messages that are processed without exception are ack'ed. + + If the message processing generates an exception, it will be + ack'ed if ack_on_error=True. Otherwise it will be .reject()'ed. + Rejection is better than waiting for the message to timeout. + Rejected messages are immediately requeued. + """ + + ack_msg = False + try: + msg = rpc_common.deserialize_msg(message.payload) + callback(msg) + ack_msg = True + except Exception: + if self.ack_on_error: + ack_msg = True + LOG.exception(_("Failed to process message" + " ... skipping it.")) + else: + LOG.exception(_("Failed to process message" + " ... will requeue.")) + finally: + if ack_msg: + message.ack() + else: + message.reject() + def consume(self, *args, **kwargs): """Actually declare the consumer on the amqp channel. This will start the flow of messages from the queue. Using the @@ -150,8 +182,6 @@ def consume(self, *args, **kwargs): If kwargs['nowait'] is True, then this call will block until a message is read. - Messages will automatically be acked if the callback doesn't - raise an exception """ options = {'consumer_tag': self.tag} @@ -162,21 +192,15 @@ def consume(self, *args, **kwargs): def _callback(raw_message): message = self.channel.message_to_python(raw_message) - try: - msg = rpc_common.deserialize_msg(message.payload) - callback(msg) - except Exception: - LOG.exception(_("Failed to process message... 
skipping it.")) - finally: - message.ack() + self._callback_handler(message, callback) self.queue.consume(*args, callback=_callback, **options) def cancel(self): - """Cancel the consuming from the queue, if it has started""" + """Cancel the consuming from the queue, if it has started.""" try: self.queue.cancel(self.tag) - except KeyError, e: + except KeyError as e: # NOTE(comstud): Kludge to get around a amqplib bug if str(e) != "u'%s'" % self.tag: raise @@ -184,7 +208,7 @@ def cancel(self): class DirectConsumer(ConsumerBase): - """Queue/consumer class for 'direct'""" + """Queue/consumer class for 'direct'.""" def __init__(self, conf, channel, msg_id, callback, tag, **kwargs): """Init a 'direct' queue. @@ -216,7 +240,7 @@ def __init__(self, conf, channel, msg_id, callback, tag, **kwargs): class TopicConsumer(ConsumerBase): - """Consumer class for 'topic'""" + """Consumer class for 'topic'.""" def __init__(self, conf, channel, topic, callback, tag, name=None, exchange_name=None, **kwargs): @@ -253,7 +277,7 @@ def __init__(self, conf, channel, topic, callback, tag, name=None, class FanoutConsumer(ConsumerBase): - """Consumer class for 'fanout'""" + """Consumer class for 'fanout'.""" def __init__(self, conf, channel, topic, callback, tag, **kwargs): """Init a 'fanout' queue. 
@@ -286,7 +310,7 @@ def __init__(self, conf, channel, topic, callback, tag, **kwargs): class Publisher(object): - """Base Publisher class""" + """Base Publisher class.""" def __init__(self, channel, exchange_name, routing_key, **kwargs): """Init the Publisher class with the exchange_name, routing_key, @@ -298,7 +322,7 @@ def __init__(self, channel, exchange_name, routing_key, **kwargs): self.reconnect(channel) def reconnect(self, channel): - """Re-establish the Producer after a rabbit reconnection""" + """Re-establish the Producer after a rabbit reconnection.""" self.exchange = kombu.entity.Exchange(name=self.exchange_name, **self.kwargs) self.producer = kombu.messaging.Producer(exchange=self.exchange, @@ -306,7 +330,7 @@ def reconnect(self, channel): routing_key=self.routing_key) def send(self, msg, timeout=None): - """Send a message""" + """Send a message.""" if timeout: # # AMQP TTL is in milliseconds when set in the header. @@ -317,7 +341,7 @@ def send(self, msg, timeout=None): class DirectPublisher(Publisher): - """Publisher class for 'direct'""" + """Publisher class for 'direct'.""" def __init__(self, conf, channel, msg_id, **kwargs): """init a 'direct' publisher. @@ -333,7 +357,7 @@ def __init__(self, conf, channel, msg_id, **kwargs): class TopicPublisher(Publisher): - """Publisher class for 'topic'""" + """Publisher class for 'topic'.""" def __init__(self, conf, channel, topic, **kwargs): """init a 'topic' publisher. @@ -352,7 +376,7 @@ def __init__(self, conf, channel, topic, **kwargs): class FanoutPublisher(Publisher): - """Publisher class for 'fanout'""" + """Publisher class for 'fanout'.""" def __init__(self, conf, channel, topic, **kwargs): """init a 'fanout' publisher. 
@@ -367,7 +391,7 @@ def __init__(self, conf, channel, topic, **kwargs): class NotifyPublisher(TopicPublisher): - """Publisher class for 'notify'""" + """Publisher class for 'notify'.""" def __init__(self, conf, channel, topic, **kwargs): self.durable = kwargs.pop('durable', conf.rabbit_durable_queues) @@ -447,8 +471,9 @@ def __init__(self, conf, server_params=None): self.reconnect() def _fetch_ssl_params(self): - """Handles fetching what ssl params - should be used for the connection (if any)""" + """Handles fetching what ssl params should be used for the connection + (if any). + """ ssl_params = dict() # http://docs.python.org/library/ssl.html - ssl.wrap_socket @@ -520,7 +545,7 @@ def reconnect(self): return except (IOError, self.connection_errors) as e: pass - except Exception, e: + except Exception as e: # NOTE(comstud): Unfortunately it's possible for amqplib # to return an error not covered by its transport # connection_errors in the case of a timeout waiting for @@ -561,10 +586,10 @@ def ensure(self, error_callback, method, *args, **kwargs): while True: try: return method(*args, **kwargs) - except (self.connection_errors, socket.timeout, IOError), e: + except (self.connection_errors, socket.timeout, IOError) as e: if error_callback: error_callback(e) - except Exception, e: + except Exception as e: # NOTE(comstud): Unfortunately it's possible for amqplib # to return an error not covered by its transport # connection_errors in the case of a timeout waiting for @@ -578,18 +603,18 @@ def ensure(self, error_callback, method, *args, **kwargs): self.reconnect() def get_channel(self): - """Convenience call for bin/clear_rabbit_queues""" + """Convenience call for bin/clear_rabbit_queues.""" return self.channel def close(self): - """Close/release this connection""" + """Close/release this connection.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.connection.release() self.connection = None def reset(self): - """Reset a connection so it can be used 
again""" + """Reset a connection so it can be used again.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.channel.close() @@ -618,7 +643,7 @@ def _declare_consumer(): return self.ensure(_connect_error, _declare_consumer) def iterconsume(self, limit=None, timeout=None): - """Return an iterator that will consume from all queues/consumers""" + """Return an iterator that will consume from all queues/consumers.""" info = {'do_consume': True} @@ -634,8 +659,8 @@ def _error_callback(exc): def _consume(): if info['do_consume']: - queues_head = self.consumers[:-1] - queues_tail = self.consumers[-1] + queues_head = self.consumers[:-1] # not fanout. + queues_tail = self.consumers[-1] # fanout for queue in queues_head: queue.consume(nowait=True) queues_tail.consume(nowait=False) @@ -648,7 +673,7 @@ def _consume(): yield self.ensure(_error_callback, _consume) def cancel_consumer_thread(self): - """Cancel a consumer thread""" + """Cancel a consumer thread.""" if self.consumer_thread is not None: self.consumer_thread.kill() try: @@ -663,7 +688,7 @@ def wait_on_proxy_callbacks(self): proxy_cb.wait() def publisher_send(self, cls, topic, msg, timeout=None, **kwargs): - """Send to a publisher based on the publisher class""" + """Send to a publisher based on the publisher class.""" def _error_callback(exc): log_info = {'topic': topic, 'err_str': str(exc)} @@ -684,36 +709,37 @@ def declare_direct_consumer(self, topic, callback): self.declare_consumer(DirectConsumer, topic, callback) def declare_topic_consumer(self, topic, callback=None, queue_name=None, - exchange_name=None): + exchange_name=None, ack_on_error=True): """Create a 'topic' consumer.""" self.declare_consumer(functools.partial(TopicConsumer, name=queue_name, exchange_name=exchange_name, + ack_on_error=ack_on_error, ), topic, callback) def declare_fanout_consumer(self, topic, callback): - """Create a 'fanout' consumer""" + """Create a 'fanout' consumer.""" self.declare_consumer(FanoutConsumer, topic, 
callback) def direct_send(self, msg_id, msg): - """Send a 'direct' message""" + """Send a 'direct' message.""" self.publisher_send(DirectPublisher, msg_id, msg) def topic_send(self, topic, msg, timeout=None): - """Send a 'topic' message""" + """Send a 'topic' message.""" self.publisher_send(TopicPublisher, topic, msg, timeout) def fanout_send(self, topic, msg): - """Send a 'fanout' message""" + """Send a 'fanout' message.""" self.publisher_send(FanoutPublisher, topic, msg) def notify_send(self, topic, msg, **kwargs): - """Send a notify message on a topic""" + """Send a notify message on a topic.""" self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs) def consume(self, limit=None): - """Consume from all queues/consumers""" + """Consume from all queues/consumers.""" it = self.iterconsume(limit=limit) while True: try: @@ -721,27 +747,20 @@ def consume(self, limit=None): except StopIteration: return - def _consumer_thread_callback(self): - """ Consumer thread callback used by consume_in_* """ - try: - self.consume() - except greenlet.GreenletExit: - return - def consume_in_thread(self): - """Consumer from all queues/consumers in a greenthread""" - + """Consumer from all queues/consumers in a greenthread.""" + @excutils.forever_retry_uncaught_exceptions + def _consumer_thread(): + try: + self.consume() + except greenlet.GreenletExit: + return if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn( - self._consumer_thread_callback) + self.consumer_thread = eventlet.spawn(_consumer_thread) return self.consumer_thread - def consume_in_thread_group(self, thread_group): - """ Consume from all queues/consumers in the supplied ThreadGroup""" - thread_group.add_thread(self._consumer_thread_callback) - def create_consumer(self, topic, proxy, fanout=False): - """Create a consumer that calls a method in a proxy object""" + """Create a consumer that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, 
rpc_amqp.get_connection_pool(self.conf, Connection)) @@ -753,7 +772,7 @@ def create_consumer(self, topic, proxy, fanout=False): self.declare_topic_consumer(topic, proxy_cb) def create_worker(self, topic, proxy, pool_name): - """Create a worker that calls a method in a proxy object""" + """Create a worker that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) @@ -761,7 +780,7 @@ def create_worker(self, topic, proxy, pool_name): self.declare_topic_consumer(topic, proxy_cb, pool_name) def join_consumer_pool(self, callback, pool_name, topic, - exchange_name=None): + exchange_name=None, ack_on_error=True): """Register as a member of a group of consumers for a given topic from the specified exchange. @@ -782,11 +801,12 @@ def join_consumer_pool(self, callback, pool_name, topic, topic=topic, exchange_name=exchange_name, callback=callback_wrapper, + ack_on_error=ack_on_error, ) def create_connection(conf, new=True): - """Create a connection""" + """Create a connection.""" return rpc_amqp.create_connection( conf, new, rpc_amqp.get_connection_pool(conf, Connection)) diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py index 356886a..13997b8 100644 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ b/billingstack/openstack/common/rpc/impl_qpid.py @@ -24,6 +24,7 @@ import greenlet from oslo.config import cfg +from billingstack.openstack.common import excutils from billingstack.openstack.common.gettextutils import _ from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils @@ -31,6 +32,7 @@ from billingstack.openstack.common.rpc import amqp as rpc_amqp from billingstack.openstack.common.rpc import common as rpc_common +qpid_codec = importutils.try_import("qpid.codec010") qpid_messaging = importutils.try_import("qpid.messaging") qpid_exceptions = 
importutils.try_import("qpid.messaging.exceptions") @@ -69,6 +71,8 @@ cfg.CONF.register_opts(qpid_opts) +JSON_CONTENT_TYPE = 'application/json; charset=utf8' + class ConsumerBase(object): """Consumer base class.""" @@ -115,31 +119,59 @@ def __init__(self, session, callback, node_name, node_opts, self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) - self.reconnect(session) + self.connect(session) + + def connect(self, session): + """Declare the reciever on connect.""" + self._declare_receiver(session) def reconnect(self, session): - """Re-declare the receiver after a qpid reconnect""" + """Re-declare the receiver after a qpid reconnect.""" + self._declare_receiver(session) + + def _declare_receiver(self, session): self.session = session self.receiver = session.receiver(self.address) self.receiver.capacity = 1 + def _unpack_json_msg(self, msg): + """Load the JSON data in msg if msg.content_type indicates that it + is necessary. Put the loaded data back into msg.content and + update msg.content_type appropriately. + + A Qpid Message containing a dict will have a content_type of + 'amqp/map', whereas one containing a string that needs to be converted + back from JSON will have a content_type of JSON_CONTENT_TYPE. + + :param msg: a Qpid Message object + :returns: None + """ + if msg.content_type == JSON_CONTENT_TYPE: + msg.content = jsonutils.loads(msg.content) + msg.content_type = 'amqp/map' + def consume(self): - """Fetch the message and pass it to the callback object""" + """Fetch the message and pass it to the callback object.""" message = self.receiver.fetch() try: + self._unpack_json_msg(message) msg = rpc_common.deserialize_msg(message.content) self.callback(msg) except Exception: LOG.exception(_("Failed to process message... skipping it.")) finally: + # TODO(sandy): Need support for optional ack_on_error. 
self.session.acknowledge(message) def get_receiver(self): return self.receiver + def get_node_name(self): + return self.address.split(';')[0] + class DirectConsumer(ConsumerBase): - """Queue/consumer class for 'direct'""" + """Queue/consumer class for 'direct'.""" def __init__(self, conf, session, msg_id, callback): """Init a 'direct' queue. @@ -157,7 +189,7 @@ def __init__(self, conf, session, msg_id, callback): class TopicConsumer(ConsumerBase): - """Consumer class for 'topic'""" + """Consumer class for 'topic'.""" def __init__(self, conf, session, topic, callback, name=None, exchange_name=None): @@ -177,7 +209,7 @@ def __init__(self, conf, session, topic, callback, name=None, class FanoutConsumer(ConsumerBase): - """Consumer class for 'fanout'""" + """Consumer class for 'fanout'.""" def __init__(self, conf, session, topic, callback): """Init a 'fanout' queue. @@ -186,6 +218,7 @@ def __init__(self, conf, session, topic, callback): 'topic' is the topic to listen on 'callback' is the callback to call when messages are received """ + self.conf = conf super(FanoutConsumer, self).__init__( session, callback, @@ -194,9 +227,21 @@ def __init__(self, conf, session, topic, callback): "%s_fanout_%s" % (topic, uuid.uuid4().hex), {"exclusive": True}) + def reconnect(self, session): + topic = self.get_node_name() + params = { + 'session': session, + 'topic': topic, + 'callback': self.callback, + } + + self.__init__(conf=self.conf, **params) + + super(FanoutConsumer, self).reconnect(session) + class Publisher(object): - """Base Publisher class""" + """Base Publisher class.""" def __init__(self, session, node_name, node_opts=None): """Init the Publisher class with the exchange_name, routing_key, @@ -225,16 +270,43 @@ def __init__(self, session, node_name, node_opts=None): self.reconnect(session) def reconnect(self, session): - """Re-establish the Sender after a reconnection""" + """Re-establish the Sender after a reconnection.""" self.sender = session.sender(self.address) + def 
_pack_json_msg(self, msg): + """Qpid cannot serialize dicts containing strings longer than 65535 + characters. This function dumps the message content to a JSON + string, which Qpid is able to handle. + + :param msg: May be either a Qpid Message object or a bare dict. + :returns: A Qpid Message with its content field JSON encoded. + """ + try: + msg.content = jsonutils.dumps(msg.content) + except AttributeError: + # Need to have a Qpid message so we can set the content_type. + msg = qpid_messaging.Message(jsonutils.dumps(msg)) + msg.content_type = JSON_CONTENT_TYPE + return msg + def send(self, msg): - """Send a message""" + """Send a message.""" + try: + # Check if Qpid can encode the message + check_msg = msg + if not hasattr(check_msg, 'content_type'): + check_msg = qpid_messaging.Message(msg) + content_type = check_msg.content_type + enc, dec = qpid_messaging.message.get_codec(content_type) + enc(check_msg.content) + except qpid_codec.CodecException: + # This means the message couldn't be serialized as a dict. + msg = self._pack_json_msg(msg) self.sender.send(msg) class DirectPublisher(Publisher): - """Publisher class for 'direct'""" + """Publisher class for 'direct'.""" def __init__(self, conf, session, msg_id): """Init a 'direct' publisher.""" super(DirectPublisher, self).__init__(session, msg_id, @@ -242,7 +314,7 @@ def __init__(self, conf, session, msg_id): class TopicPublisher(Publisher): - """Publisher class for 'topic'""" + """Publisher class for 'topic'.""" def __init__(self, conf, session, topic): """init a 'topic' publisher. """ @@ -252,7 +324,7 @@ def __init__(self, conf, session, topic): class FanoutPublisher(Publisher): - """Publisher class for 'fanout'""" + """Publisher class for 'fanout'.""" def __init__(self, conf, session, topic): """init a 'fanout' publisher. 
""" @@ -262,7 +334,7 @@ def __init__(self, conf, session, topic): class NotifyPublisher(Publisher): - """Publisher class for notifications""" + """Publisher class for notifications.""" def __init__(self, conf, session, topic): """init a 'topic' publisher. """ @@ -330,23 +402,24 @@ def _lookup_consumer(self, receiver): return self.consumers[str(receiver)] def reconnect(self): - """Handles reconnecting and re-establishing sessions and queues""" - if self.connection.opened(): - try: - self.connection.close() - except qpid_exceptions.ConnectionError: - pass - + """Handles reconnecting and re-establishing sessions and queues.""" attempt = 0 delay = 1 while True: + # Close the session if necessary + if self.connection.opened(): + try: + self.connection.close() + except qpid_exceptions.ConnectionError: + pass + broker = self.brokers[attempt % len(self.brokers)] attempt += 1 try: self.connection_create(broker) self.connection.open() - except qpid_exceptions.ConnectionError, e: + except qpid_exceptions.ConnectionError as e: msg_dict = dict(e=e, delay=delay) msg = _("Unable to connect to AMQP server: %(e)s. " "Sleeping %(delay)s seconds") % msg_dict @@ -374,20 +447,26 @@ def ensure(self, error_callback, method, *args, **kwargs): try: return method(*args, **kwargs) except (qpid_exceptions.Empty, - qpid_exceptions.ConnectionError), e: + qpid_exceptions.ConnectionError) as e: if error_callback: error_callback(e) self.reconnect() def close(self): - """Close/release this connection""" + """Close/release this connection.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() - self.connection.close() + try: + self.connection.close() + except Exception: + # NOTE(dripton) Logging exceptions that happen during cleanup just + # causes confusion; there's really nothing useful we can do with + # them. 
+ pass self.connection = None def reset(self): - """Reset a connection so it can be used again""" + """Reset a connection so it can be used again.""" self.cancel_consumer_thread() self.wait_on_proxy_callbacks() self.session.close() @@ -411,7 +490,7 @@ def _declare_consumer(): return self.ensure(_connect_error, _declare_consumer) def iterconsume(self, limit=None, timeout=None): - """Return an iterator that will consume from all queues/consumers""" + """Return an iterator that will consume from all queues/consumers.""" def _error_callback(exc): if isinstance(exc, qpid_exceptions.Empty): @@ -435,7 +514,7 @@ def _consume(): yield self.ensure(_error_callback, _consume) def cancel_consumer_thread(self): - """Cancel a consumer thread""" + """Cancel a consumer thread.""" if self.consumer_thread is not None: self.consumer_thread.kill() try: @@ -450,7 +529,7 @@ def wait_on_proxy_callbacks(self): proxy_cb.wait() def publisher_send(self, cls, topic, msg): - """Send to a publisher based on the publisher class""" + """Send to a publisher based on the publisher class.""" def _connect_error(exc): log_info = {'topic': topic, 'err_str': str(exc)} @@ -480,15 +559,15 @@ def declare_topic_consumer(self, topic, callback=None, queue_name=None, topic, callback) def declare_fanout_consumer(self, topic, callback): - """Create a 'fanout' consumer""" + """Create a 'fanout' consumer.""" self.declare_consumer(FanoutConsumer, topic, callback) def direct_send(self, msg_id, msg): - """Send a 'direct' message""" + """Send a 'direct' message.""" self.publisher_send(DirectPublisher, msg_id, msg) def topic_send(self, topic, msg, timeout=None): - """Send a 'topic' message""" + """Send a 'topic' message.""" # # We want to create a message with attributes, e.g. a TTL. 
We # don't really need to keep 'msg' in its JSON format any longer @@ -503,22 +582,15 @@ def topic_send(self, topic, msg, timeout=None): self.publisher_send(TopicPublisher, topic, qpid_message) def fanout_send(self, topic, msg): - """Send a 'fanout' message""" + """Send a 'fanout' message.""" self.publisher_send(FanoutPublisher, topic, msg) def notify_send(self, topic, msg, **kwargs): - """Send a notify message on a topic""" + """Send a notify message on a topic.""" self.publisher_send(NotifyPublisher, topic, msg) - def _consumer_thread_callback(self): - """ Consumer thread callback used by consume_in_* """ - try: - self.consume() - except greenlet.GreenletExit: - return - def consume(self, limit=None): - """Consume from all queues/consumers""" + """Consume from all queues/consumers.""" it = self.iterconsume(limit=limit) while True: try: @@ -527,19 +599,19 @@ def consume(self, limit=None): return def consume_in_thread(self): - """Consumer from all queues/consumers in a greenthread""" - + """Consumer from all queues/consumers in a greenthread.""" + @excutils.forever_retry_uncaught_exceptions + def _consumer_thread(): + try: + self.consume() + except greenlet.GreenletExit: + return if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn( - self._consumer_thread_callback) + self.consumer_thread = eventlet.spawn(_consumer_thread) return self.consumer_thread - def consume_in_thread_group(self, thread_group): - """ Consume from all queues/consumers in the supplied ThreadGroup""" - thread_group.add_thread(self._consumer_thread_callback) - def create_consumer(self, topic, proxy, fanout=False): - """Create a consumer that calls a method in a proxy object""" + """Create a consumer that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) @@ -555,7 +627,7 @@ def create_consumer(self, topic, proxy, fanout=False): return consumer def create_worker(self, topic, proxy, 
pool_name): - """Create a worker that calls a method in a proxy object""" + """Create a worker that calls a method in a proxy object.""" proxy_cb = rpc_amqp.ProxyCallback( self.conf, proxy, rpc_amqp.get_connection_pool(self.conf, Connection)) @@ -569,7 +641,7 @@ def create_worker(self, topic, proxy, pool_name): return consumer def join_consumer_pool(self, callback, pool_name, topic, - exchange_name=None): + exchange_name=None, ack_on_error=True): """Register as a member of a group of consumers for a given topic from the specified exchange. @@ -598,7 +670,7 @@ def join_consumer_pool(self, callback, pool_name, topic, def create_connection(conf, new=True): - """Create a connection""" + """Create a connection.""" return rpc_amqp.create_connection( conf, new, rpc_amqp.get_connection_pool(conf, Connection)) diff --git a/billingstack/openstack/common/rpc/impl_zmq.py b/billingstack/openstack/common/rpc/impl_zmq.py index 0b1c719..9663a31 100644 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ b/billingstack/openstack/common/rpc/impl_zmq.py @@ -30,7 +30,6 @@ from billingstack.openstack.common.gettextutils import _ from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import processutils as utils from billingstack.openstack.common.rpc import common as rpc_common zmq = importutils.try_import('eventlet.green.zmq') @@ -85,8 +84,8 @@ def _serialize(data): - """ - Serialization wrapper + """Serialization wrapper. + We prefer using JSON, but it cannot encode all types. Error if a developer passes us bad data. """ @@ -98,18 +97,15 @@ def _serialize(data): def _deserialize(data): - """ - Deserialization wrapper - """ + """Deserialization wrapper.""" LOG.debug(_("Deserializing: %s"), data) return jsonutils.loads(data) class ZmqSocket(object): - """ - A tiny wrapper around ZeroMQ to simplify the send/recv protocol - and connection management. + """A tiny wrapper around ZeroMQ. 
+ Simplifies the send/recv protocol and connection management. Can be used as a Context (supports the 'with' statement). """ @@ -180,7 +176,7 @@ def close(self): return # We must unsubscribe, or we'll leak descriptors. - if len(self.subscriptions) > 0: + if self.subscriptions: for f in self.subscriptions: try: self.sock.setsockopt(zmq.UNSUBSCRIBE, f) @@ -199,26 +195,24 @@ def close(self): LOG.error("ZeroMQ socket could not be closed.") self.sock = None - def recv(self): + def recv(self, **kwargs): if not self.can_recv: raise RPCException(_("You cannot recv on this socket.")) - return self.sock.recv_multipart() + return self.sock.recv_multipart(**kwargs) - def send(self, data): + def send(self, data, **kwargs): if not self.can_send: raise RPCException(_("You cannot send on this socket.")) - self.sock.send_multipart(data) + self.sock.send_multipart(data, **kwargs) class ZmqClient(object): """Client for ZMQ sockets.""" - def __init__(self, addr, socket_type=None, bind=False): - if socket_type is None: - socket_type = zmq.PUSH - self.outq = ZmqSocket(addr, socket_type, bind=bind) + def __init__(self, addr): + self.outq = ZmqSocket(addr, zmq.PUSH, bind=False) - def cast(self, msg_id, topic, data, envelope=False): + def cast(self, msg_id, topic, data, envelope): msg_id = msg_id or 0 if not envelope: @@ -282,7 +276,7 @@ def _get_response(self, ctx, proxy, topic, data): except greenlet.GreenletExit: # ignore these since they are just from shutdowns pass - except rpc_common.ClientException, e: + except rpc_common.ClientException as e: LOG.debug(_("Expected exception during message handling (%s)") % e._exc_info[1]) return {'exc': @@ -356,16 +350,14 @@ def process(self, proxy, ctx, data): class ZmqBaseReactor(ConsumerBase): - """ - A consumer class implementing a - centralized casting broker (PULL-PUSH) - for RoundRobin requests. + """A consumer class implementing a centralized casting broker (PULL-PUSH). + + Used for RoundRobin requests. 
""" def __init__(self, conf): super(ZmqBaseReactor, self).__init__() - self.mapping = {} self.proxies = {} self.threads = [] self.sockets = [] @@ -373,9 +365,8 @@ def __init__(self, conf): self.pool = eventlet.greenpool.GreenPool(conf.rpc_thread_pool_size) - def register(self, proxy, in_addr, zmq_type_in, out_addr=None, - zmq_type_out=None, in_bind=True, out_bind=True, - subscribe=None): + def register(self, proxy, in_addr, zmq_type_in, + in_bind=True, subscribe=None): LOG.info(_("Registering reactor")) @@ -391,39 +382,17 @@ def register(self, proxy, in_addr, zmq_type_in, out_addr=None, LOG.info(_("In reactor registered")) - if not out_addr: - return - - if zmq_type_out not in (zmq.PUSH, zmq.PUB): - raise RPCException("Bad output socktype") - - # Items push out. - outq = ZmqSocket(out_addr, zmq_type_out, bind=out_bind) - - self.mapping[inq] = outq - self.mapping[outq] = inq - self.sockets.append(outq) - - LOG.info(_("Out reactor registered")) - - def _consumer_thread_callback(self, sock): - """ Consumer thread callback used by consume_in_* """ - - LOG.info(_("Consuming socket")) - while True: - self.consume(sock) - def consume_in_thread(self): + def _consume(sock): + LOG.info(_("Consuming socket")) + while True: + self.consume(sock) + for k in self.proxies.keys(): self.threads.append( - self.pool.spawn(self._consumer_thread_callback, k) + self.pool.spawn(_consume, k) ) - def consume_in_thread_group(self, thread_group): - """ Consume from all queues/consumers in the supplied ThreadGroup""" - for k in self.proxies.keys(): - thread_group.add_thread(self._consumer_thread_callback, k) - def wait(self): for t in self.threads: t.wait() @@ -437,10 +406,9 @@ def close(self): class ZmqProxy(ZmqBaseReactor): - """ - A consumer class implementing a - topic-based proxy, forwarding to - IPC sockets. + """A consumer class implementing a topic-based proxy. + + Forwards to IPC sockets. 
""" def __init__(self, conf): @@ -453,11 +421,8 @@ def __init__(self, conf): def consume(self, sock): ipc_dir = CONF.rpc_zmq_ipc_dir - #TODO(ewindisch): use zero-copy (i.e. references, not copying) - data = sock.recv() - topic = data[1] - - LOG.debug(_("CONSUMER GOT %s"), ' '.join(map(pformat, data))) + data = sock.recv(copy=False) + topic = data[1].bytes if topic.startswith('fanout~'): sock_type = zmq.PUB @@ -499,9 +464,7 @@ def publisher(waiter): while(True): data = self.topic_proxy[topic].get() - out_sock.send(data) - LOG.debug(_("ROUTER RELAY-OUT SUCCEEDED %(data)s") % - {'data': data}) + out_sock.send(data, copy=False) wait_sock_creation = eventlet.event.Event() eventlet.spawn(publisher, wait_sock_creation) @@ -514,37 +477,34 @@ def publisher(waiter): try: self.topic_proxy[topic].put_nowait(data) - LOG.debug(_("ROUTER RELAY-OUT QUEUED %(data)s") % - {'data': data}) except eventlet.queue.Full: LOG.error(_("Local per-topic backlog buffer full for topic " "%(topic)s. Dropping message.") % {'topic': topic}) def consume_in_thread(self): - """Runs the ZmqProxy service""" + """Runs the ZmqProxy service.""" ipc_dir = CONF.rpc_zmq_ipc_dir consume_in = "tcp://%s:%s" % \ (CONF.rpc_zmq_bind_address, CONF.rpc_zmq_port) consumption_proxy = InternalContext(None) - if not os.path.isdir(ipc_dir): - try: - utils.execute('mkdir', '-p', ipc_dir, run_as_root=True) - utils.execute('chown', "%s:%s" % (os.getuid(), os.getgid()), - ipc_dir, run_as_root=True) - utils.execute('chmod', '750', ipc_dir, run_as_root=True) - except utils.ProcessExecutionError: + try: + os.makedirs(ipc_dir) + except os.error: + if not os.path.isdir(ipc_dir): with excutils.save_and_reraise_exception(): - LOG.error(_("Could not create IPC directory %s") % - (ipc_dir, )) - + LOG.error(_("Required IPC directory does not exist at" + " %s") % (ipc_dir, )) try: self.register(consumption_proxy, consume_in, - zmq.PULL, - out_bind=True) + zmq.PULL) except zmq.ZMQError: + if os.access(ipc_dir, os.X_OK): + with 
excutils.save_and_reraise_exception(): + LOG.error(_("Permission denied to IPC directory at" + " %s") % (ipc_dir, )) with excutils.save_and_reraise_exception(): LOG.error(_("Could not create ZeroMQ receiver daemon. " "Socket may already be in use.")) @@ -554,8 +514,9 @@ def consume_in_thread(self): def unflatten_envelope(packenv): """Unflattens the RPC envelope. - Takes a list and returns a dictionary. - i.e. [1,2,3,4] => {1: 2, 3: 4} + + Takes a list and returns a dictionary. + i.e. [1,2,3,4] => {1: 2, 3: 4} """ i = iter(packenv) h = {} @@ -568,10 +529,9 @@ def unflatten_envelope(packenv): class ZmqReactor(ZmqBaseReactor): - """ - A consumer class implementing a - consumer for messages. Can also be - used as a 1:1 proxy + """A consumer class implementing a consumer for messages. + + Can also be used as a 1:1 proxy """ def __init__(self, conf): @@ -581,11 +541,6 @@ def consume(self, sock): #TODO(ewindisch): use zero-copy (i.e. references, not copying) data = sock.recv() LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data) - if sock in self.mapping: - LOG.debug(_("ROUTER RELAY-OUT %(data)s") % { - 'data': data}) - self.mapping[sock].send(data) - return proxy = self.proxies[sock] @@ -661,9 +616,6 @@ def consume_in_thread(self): _get_matchmaker().start_heartbeat() self.reactor.consume_in_thread() - def consume_in_thread_group(self, thread_group): - self.reactor.consume_in_thread_group(thread_group) - def _cast(addr, context, topic, msg, timeout=None, envelope=False, _msg_id=None): @@ -761,10 +713,9 @@ def _call(addr, context, topic, msg, timeout=None, def _multi_send(method, context, topic, msg, timeout=None, envelope=False, _msg_id=None): - """ - Wraps the sending of messages, - dispatches to the matchmaker and sends - message to all relevant hosts. + """Wraps the sending of messages. + + Dispatches to the matchmaker and sends message to all relevant hosts. 
""" conf = CONF LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))}) @@ -773,7 +724,7 @@ def _multi_send(method, context, topic, msg, timeout=None, LOG.debug(_("Sending message(s) to: %s"), queues) # Don't stack if we have no matchmaker results - if len(queues) == 0: + if not queues: LOG.warn(_("No matchmaker results. Not casting.")) # While not strictly a timeout, callers know how to handle # this exception and a timeout isn't too big a lie. @@ -821,8 +772,8 @@ def fanout_cast(conf, context, topic, msg, **kwargs): def notify(conf, context, topic, msg, envelope): - """ - Send notification event. + """Send notification event. + Notifications are sent to topic-priority. This differs from the AMQP drivers which send to topic.priority. """ @@ -856,6 +807,11 @@ def _get_ctxt(): def _get_matchmaker(*args, **kwargs): global matchmaker if not matchmaker: - matchmaker = importutils.import_object( - CONF.rpc_zmq_matchmaker, *args, **kwargs) + mm = CONF.rpc_zmq_matchmaker + if mm.endswith('matchmaker.MatchMakerRing'): + mm.replace('matchmaker', 'matchmaker_ring') + LOG.warn(_('rpc_zmq_matchmaker = %(orig)s is deprecated; use' + ' %(new)s instead') % dict( + orig=CONF.rpc_zmq_matchmaker, new=mm)) + matchmaker = importutils.import_object(mm, *args, **kwargs) return matchmaker diff --git a/billingstack/openstack/common/rpc/matchmaker.py b/billingstack/openstack/common/rpc/matchmaker.py index d6ee67e..722b6f8 100644 --- a/billingstack/openstack/common/rpc/matchmaker.py +++ b/billingstack/openstack/common/rpc/matchmaker.py @@ -19,8 +19,6 @@ """ import contextlib -import itertools -import json import eventlet from oslo.config import cfg @@ -30,10 +28,6 @@ matchmaker_opts = [ - # Matchmaker ring file - cfg.StrOpt('matchmaker_ringfile', - default='/etc/nova/matchmaker_ring.json', - help='Matchmaker ring file (JSON)'), cfg.IntOpt('matchmaker_heartbeat_freq', default=300, help='Heartbeat frequency'), @@ -54,8 +48,8 @@ class MatchMakerException(Exception): class 
Exchange(object): - """ - Implements lookups. + """Implements lookups. + Subclass this to support hashtables, dns, etc. """ def __init__(self): @@ -66,9 +60,7 @@ def run(self, key): class Binding(object): - """ - A binding on which to perform a lookup. - """ + """A binding on which to perform a lookup.""" def __init__(self): pass @@ -77,10 +69,10 @@ def test(self, key): class MatchMakerBase(object): - """ - Match Maker Base Class. - Build off HeartbeatMatchMakerBase if building a - heartbeat-capable MatchMaker. + """Match Maker Base Class. + + Build off HeartbeatMatchMakerBase if building a heartbeat-capable + MatchMaker. """ def __init__(self): # Array of tuples. Index [2] toggles negation, [3] is last-if-true @@ -90,58 +82,47 @@ def __init__(self): 'registration or heartbeat.') def register(self, key, host): - """ - Register a host on a backend. + """Register a host on a backend. + Heartbeats, if applicable, may keepalive registration. """ pass def ack_alive(self, key, host): - """ - Acknowledge that a key.host is alive. - Used internally for updating heartbeats, - but may also be used publically to acknowledge - a system is alive (i.e. rpc message successfully - sent to host) + """Acknowledge that a key.host is alive. + + Used internally for updating heartbeats, but may also be used + publically to acknowledge a system is alive (i.e. rpc message + successfully sent to host) """ pass def is_alive(self, topic, host): - """ - Checks if a host is alive. - """ + """Checks if a host is alive.""" pass def expire(self, topic, host): - """ - Explicitly expire a host's registration. - """ + """Explicitly expire a host's registration.""" pass def send_heartbeats(self): - """ - Send all heartbeats. + """Send all heartbeats. + Use start_heartbeat to spawn a heartbeat greenthread, which loops this method. """ pass def unregister(self, key, host): - """ - Unregister a topic. - """ + """Unregister a topic.""" pass def start_heartbeat(self): - """ - Spawn heartbeat greenthread. 
- """ + """Spawn heartbeat greenthread.""" pass def stop_heartbeat(self): - """ - Destroys the heartbeat greenthread. - """ + """Destroys the heartbeat greenthread.""" pass def add_binding(self, binding, rule, last=True): @@ -168,10 +149,10 @@ def queues(self, key): class HeartbeatMatchMakerBase(MatchMakerBase): - """ - Base for a heart-beat capable MatchMaker. - Provides common methods for registering, - unregistering, and maintaining heartbeats. + """Base for a heart-beat capable MatchMaker. + + Provides common methods for registering, unregistering, and maintaining + heartbeats. """ def __init__(self): self.hosts = set() @@ -181,8 +162,8 @@ def __init__(self): super(HeartbeatMatchMakerBase, self).__init__() def send_heartbeats(self): - """ - Send all heartbeats. + """Send all heartbeats. + Use start_heartbeat to spawn a heartbeat greenthread, which loops this method. """ @@ -190,32 +171,31 @@ def send_heartbeats(self): self.ack_alive(key, host) def ack_alive(self, key, host): - """ - Acknowledge that a host.topic is alive. - Used internally for updating heartbeats, - but may also be used publically to acknowledge - a system is alive (i.e. rpc message successfully - sent to host) + """Acknowledge that a host.topic is alive. + + Used internally for updating heartbeats, but may also be used + publically to acknowledge a system is alive (i.e. rpc message + successfully sent to host) """ raise NotImplementedError("Must implement ack_alive") def backend_register(self, key, host): - """ - Implements registration logic. + """Implements registration logic. + Called by register(self,key,host) """ raise NotImplementedError("Must implement backend_register") def backend_unregister(self, key, key_host): - """ - Implements de-registration logic. + """Implements de-registration logic. + Called by unregister(self,key,host) """ raise NotImplementedError("Must implement backend_unregister") def register(self, key, host): - """ - Register a host on a backend. 
+ """Register a host on a backend. + Heartbeats, if applicable, may keepalive registration. """ self.hosts.add(host) @@ -227,25 +207,24 @@ def register(self, key, host): self.ack_alive(key, host) def unregister(self, key, host): - """ - Unregister a topic. - """ + """Unregister a topic.""" if (key, host) in self.host_topic: del self.host_topic[(key, host)] self.hosts.discard(host) self.backend_unregister(key, '.'.join((key, host))) - LOG.info(_("Matchmaker unregistered: %s, %s" % (key, host))) + LOG.info(_("Matchmaker unregistered: %(key)s, %(host)s"), + {'key': key, 'host': host}) def start_heartbeat(self): - """ - Implementation of MatchMakerBase.start_heartbeat + """Implementation of MatchMakerBase.start_heartbeat. + Launches greenthread looping send_heartbeats(), yielding for CONF.matchmaker_heartbeat_freq seconds between iterations. """ - if len(self.hosts) == 0: + if not self.hosts: raise MatchMakerException( _("Register before starting heartbeat.")) @@ -257,16 +236,14 @@ def do_heartbeat(): self._heart = eventlet.spawn(do_heartbeat) def stop_heartbeat(self): - """ - Destroys the heartbeat greenthread. - """ + """Destroys the heartbeat greenthread.""" if self._heart: self._heart.kill() class DirectBinding(Binding): - """ - Specifies a host in the key via a '.' character + """Specifies a host in the key via a '.' character. + Although dots are used in the key, the behavior here is that it maps directly to a host, thus direct. """ @@ -277,8 +254,8 @@ def test(self, key): class TopicBinding(Binding): - """ - Where a 'bare' key without dots. + """Where a 'bare' key without dots. 
+ AMQP generally considers topic exchanges to be those *with* dots, but we deviate here in terminology as the behavior here matches that of a topic exchange (whereas where there are dots, behavior @@ -304,67 +281,6 @@ def run(self, key): return [(key, None)] -class RingExchange(Exchange): - """ - Match Maker where hosts are loaded from a static file containing - a hashmap (JSON formatted). - - __init__ takes optional ring dictionary argument, otherwise - loads the ringfile from CONF.mathcmaker_ringfile. - """ - def __init__(self, ring=None): - super(RingExchange, self).__init__() - - if ring: - self.ring = ring - else: - fh = open(CONF.matchmaker_ringfile, 'r') - self.ring = json.load(fh) - fh.close() - - self.ring0 = {} - for k in self.ring.keys(): - self.ring0[k] = itertools.cycle(self.ring[k]) - - def _ring_has(self, key): - if key in self.ring0: - return True - return False - - -class RoundRobinRingExchange(RingExchange): - """A Topic Exchange based on a hashmap.""" - def __init__(self, ring=None): - super(RoundRobinRingExchange, self).__init__(ring) - - def run(self, key): - if not self._ring_has(key): - LOG.warn( - _("No key defining hosts for topic '%s', " - "see ringfile") % (key, ) - ) - return [] - host = next(self.ring0[key]) - return [(key + '.' + host, host)] - - -class FanoutRingExchange(RingExchange): - """Fanout Exchange based on a hashmap.""" - def __init__(self, ring=None): - super(FanoutRingExchange, self).__init__(ring) - - def run(self, key): - # Assume starts with "fanout~", strip it for lookup. - nkey = key.split('fanout~')[1:][0] - if not self._ring_has(nkey): - LOG.warn( - _("No key defining hosts for topic '%s', " - "see ringfile") % (nkey, ) - ) - return [] - return map(lambda x: (key + '.' 
+ x, x), self.ring[nkey]) - - class LocalhostExchange(Exchange): """Exchange where all direct topics are local.""" def __init__(self, host='localhost'): @@ -376,8 +292,8 @@ def run(self, key): class DirectExchange(Exchange): - """ - Exchange where all topic keys are split, sending to second half. + """Exchange where all topic keys are split, sending to second half. + i.e. "compute.host" sends a message to "compute.host" running on "host" """ def __init__(self): @@ -388,20 +304,9 @@ def run(self, key): return [(key, e)] -class MatchMakerRing(MatchMakerBase): - """ - Match Maker where hosts are loaded from a static hashmap. - """ - def __init__(self, ring=None): - super(MatchMakerRing, self).__init__() - self.add_binding(FanoutBinding(), FanoutRingExchange(ring)) - self.add_binding(DirectBinding(), DirectExchange()) - self.add_binding(TopicBinding(), RoundRobinRingExchange(ring)) - - class MatchMakerLocalhost(MatchMakerBase): - """ - Match Maker where all bare topics resolve to localhost. + """Match Maker where all bare topics resolve to localhost. + Useful for testing. """ def __init__(self, host='localhost'): @@ -412,13 +317,13 @@ def __init__(self, host='localhost'): class MatchMakerStub(MatchMakerBase): - """ - Match Maker where topics are untouched. + """Match Maker where topics are untouched. + Useful for testing, or for AMQP/brokered queues. Will not work where knowledge of hosts is known (i.e. 
zeromq) """ def __init__(self): - super(MatchMakerLocalhost, self).__init__() + super(MatchMakerStub, self).__init__() self.add_binding(FanoutBinding(), StubExchange()) self.add_binding(DirectBinding(), StubExchange()) diff --git a/billingstack/openstack/common/rpc/matchmaker_redis.py b/billingstack/openstack/common/rpc/matchmaker_redis.py index 4247d6d..273e164 100644 --- a/billingstack/openstack/common/rpc/matchmaker_redis.py +++ b/billingstack/openstack/common/rpc/matchmaker_redis.py @@ -55,8 +55,8 @@ def __init__(self, matchmaker): class RedisTopicExchange(RedisExchange): - """ - Exchange where all topic keys are split, sending to second half. + """Exchange where all topic keys are split, sending to second half. + i.e. "compute.host" sends a message to "compute" running on "host" """ def run(self, topic): @@ -77,9 +77,7 @@ def run(self, topic): class RedisFanoutExchange(RedisExchange): - """ - Return a list of all hosts. - """ + """Return a list of all hosts.""" def run(self, topic): topic = topic.split('~', 1)[1] hosts = self.redis.smembers(topic) @@ -90,9 +88,7 @@ def run(self, topic): class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase): - """ - MatchMaker registering and looking-up hosts with a Redis server. - """ + """MatchMaker registering and looking-up hosts with a Redis server.""" def __init__(self): super(MatchMakerRedis, self).__init__() diff --git a/billingstack/openstack/common/rpc/matchmaker_ring.py b/billingstack/openstack/common/rpc/matchmaker_ring.py new file mode 100644 index 0000000..ecec28e --- /dev/null +++ b/billingstack/openstack/common/rpc/matchmaker_ring.py @@ -0,0 +1,110 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2011-2013 Cloudscaling Group, Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +""" +The MatchMaker classes should except a Topic or Fanout exchange key and +return keys for direct exchanges, per (approximate) AMQP parlance. +""" + +import itertools +import json + +from oslo.config import cfg + +from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common import log as logging +from billingstack.openstack.common.rpc import matchmaker as mm + + +matchmaker_opts = [ + # Matchmaker ring file + cfg.StrOpt('ringfile', + deprecated_name='matchmaker_ringfile', + deprecated_group='DEFAULT', + default='/etc/oslo/matchmaker_ring.json', + help='Matchmaker ring file (JSON)'), +] + +CONF = cfg.CONF +CONF.register_opts(matchmaker_opts, 'matchmaker_ring') +LOG = logging.getLogger(__name__) + + +class RingExchange(mm.Exchange): + """Match Maker where hosts are loaded from a static JSON formatted file. + + __init__ takes optional ring dictionary argument, otherwise + loads the ringfile from CONF.mathcmaker_ringfile. 
+ """ + def __init__(self, ring=None): + super(RingExchange, self).__init__() + + if ring: + self.ring = ring + else: + fh = open(CONF.matchmaker_ring.ringfile, 'r') + self.ring = json.load(fh) + fh.close() + + self.ring0 = {} + for k in self.ring.keys(): + self.ring0[k] = itertools.cycle(self.ring[k]) + + def _ring_has(self, key): + if key in self.ring0: + return True + return False + + +class RoundRobinRingExchange(RingExchange): + """A Topic Exchange based on a hashmap.""" + def __init__(self, ring=None): + super(RoundRobinRingExchange, self).__init__(ring) + + def run(self, key): + if not self._ring_has(key): + LOG.warn( + _("No key defining hosts for topic '%s', " + "see ringfile") % (key, ) + ) + return [] + host = next(self.ring0[key]) + return [(key + '.' + host, host)] + + +class FanoutRingExchange(RingExchange): + """Fanout Exchange based on a hashmap.""" + def __init__(self, ring=None): + super(FanoutRingExchange, self).__init__(ring) + + def run(self, key): + # Assume starts with "fanout~", strip it for lookup. + nkey = key.split('fanout~')[1:][0] + if not self._ring_has(nkey): + LOG.warn( + _("No key defining hosts for topic '%s', " + "see ringfile") % (nkey, ) + ) + return [] + return map(lambda x: (key + '.' + x, x), self.ring[nkey]) + + +class MatchMakerRing(mm.MatchMakerBase): + """Match Maker where hosts are loaded from a static hashmap.""" + def __init__(self, ring=None): + super(MatchMakerRing, self).__init__() + self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring)) + self.add_binding(mm.DirectBinding(), mm.DirectExchange()) + self.add_binding(mm.TopicBinding(), RoundRobinRingExchange(ring)) diff --git a/billingstack/openstack/common/rpc/proxy.py b/billingstack/openstack/common/rpc/proxy.py index e85bffd..cda9f61 100644 --- a/billingstack/openstack/common/rpc/proxy.py +++ b/billingstack/openstack/common/rpc/proxy.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2012 Red Hat, Inc. 
+# Copyright 2012-2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -23,6 +23,8 @@ from billingstack.openstack.common import rpc +from billingstack.openstack.common.rpc import common as rpc_common +from billingstack.openstack.common.rpc import serializer as rpc_serializer class RpcProxy(object): @@ -34,16 +36,28 @@ class RpcProxy(object): rpc API. """ - def __init__(self, topic, default_version): + # The default namespace, which can be overriden in a subclass. + RPC_API_NAMESPACE = None + + def __init__(self, topic, default_version, version_cap=None, + serializer=None): """Initialize an RpcProxy. :param topic: The topic to use for all messages. :param default_version: The default API version to request in all outgoing messages. This can be overridden on a per-message basis. + :param version_cap: Optionally cap the maximum version used for sent + messages. + :param serializer: Optionaly (de-)serialize entities with a + provided helper. """ self.topic = topic self.default_version = default_version + self.version_cap = version_cap + if serializer is None: + serializer = rpc_serializer.NoOpSerializer() + self.serializer = serializer super(RpcProxy, self).__init__() def _set_version(self, msg, vers): @@ -52,19 +66,44 @@ def _set_version(self, msg, vers): :param msg: The message having a version added to it. :param vers: The version number to add to the message. 
""" - msg['version'] = vers if vers else self.default_version + v = vers if vers else self.default_version + if (self.version_cap and not + rpc_common.version_is_compatible(self.version_cap, v)): + raise rpc_common.RpcVersionCapError(version=self.version_cap) + msg['version'] = v def _get_topic(self, topic): """Return the topic to use for a message.""" return topic if topic else self.topic + def can_send_version(self, version): + """Check to see if a version is compatible with the version cap.""" + return (not self.version_cap or + rpc_common.version_is_compatible(self.version_cap, version)) + @staticmethod def make_namespaced_msg(method, namespace, **kwargs): return {'method': method, 'namespace': namespace, 'args': kwargs} - @staticmethod - def make_msg(method, **kwargs): - return RpcProxy.make_namespaced_msg(method, None, **kwargs) + def make_msg(self, method, **kwargs): + return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE, + **kwargs) + + def _serialize_msg_args(self, context, kwargs): + """Helper method called to serialize message arguments. + + This calls our serializer on each argument, returning a new + set of args that have been serialized. + + :param context: The request context + :param kwargs: The arguments to serialize + :returns: A new set of serialized arguments + """ + new_kwargs = dict() + for argname, arg in kwargs.iteritems(): + new_kwargs[argname] = self.serializer.serialize_entity(context, + arg) + return new_kwargs def call(self, context, msg, topic=None, version=None, timeout=None): """rpc.call() a remote method. @@ -81,9 +120,11 @@ def call(self, context, msg, topic=None, version=None, timeout=None): :returns: The return value from the remote method. 
""" self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) real_topic = self._get_topic(topic) try: - return rpc.call(context, real_topic, msg, timeout) + result = rpc.call(context, real_topic, msg, timeout) + return self.serializer.deserialize_entity(context, result) except rpc.common.Timeout as exc: raise rpc.common.Timeout( exc.info, real_topic, msg.get('method')) @@ -104,9 +145,11 @@ def multicall(self, context, msg, topic=None, version=None, timeout=None): from the remote method as they arrive. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) real_topic = self._get_topic(topic) try: - return rpc.multicall(context, real_topic, msg, timeout) + result = rpc.multicall(context, real_topic, msg, timeout) + return self.serializer.deserialize_entity(context, result) except rpc.common.Timeout as exc: raise rpc.common.Timeout( exc.info, real_topic, msg.get('method')) @@ -124,6 +167,7 @@ def cast(self, context, msg, topic=None, version=None): remote method. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.cast(context, self._get_topic(topic), msg) def fanout_cast(self, context, msg, topic=None, version=None): @@ -139,6 +183,7 @@ def fanout_cast(self, context, msg, topic=None, version=None): from the remote method. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.fanout_cast(context, self._get_topic(topic), msg) def cast_to_server(self, context, server_params, msg, topic=None, @@ -157,6 +202,7 @@ def cast_to_server(self, context, server_params, msg, topic=None, return values. 
""" self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.cast_to_server(context, server_params, self._get_topic(topic), msg) def fanout_cast_to_server(self, context, server_params, msg, topic=None, @@ -175,5 +221,6 @@ def fanout_cast_to_server(self, context, server_params, msg, topic=None, return values. """ self._set_version(msg, version) + msg['args'] = self._serialize_msg_args(context, msg['args']) rpc.fanout_cast_to_server(context, server_params, self._get_topic(topic), msg) diff --git a/billingstack/openstack/common/rpc/serializer.py b/billingstack/openstack/common/rpc/serializer.py new file mode 100644 index 0000000..76c6831 --- /dev/null +++ b/billingstack/openstack/common/rpc/serializer.py @@ -0,0 +1,52 @@ +# Copyright 2013 IBM Corp. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +"""Provides the definition of an RPC serialization handler""" + +import abc + + +class Serializer(object): + """Generic (de-)serialization definition base class.""" + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def serialize_entity(self, context, entity): + """Serialize something to primitive form. + + :param context: Security context + :param entity: Entity to be serialized + :returns: Serialized form of entity + """ + pass + + @abc.abstractmethod + def deserialize_entity(self, context, entity): + """Deserialize something from primitive form. 
+ + :param context: Security context + :param entity: Primitive to be deserialized + :returns: Deserialized form of entity + """ + pass + + +class NoOpSerializer(Serializer): + """A serializer that does nothing.""" + + def serialize_entity(self, context, entity): + return entity + + def deserialize_entity(self, context, entity): + return entity diff --git a/billingstack/openstack/common/rpc/service.py b/billingstack/openstack/common/rpc/service.py index c38e3c2..225fcc8 100644 --- a/billingstack/openstack/common/rpc/service.py +++ b/billingstack/openstack/common/rpc/service.py @@ -30,7 +30,8 @@ class Service(service.Service): """Service object for binaries running on hosts. - A service enables rpc by listening to queues based on topic and host.""" + A service enables rpc by listening to queues based on topic and host. + """ def __init__(self, host, topic, manager=None): super(Service, self).__init__() self.host = host @@ -63,7 +64,7 @@ def start(self): self.manager.initialize_service_hook(self) # Consume from all consumers in a thread - self.conn.consume_in_thread_group(self.tg) + self.conn.consume_in_thread() def stop(self): # Try to shut the connection down, but if we get any sort of diff --git a/billingstack/openstack/common/service.py b/billingstack/openstack/common/service.py index 535e65c..8b2dc14 100644 --- a/billingstack/openstack/common/service.py +++ b/billingstack/openstack/common/service.py @@ -52,7 +52,7 @@ def __init__(self): """ self._services = threadgroup.ThreadGroup() - eventlet_backdoor.initialize_if_enabled() + self.backdoor_port = eventlet_backdoor.initialize_if_enabled() @staticmethod def run_service(service): @@ -72,6 +72,7 @@ def launch_service(self, service): :returns: None """ + service.backdoor_port = self.backdoor_port self._services.add_thread(self.run_service, service) def stop(self): @@ -270,7 +271,7 @@ def _wait_child(self): return wrap def wait(self): - """Loop waiting on children to die and respawning as necessary""" + """Loop 
waiting on children to die and respawning as necessary.""" LOG.debug(_('Full set of CONF:')) CONF.log_opt_values(LOG, std_logging.DEBUG) diff --git a/billingstack/openstack/common/threadgroup.py b/billingstack/openstack/common/threadgroup.py index 60b5c92..e0b867d 100644 --- a/billingstack/openstack/common/threadgroup.py +++ b/billingstack/openstack/common/threadgroup.py @@ -26,7 +26,7 @@ def _thread_done(gt, *args, **kwargs): - """ Callback function to be passed to GreenThread.link() when we spawn() + """Callback function to be passed to GreenThread.link() when we spawn() Calls the :class:`ThreadGroup` to notify if. """ @@ -34,7 +34,7 @@ def _thread_done(gt, *args, **kwargs): class Thread(object): - """ Wrapper around a greenthread, that holds a reference to the + """Wrapper around a greenthread, that holds a reference to the :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when it has done so it can be removed from the threads list. """ @@ -50,7 +50,7 @@ def wait(self): class ThreadGroup(object): - """ The point of the ThreadGroup classis to: + """The point of the ThreadGroup classis to: * keep track of timers and greenthreads (making it easier to stop them when need be). 
@@ -61,6 +61,13 @@ def __init__(self, thread_pool_size=10): self.threads = [] self.timers = [] + def add_dynamic_timer(self, callback, initial_delay=None, + periodic_interval_max=None, *args, **kwargs): + timer = loopingcall.DynamicLoopingCall(callback, *args, **kwargs) + timer.start(initial_delay=initial_delay, + periodic_interval_max=periodic_interval_max) + self.timers.append(timer) + def add_timer(self, interval, callback, initial_delay=None, *args, **kwargs): pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs) diff --git a/billingstack/openstack/common/timeutils.py b/billingstack/openstack/common/timeutils.py index 6094365..bd60489 100644 --- a/billingstack/openstack/common/timeutils.py +++ b/billingstack/openstack/common/timeutils.py @@ -23,6 +23,7 @@ import datetime import iso8601 +import six # ISO 8601 extended time format with microseconds @@ -32,7 +33,7 @@ def isotime(at=None, subsecond=False): - """Stringify time in ISO 8601 format""" + """Stringify time in ISO 8601 format.""" if not at: at = utcnow() st = at.strftime(_ISO8601_TIME_FORMAT @@ -44,7 +45,7 @@ def isotime(at=None, subsecond=False): def parse_isotime(timestr): - """Parse time from ISO 8601 format""" + """Parse time from ISO 8601 format.""" try: return iso8601.parse_date(timestr) except iso8601.ParseError as e: @@ -66,7 +67,7 @@ def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): def normalize_time(timestamp): - """Normalize time in arbitrary timezone to UTC naive object""" + """Normalize time in arbitrary timezone to UTC naive object.""" offset = timestamp.utcoffset() if offset is None: return timestamp @@ -75,14 +76,14 @@ def normalize_time(timestamp): def is_older_than(before, seconds): """Return True if before is older than seconds.""" - if isinstance(before, basestring): + if isinstance(before, six.string_types): before = parse_strtime(before).replace(tzinfo=None) return utcnow() - before > datetime.timedelta(seconds=seconds) def is_newer_than(after, seconds): 
"""Return True if after is newer than seconds.""" - if isinstance(after, basestring): + if isinstance(after, six.string_types): after = parse_strtime(after).replace(tzinfo=None) return after - utcnow() > datetime.timedelta(seconds=seconds) @@ -103,7 +104,7 @@ def utcnow(): def iso8601_from_timestamp(timestamp): - """Returns a iso8601 formated date from timestamp""" + """Returns a iso8601 formated date from timestamp.""" return isotime(datetime.datetime.utcfromtimestamp(timestamp)) @@ -111,9 +112,9 @@ def iso8601_from_timestamp(timestamp): def set_time_override(override_time=datetime.datetime.utcnow()): - """ - Override utils.utcnow to return a constant time or a list thereof, - one at a time. + """Overrides utils.utcnow. + + Make it return a constant time or a list thereof, one at a time. """ utcnow.override_time = override_time @@ -141,7 +142,8 @@ def clear_time_override(): def marshall_now(now=None): """Make an rpc-safe datetime with microseconds. - Note: tzinfo is stripped, but not required for relative times.""" + Note: tzinfo is stripped, but not required for relative times. + """ if not now: now = utcnow() return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, @@ -161,7 +163,8 @@ def unmarshall_time(tyme): def delta_seconds(before, after): - """ + """Return the difference between two timing objects. + Compute the difference in seconds between two date, time, or datetime objects (as a float, to microsecond resolution). """ @@ -174,8 +177,7 @@ def delta_seconds(before, after): def is_soon(dt, window): - """ - Determines if time is going to happen in the next window seconds. + """Determines if time is going to happen in the next window seconds. 
:params dt: the time :params window: minimum seconds to remain to consider the time not soon diff --git a/tools/pip-requires b/tools/pip-requires index 6d8fb35..115534b 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -1,18 +1,18 @@ d2to1>=0.2.10,<0.3 -pbr>=0.5,<0.6 - +pbr>=0.5.16,<0.6 # This file is managed by openstack-depends argparse -cliff -eventlet +cliff>=1.4 +eventlet>=0.12.0 extras -Flask +flask==0.9 iso8601>=0.1.4 +netaddr oslo.config>=1.1.0 -Paste -PasteDeploy +paste +pastedeploy>=1.5.0 pycountry routes>=1.12.3 -stevedore -WebOb>=1.0.8 -wsme +stevedore>=0.9 +webob>=1.2.3,<1.3 +wsme>=0.5b2 diff --git a/tools/test-requires b/tools/test-requires index 6ae2af5..85a01dc 100644 --- a/tools/test-requires +++ b/tools/test-requires @@ -1,15 +1,14 @@ # This file is managed by openstack-depends -Babel>=0.9.6 -coverage -docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. -flake8 -mock -mox +coverage>=3.6 +docutils==0.9.1 +flake8==2.0 +mock>=0.8.0 +mox>=0.5.3 nose -nosehtmloutput -openstack.nose_plugin +nosehtmloutput>=0.0.3 +openstack.nose_plugin>=0.7 python-subunit -sphinx +sphinx>=1.1.2 sphinxcontrib-httpdomain -testrepository +testrepository>=0.0.13 unittest2 From 2c237b5f5278f5d5d4491ff5eacf7c35b1067be6 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 3 Jul 2013 22:57:48 +0200 Subject: [PATCH 164/182] Carry oslo-incubator review #34949. 
Change-Id: I64cbecfc05e19c997411b1c7c3e0790345e2a185 --- billingstack/openstack/common/rpc/impl_kombu.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py index b5197be..2caaa0e 100644 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ b/billingstack/openstack/common/rpc/impl_kombu.py @@ -83,6 +83,9 @@ default=0, help='maximum retries with trying to connect to RabbitMQ ' '(the default of 0 implies an infinite retry count)'), + cfg.IntOpt('rabbit_heartbeat', + default=0, + help='Seconds between connection keepalive heartbeats'), cfg.BoolOpt('rabbit_durable_queues', default=False, help='use durable queues in RabbitMQ'), @@ -450,6 +453,7 @@ def __init__(self, conf, server_params=None): 'userid': self.conf.rabbit_userid, 'password': self.conf.rabbit_password, 'virtual_host': self.conf.rabbit_virtual_host, + 'heartbeat': self.conf.rabbit_heartbeat, } for sp_key, value in server_params.iteritems(): From 2f53dc2f673beb89e8fb846fd671056892fa8851 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 4 Jul 2013 14:43:07 +0200 Subject: [PATCH 165/182] consumder_thread.wait() Change-Id: Ia4275c9e911e36c1ea3fd409c43787f4ec1e6ab5 --- billingstack/biller/service.py | 4 ++++ billingstack/central/service.py | 4 ++++ billingstack/collector/service.py | 4 ++++ billingstack/rater/service.py | 4 ++++ 4 files changed, 16 insertions(+) diff --git a/billingstack/biller/service.py b/billingstack/biller/service.py index cac82be..bedc8c6 100644 --- a/billingstack/biller/service.py +++ b/billingstack/biller/service.py @@ -46,6 +46,10 @@ def start(self): self.storage_conn = get_connection('biller') super(Service, self).start() + def wait(self): + super(Service, self).wait() + self.conn.consumer_thread.wait() + def create_invoice_state(self, ctxt, values): return self.storage_conn.create_invoice_state(ctxt, values) diff --git a/billingstack/central/service.py 
b/billingstack/central/service.py index a675874..f7eb987 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -44,6 +44,10 @@ def start(self): self.storage_conn = get_connection('central') super(Service, self).start() + def wait(self): + super(Service, self).wait() + self.conn.consumer_thread.wait() + def __getattr__(self, name): """ Proxy onto the storage api if there is no local method present.. diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py index d8d962a..7a1b9a8 100644 --- a/billingstack/collector/service.py +++ b/billingstack/collector/service.py @@ -48,6 +48,10 @@ def __init__(self, *args, **kwargs): # Get a storage connection self.central_api = CentralAPI() + def wait(self): + super(Service, self).wait() + self.conn.consumer_thread.wait() + def get_pg_provider(self, ctxt, pg_info): """ Work out a PGC config either from pg_info or via ctxt fetching it diff --git a/billingstack/rater/service.py b/billingstack/rater/service.py index 351e80d..652a134 100644 --- a/billingstack/rater/service.py +++ b/billingstack/rater/service.py @@ -50,6 +50,10 @@ def start(self): self.storage_conn = get_connection('rater') super(Service, self).start() + def wait(self): + super(Service, self).wait() + self.conn.consumer_thread.wait() + def create_usage(self, ctxt, values): return self.storage_conn.create_usage(ctxt, values) From d31edd3a6d64f94cbab15ef7193ad92687811684 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 5 Jul 2013 15:03:20 +0200 Subject: [PATCH 166/182] Add some comments and remove unnused code Change-Id: I6ad4094c3dae5e99aa885d9a00905fc96f37cc34 --- billingstack/central/service.py | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index f7eb987..5dfefb0 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -67,6 +67,7 @@ def _wrapper(*args, **kw): 
setattr(self, name, _wrapper) return _wrapper + # Currency def create_currency(self, ctxt, values): return self.storage_conn.create_currency(ctxt, values) @@ -82,6 +83,7 @@ def update_currency(self, ctxt, id_, values): def delete_currency(self, ctxt, id_): return self.storage_conn.delete_currency(ctxt, id_) + # Language def create_language(self, ctxt, values): return self.storage_conn.create_language(ctxt, values) @@ -97,7 +99,7 @@ def update_language(self, ctxt, id_, values): def delete_language(self, ctxt, id_): return self.storage_conn.delete_language(ctxt, id_) - # TODO Fix + # Contact Info def create_contact_info(self, ctxt, obj, values, cls=None, rel_attr='contact_info'): return self.storage_conn.create_contact_info(ctxt, values) @@ -111,27 +113,14 @@ def update_contact_info(self, ctxt, id_, values): def delete_contact_info(self, ctxt, id_): return self.storage_conn.delete_contact_info(ctxt, id_) + # PGP def list_pg_providers(self, ctxt, **kw): return self.storage_conn.list_pg_providers(ctxt, **kw) def get_pg_provider(self, ctxt, pgp_id): return self.storage_conn.get_pg_provider(ctxt, pgp_id) - def create_pg_method(self, ctxt, values): - return self.storage_conn.create_pg_method(ctxt, values) - - def list_pg_methods(self, ctxt, **kw): - return self.storage_conn.list_pg_methods(ctxt, **kw) - - def get_pg_method(self, ctxt, id_): - return self.storage_conn.get_pg_method(ctxt, id_) - - def update_pg_method(self, ctxt, id_, values): - return self.storage_conn.update_pg_method(ctxt, id_, values) - - def delete_pg_method(self, ctxt, id_): - return self.storage_conn.delete_pg_method(ctxt, id_) - + # PGC def create_pg_config(self, ctxt, merchant_id, values): return self.storage_conn.create_pg_config(ctxt, merchant_id, values) @@ -147,6 +136,7 @@ def update_pg_config(self, ctxt, id_, values): def delete_pg_config(self, ctxt, id_): return self.storage_conn.delete_pg_config(ctxt, id_) + # PM def create_payment_method(self, ctxt, customer_id, values): return 
self.storage_conn.create_payment_method( ctxt, customer_id, values) @@ -163,6 +153,7 @@ def update_payment_method(self, ctxt, id_, values): def delete_payment_method(self, ctxt, id_): return self.storage_conn.delete_payment_method(ctxt, id_) + # Merchant def create_merchant(self, ctxt, values): return self.storage_conn.create_merchant(ctxt, values) @@ -178,6 +169,7 @@ def update_merchant(self, ctxt, id_, values): def delete_merchant(self, ctxt, id_): return self.storage_conn.delete_merchant(ctxt, id_) + # Customer def create_customer(self, ctxt, merchant_id, values): return self.storage_conn.create_customer(ctxt, merchant_id, values) @@ -193,6 +185,7 @@ def update_customer(self, ctxt, id_, values): def delete_customer(self, ctxt, id_): return self.storage_conn.delete_customer(ctxt, id_) + # Plans def create_plan(self, ctxt, merchant_id, values): return self.storage_conn.create_plan(ctxt, merchant_id, values) @@ -211,6 +204,7 @@ def delete_plan(self, ctxt, id_): def get_plan_by_subscription(self, ctxt, id_): return self.storage_conn.get_plan_by_subscription(ctxt, id_) + # PlanItems def create_plan_item(self, ctxt, values): return self.storage_conn.create_plan_item(ctxt, values) @@ -227,6 +221,7 @@ def update_plan_item(self, ctxt, plan_id, product_id, values): def delete_plan_item(self, ctxt, plan_id, product_id): return self.storage_conn.delete_plan_item(ctxt, plan_id, product_id) + # Products def create_product(self, ctxt, merchant_id, values): return self.storage_conn.create_product(ctxt, merchant_id, values) @@ -242,6 +237,7 @@ def update_product(self, ctxt, id_, values): def delete_product(self, ctxt, id_): return self.storage_conn.delete_product(ctxt, id_) + # Subscriptions def create_subscription(self, ctxt, values): return self.storage_conn.create_subscription(ctxt, values) From 4cb0761877d72486c36630308fa97acbfef5a769 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 5 Jul 2013 15:40:07 +0200 Subject: [PATCH 167/182] Ignore .codeintel Change-Id: 
I5223c8eb92a60766ecf60d3c800687baa7623467 --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index d04a09b..e764b05 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,7 @@ nosetests.xml .project .pydevproject .venv +.codeintel doc/source/api/* doc/build/* From 599bffc54bcc52ca4a94bb776d5200089b4a187b Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 25 Jul 2013 18:38:56 +0000 Subject: [PATCH 168/182] Update ignore and add discovery... Change-Id: I792b9e1d775b81ef7689eae460413d1bf6a8e4d4 --- .gitignore | 4 +++- tools/test-requires | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index e764b05..960575b 100644 --- a/.gitignore +++ b/.gitignore @@ -51,4 +51,6 @@ billingstack/versioninfo billingstack-screenrc -run/ +status +logs +.ropeproject diff --git a/tools/test-requires b/tools/test-requires index 85a01dc..b667164 100644 --- a/tools/test-requires +++ b/tools/test-requires @@ -1,5 +1,6 @@ # This file is managed by openstack-depends coverage>=3.6 +discover docutils==0.9.1 flake8==2.0 mock>=0.8.0 From 6cfcec5404bea8b8eae8e13b04aa3525ce983fa8 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 28 Jul 2013 14:46:14 +0000 Subject: [PATCH 169/182] Remove usage of __getattr__ to proxy onto storage Change-Id: I5ef5fbb587ca263f1310d538f5e1328fe69701bb --- billingstack/central/service.py | 20 -------------------- billingstack/collector/service.py | 20 -------------------- billingstack/tests/base.py | 3 ++- 3 files changed, 2 insertions(+), 41 deletions(-) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 5dfefb0..6f038db 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -13,7 +13,6 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
-import functools import sys from oslo.config import cfg @@ -48,25 +47,6 @@ def wait(self): super(Service, self).wait() self.conn.consumer_thread.wait() - def __getattr__(self, name): - """ - Proxy onto the storage api if there is no local method present.. - - For now to avoid to have to write up every method once more here... - """ - if hasattr(self, name): - return getattr(self, name) - - f = getattr(self.storage_conn, name) - if not f: - raise AttributeError - - @functools.wraps(f) - def _wrapper(*args, **kw): - return f(*args, **kw) - setattr(self, name, _wrapper) - return _wrapper - # Currency def create_currency(self, ctxt, values): return self.storage_conn.create_currency(ctxt, values) diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py index 7a1b9a8..dc5e910 100644 --- a/billingstack/collector/service.py +++ b/billingstack/collector/service.py @@ -17,7 +17,6 @@ A service that does calls towards the PGP web endpoint or so """ -import functools import sys from oslo.config import cfg @@ -69,25 +68,6 @@ def create_account(self, ctxt, values, pg_config=None): :param values: The account values """ - def __getattr__(self, name): - """ - Proxy onto the storage api if there is no local method present.. - - For now to avoid to have to write up every method once more here... 
- """ - if hasattr(self, name): - return getattr(self, name) - - f = getattr(self.provider, name) - if not f: - raise AttributeError - - @functools.wraps(f) - def _wrapper(*args, **kw): - return f(*args, **kw) - setattr(self, name, _wrapper) - return _wrapper - def launch(): bs_service.prepare_service(sys.argv) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index 44cedd9..dd482fe 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -420,7 +420,8 @@ def pg_provider_register(self, fixture=0, values={}, **kw): fixture['methods'] = [self.get_fixture('pg_method')] ctxt = kw.pop('context', self.admin_ctxt) - data = self.services.central.pg_provider_register(ctxt, fixture, **kw) + data = self.services.central.storage_conn.pg_provider_register( + ctxt, fixture, **kw) return fixture, data From d58917d5735575b8e3f04ebf05f0fce3495792bc Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 28 Jul 2013 15:17:16 +0000 Subject: [PATCH 170/182] Sync oslo Change-Id: I4bd4e03857e7812748a32110c401451a9617c560 --- billingstack/openstack/common/context.py | 2 +- billingstack/openstack/common/db/exception.py | 2 +- .../openstack/common/eventlet_backdoor.py | 18 +- billingstack/openstack/common/exception.py | 26 +-- billingstack/openstack/common/excutils.py | 40 +++- billingstack/openstack/common/fileutils.py | 2 +- billingstack/openstack/common/gettextutils.py | 37 ++- billingstack/openstack/common/lockutils.py | 185 ++++++++------- billingstack/openstack/common/log.py | 12 +- billingstack/openstack/common/loopingcall.py | 2 +- billingstack/openstack/common/notifier/api.py | 39 ++-- .../openstack/common/notifier/rpc_notifier.py | 2 +- .../common/notifier/rpc_notifier2.py | 2 +- billingstack/openstack/common/processutils.py | 2 +- billingstack/openstack/common/rpc/__init__.py | 2 +- billingstack/openstack/common/rpc/amqp.py | 16 +- billingstack/openstack/common/rpc/common.py | 71 ++---- .../openstack/common/rpc/impl_kombu.py | 41 ++-- 
.../openstack/common/rpc/impl_qpid.py | 30 ++- billingstack/openstack/common/rpc/impl_zmq.py | 2 +- .../openstack/common/rpc/matchmaker.py | 2 +- .../openstack/common/rpc/matchmaker_ring.py | 2 +- billingstack/openstack/common/rpc/proxy.py | 2 +- billingstack/openstack/common/rpc/service.py | 8 +- billingstack/openstack/common/service.py | 217 ++++++++++++++---- billingstack/openstack/common/sslutils.py | 24 +- billingstack/openstack/common/threadgroup.py | 6 +- 27 files changed, 483 insertions(+), 311 deletions(-) diff --git a/billingstack/openstack/common/context.py b/billingstack/openstack/common/context.py index a236bdd..401c9dd 100644 --- a/billingstack/openstack/common/context.py +++ b/billingstack/openstack/common/context.py @@ -61,7 +61,7 @@ def to_dict(self): 'request_id': self.request_id} -def get_admin_context(show_deleted="no"): +def get_admin_context(show_deleted=False): context = RequestContext(None, tenant=None, is_admin=True, diff --git a/billingstack/openstack/common/db/exception.py b/billingstack/openstack/common/db/exception.py index a1fe3a3..0a231cf 100644 --- a/billingstack/openstack/common/db/exception.py +++ b/billingstack/openstack/common/db/exception.py @@ -18,7 +18,7 @@ """DB related custom exceptions.""" -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa class DBError(Exception): diff --git a/billingstack/openstack/common/eventlet_backdoor.py b/billingstack/openstack/common/eventlet_backdoor.py index 01bc984..e7d550a 100644 --- a/billingstack/openstack/common/eventlet_backdoor.py +++ b/billingstack/openstack/common/eventlet_backdoor.py @@ -31,20 +31,20 @@ import greenlet from oslo.config import cfg -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import log as logging -help_for_backdoor_port = 'Acceptable ' + \ - 'values are 0, and :, where 0 results in ' + \ - 
'listening on a random tcp port number, results in ' + \ - 'listening on the specified port number and not enabling backdoor' + \ - 'if it is in use and : results in listening on the ' + \ - 'smallest unused port number within the specified range of port ' + \ - 'numbers. The chosen port is displayed in the service\'s log file.' +help_for_backdoor_port = ( + "Acceptable values are 0, , and :, where 0 results " + "in listening on a random tcp port number; results in listening " + "on the specified port number (and not enabling backdoor if that port " + "is in use); and : results in listening on the smallest " + "unused port number within the specified range of port numbers. The " + "chosen port is displayed in the service's log file.") eventlet_backdoor_opts = [ cfg.StrOpt('backdoor_port', default=None, - help='Enable eventlet backdoor. %s' % help_for_backdoor_port) + help="Enable eventlet backdoor. %s" % help_for_backdoor_port) ] CONF = cfg.CONF diff --git a/billingstack/openstack/common/exception.py b/billingstack/openstack/common/exception.py index 96463a1..df4c277 100644 --- a/billingstack/openstack/common/exception.py +++ b/billingstack/openstack/common/exception.py @@ -21,7 +21,7 @@ import logging -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa _FATAL_EXCEPTION_FORMAT_ERRORS = False @@ -33,7 +33,7 @@ def __init__(self, message=None): class ApiError(Error): def __init__(self, message='Unknown', code='Unknown'): - self.message = message + self.api_message = message self.code = code super(ApiError, self).__init__('%s: %s' % (code, message)) @@ -44,19 +44,19 @@ class NotFound(Error): class UnknownScheme(Error): - msg = "Unknown scheme '%s' found in URI" + msg_fmt = "Unknown scheme '%s' found in URI" def __init__(self, scheme): - msg = self.__class__.msg % scheme + msg = self.msg_fmt % scheme super(UnknownScheme, self).__init__(msg) class BadStoreUri(Error): - msg = "The Store URI %s was 
malformed. Reason: %s" + msg_fmt = "The Store URI %s was malformed. Reason: %s" def __init__(self, uri, reason): - msg = self.__class__.msg % (uri, reason) + msg = self.msg_fmt % (uri, reason) super(BadStoreUri, self).__init__(msg) @@ -100,9 +100,7 @@ def _wrap(*args, **kw): return f(*args, **kw) except Exception as e: if not isinstance(e, Error): - #exc_type, exc_value, exc_traceback = sys.exc_info() logging.exception(_('Uncaught exception')) - #logging.error(traceback.extract_stack(exc_traceback)) raise Error(str(e)) raise _wrap.func_name = f.func_name @@ -113,29 +111,29 @@ class OpenstackException(Exception): """Base Exception class. To correctly use this class, inherit from it and define - a 'message' property. That message will get printf'd + a 'msg_fmt' property. That message will get printf'd with the keyword arguments provided to the constructor. """ - message = "An unknown exception occurred" + msg_fmt = "An unknown exception occurred" def __init__(self, **kwargs): try: - self._error_string = self.message % kwargs + self._error_string = self.msg_fmt % kwargs except Exception: if _FATAL_EXCEPTION_FORMAT_ERRORS: raise else: # at least get the core message out if something happened - self._error_string = self.message + self._error_string = self.msg_fmt def __str__(self): return self._error_string class MalformedRequestBody(OpenstackException): - message = "Malformed message body: %(reason)s" + msg_fmt = "Malformed message body: %(reason)s" class InvalidContentType(OpenstackException): - message = "Invalid content type %(content_type)s" + msg_fmt = "Invalid content type %(content_type)s" diff --git a/billingstack/openstack/common/excutils.py b/billingstack/openstack/common/excutils.py index d61a8c8..31c3d33 100644 --- a/billingstack/openstack/common/excutils.py +++ b/billingstack/openstack/common/excutils.py @@ -19,17 +19,15 @@ Exception related utilities. 
""" -import contextlib import logging import sys import time import traceback -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa -@contextlib.contextmanager -def save_and_reraise_exception(): +class save_and_reraise_exception(object): """Save current exception, run some code and then re-raise. In some cases the exception context can be cleared, resulting in None @@ -41,15 +39,33 @@ def save_and_reraise_exception(): To work around this, we save the exception state, run handler code, and then re-raise the original exception. If another exception occurs, the saved exception is logged and the new exception is re-raised. - """ - type_, value, tb = sys.exc_info() - try: - yield + + In some cases the caller may not want to re-raise the exception, and + for those circumstances this context provides a reraise flag that + can be used to suppress the exception. For example: + except Exception: - logging.error(_('Original exception being dropped: %s'), - traceback.format_exception(type_, value, tb)) - raise - raise type_, value, tb + with save_and_reraise_exception() as ctxt: + decide_if_need_reraise() + if not should_be_reraised: + ctxt.reraise = False + """ + def __init__(self): + self.reraise = True + + def __enter__(self): + self.type_, self.value, self.tb, = sys.exc_info() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is not None: + logging.error(_('Original exception being dropped: %s'), + traceback.format_exception(self.type_, + self.value, + self.tb)) + return False + if self.reraise: + raise self.type_, self.value, self.tb def forever_retry_uncaught_exceptions(infunc): diff --git a/billingstack/openstack/common/fileutils.py b/billingstack/openstack/common/fileutils.py index f17e3f7..4e5ee68 100644 --- a/billingstack/openstack/common/fileutils.py +++ b/billingstack/openstack/common/fileutils.py @@ -21,7 +21,7 @@ import os from billingstack.openstack.common import 
excutils -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import log as logging LOG = logging.getLogger(__name__) diff --git a/billingstack/openstack/common/gettextutils.py b/billingstack/openstack/common/gettextutils.py index b2bb74d..185aa05 100644 --- a/billingstack/openstack/common/gettextutils.py +++ b/billingstack/openstack/common/gettextutils.py @@ -28,8 +28,11 @@ import gettext import logging.handlers import os +import re import UserString +import six + _localedir = os.environ.get('billingstack'.upper() + '_LOCALEDIR') _t = gettext.translation('billingstack', localedir=_localedir, fallback=True) @@ -120,7 +123,29 @@ def data(self): if self.params is not None: full_msg = full_msg % self.params - return unicode(full_msg) + return six.text_type(full_msg) + + def _save_dictionary_parameter(self, dict_param): + full_msg = self.data + # look for %(blah) fields in string; + # ignore %% and deal with the + # case where % is first character on the line + keys = re.findall('(?:[^%]|^)%\((\w*)\)[a-z]', full_msg) + + # if we don't find any %(blah) blocks but have a %s + if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg): + # apparently the full dictionary is the parameter + params = copy.deepcopy(dict_param) + else: + params = {} + for key in keys: + try: + params[key] = copy.deepcopy(dict_param[key]) + except TypeError: + # cast uncopyable thing to unicode string + params[key] = unicode(dict_param[key]) + + return params def _save_parameters(self, other): # we check for None later to see if @@ -128,8 +153,16 @@ def _save_parameters(self, other): # so encapsulate if our parameter is actually None if other is None: self.params = (other, ) + elif isinstance(other, dict): + self.params = self._save_dictionary_parameter(other) else: - self.params = copy.deepcopy(other) + # fallback to casting to unicode, + # this will handle the problematic python code-like + 
# objects that cannot be deep-copied + try: + self.params = copy.deepcopy(other) + except TypeError: + self.params = unicode(other) return self diff --git a/billingstack/openstack/common/lockutils.py b/billingstack/openstack/common/lockutils.py index 2903167..d57920d 100644 --- a/billingstack/openstack/common/lockutils.py +++ b/billingstack/openstack/common/lockutils.py @@ -16,11 +16,10 @@ # under the License. +import contextlib import errno import functools import os -import shutil -import tempfile import time import weakref @@ -28,7 +27,7 @@ from oslo.config import cfg from billingstack.openstack.common import fileutils -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import local from billingstack.openstack.common import log as logging @@ -40,8 +39,7 @@ cfg.BoolOpt('disable_process_locking', default=False, help='Whether to disable inter-process locks'), cfg.StrOpt('lock_path', - help=('Directory to use for lock files. Default to a ' - 'temp directory')) + help=('Directory to use for lock files.')) ] @@ -135,7 +133,87 @@ def unlock(self): _semaphores = weakref.WeakValueDictionary() -def synchronized(name, lock_file_prefix, external=False, lock_path=None): +@contextlib.contextmanager +def lock(name, lock_file_prefix=None, external=False, lock_path=None): + """Context based lock + + This function yields a `semaphore.Semaphore` instance unless external is + True, in which case, it'll yield an InterProcessLock instance. + + :param lock_file_prefix: The lock_file_prefix argument is used to provide + lock files on disk with a meaningful prefix. + + :param external: The external keyword argument denotes whether this lock + should work across multiple processes. This means that if two different + workers both run a a method decorated with @synchronized('mylock', + external=True), only one of them will execute at a time. 
+ + :param lock_path: The lock_path keyword argument is used to specify a + special location for external lock files to live. If nothing is set, then + CONF.lock_path is used as a default. + """ + # NOTE(soren): If we ever go natively threaded, this will be racy. + # See http://stackoverflow.com/questions/5390569/dyn + # amically-allocating-and-destroying-mutexes + sem = _semaphores.get(name, semaphore.Semaphore()) + if name not in _semaphores: + # this check is not racy - we're already holding ref locally + # so GC won't remove the item and there was no IO switch + # (only valid in greenthreads) + _semaphores[name] = sem + + with sem: + LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name}) + + # NOTE(mikal): I know this looks odd + if not hasattr(local.strong_store, 'locks_held'): + local.strong_store.locks_held = [] + local.strong_store.locks_held.append(name) + + try: + if external and not CONF.disable_process_locking: + LOG.debug(_('Attempting to grab file lock "%(lock)s"'), + {'lock': name}) + + # We need a copy of lock_path because it is non-local + local_lock_path = lock_path or CONF.lock_path + if not local_lock_path: + raise cfg.RequiredOptError('lock_path') + + if not os.path.exists(local_lock_path): + fileutils.ensure_tree(local_lock_path) + LOG.info(_('Created lock path: %s'), local_lock_path) + + def add_prefix(name, prefix): + if not prefix: + return name + sep = '' if prefix.endswith('-') else '-' + return '%s%s%s' % (prefix, sep, name) + + # NOTE(mikal): the lock name cannot contain directory + # separators + lock_file_name = add_prefix(name.replace(os.sep, '_'), + lock_file_prefix) + + lock_file_path = os.path.join(local_lock_path, lock_file_name) + + try: + lock = InterProcessLock(lock_file_path) + with lock as lock: + LOG.debug(_('Got file lock "%(lock)s" at %(path)s'), + {'lock': name, 'path': lock_file_path}) + yield lock + finally: + LOG.debug(_('Released file lock "%(lock)s" at %(path)s'), + {'lock': name, 'path': lock_file_path}) + else: 
+ yield sem + + finally: + local.strong_store.locks_held.remove(name) + + +def synchronized(name, lock_file_prefix=None, external=False, lock_path=None): """Synchronization decorator. Decorating a method like so:: @@ -157,99 +235,18 @@ def bar(self, *args): ... This way only one of either foo or bar can be executing at a time. - - :param lock_file_prefix: The lock_file_prefix argument is used to provide - lock files on disk with a meaningful prefix. The prefix should end with a - hyphen ('-') if specified. - - :param external: The external keyword argument denotes whether this lock - should work across multiple processes. This means that if two different - workers both run a a method decorated with @synchronized('mylock', - external=True), only one of them will execute at a time. - - :param lock_path: The lock_path keyword argument is used to specify a - special location for external lock files to live. If nothing is set, then - CONF.lock_path is used as a default. """ def wrap(f): @functools.wraps(f) def inner(*args, **kwargs): - # NOTE(soren): If we ever go natively threaded, this will be racy. 
- # See http://stackoverflow.com/questions/5390569/dyn - # amically-allocating-and-destroying-mutexes - sem = _semaphores.get(name, semaphore.Semaphore()) - if name not in _semaphores: - # this check is not racy - we're already holding ref locally - # so GC won't remove the item and there was no IO switch - # (only valid in greenthreads) - _semaphores[name] = sem - - with sem: - LOG.debug(_('Got semaphore "%(lock)s" for method ' - '"%(method)s"...'), {'lock': name, - 'method': f.__name__}) - - # NOTE(mikal): I know this looks odd - if not hasattr(local.strong_store, 'locks_held'): - local.strong_store.locks_held = [] - local.strong_store.locks_held.append(name) - - try: - if external and not CONF.disable_process_locking: - LOG.debug(_('Attempting to grab file lock "%(lock)s" ' - 'for method "%(method)s"...'), - {'lock': name, 'method': f.__name__}) - cleanup_dir = False - - # We need a copy of lock_path because it is non-local - local_lock_path = lock_path - if not local_lock_path: - local_lock_path = CONF.lock_path - - if not local_lock_path: - cleanup_dir = True - local_lock_path = tempfile.mkdtemp() - - if not os.path.exists(local_lock_path): - fileutils.ensure_tree(local_lock_path) - - # NOTE(mikal): the lock name cannot contain directory - # separators - safe_name = name.replace(os.sep, '_') - lock_file_name = '%s%s' % (lock_file_prefix, safe_name) - lock_file_path = os.path.join(local_lock_path, - lock_file_name) - - try: - lock = InterProcessLock(lock_file_path) - with lock: - LOG.debug(_('Got file lock "%(lock)s" at ' - '%(path)s for method ' - '"%(method)s"...'), - {'lock': name, - 'path': lock_file_path, - 'method': f.__name__}) - retval = f(*args, **kwargs) - finally: - LOG.debug(_('Released file lock "%(lock)s" at ' - '%(path)s for method "%(method)s"...'), - {'lock': name, - 'path': lock_file_path, - 'method': f.__name__}) - # NOTE(vish): This removes the tempdir if we needed - # to create one. 
This is used to - # cleanup the locks left behind by unit - # tests. - if cleanup_dir: - shutil.rmtree(local_lock_path) - else: - retval = f(*args, **kwargs) - - finally: - local.strong_store.locks_held.remove(name) + with lock(name, lock_file_prefix, external, lock_path): + LOG.debug(_('Got semaphore / lock "%(function)s"'), + {'function': f.__name__}) + return f(*args, **kwargs) - return retval + LOG.debug(_('Semaphore / lock released "%(function)s"'), + {'function': f.__name__}) return inner return wrap @@ -273,7 +270,7 @@ def bar(self, *args): ... The lock_file_prefix argument is used to provide lock files on disk with a - meaningful prefix. The prefix should end with a hyphen ('-') if specified. + meaningful prefix. """ return functools.partial(synchronized, lock_file_prefix=lock_file_prefix) diff --git a/billingstack/openstack/common/log.py b/billingstack/openstack/common/log.py index c4f4185..246d4de 100644 --- a/billingstack/openstack/common/log.py +++ b/billingstack/openstack/common/log.py @@ -29,8 +29,6 @@ """ -import ConfigParser -import cStringIO import inspect import itertools import logging @@ -41,8 +39,9 @@ import traceback from oslo.config import cfg +from six import moves -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils from billingstack.openstack.common import local @@ -74,7 +73,8 @@ cfg.StrOpt('log-format', default=None, metavar='FORMAT', - help='A logging.Formatter log message format string which may ' + help='DEPRECATED. ' + 'A logging.Formatter log message format string which may ' 'use any of the available logging.LogRecord attributes. ' 'This option is deprecated. 
Please use ' 'logging_context_format_string and ' @@ -347,7 +347,7 @@ def __str__(self): def _load_log_config(log_config): try: logging.config.fileConfig(log_config) - except ConfigParser.Error as exc: + except moves.configparser.Error as exc: raise LogConfigError(log_config, str(exc)) @@ -520,7 +520,7 @@ def formatException(self, exc_info, record=None): if not record: return logging.Formatter.formatException(self, exc_info) - stringbuffer = cStringIO.StringIO() + stringbuffer = moves.StringIO() traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], None, stringbuffer) lines = stringbuffer.getvalue().split('\n') diff --git a/billingstack/openstack/common/loopingcall.py b/billingstack/openstack/common/loopingcall.py index 1976bf9..a8de8f8 100644 --- a/billingstack/openstack/common/loopingcall.py +++ b/billingstack/openstack/common/loopingcall.py @@ -22,7 +22,7 @@ from eventlet import event from eventlet import greenthread -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import log as logging from billingstack.openstack.common import timeutils diff --git a/billingstack/openstack/common/notifier/api.py b/billingstack/openstack/common/notifier/api.py index 565f454..894f1cb 100644 --- a/billingstack/openstack/common/notifier/api.py +++ b/billingstack/openstack/common/notifier/api.py @@ -13,12 +13,13 @@ # License for the specific language governing permissions and limitations # under the License. 
+import socket import uuid from oslo.config import cfg from billingstack.openstack.common import context -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils from billingstack.openstack.common import log as logging @@ -35,7 +36,7 @@ default='INFO', help='Default notification level for outgoing notifications'), cfg.StrOpt('default_publisher_id', - default='$host', + default=None, help='Default publisher_id for outgoing notifications'), ] @@ -74,7 +75,7 @@ def wrapped_func(*args, **kwarg): ctxt = context.get_context_from_function_and_args(fn, args, kwarg) notify(ctxt, - CONF.default_publisher_id, + CONF.default_publisher_id or socket.gethostname(), name, CONF.default_notification_level, body) @@ -84,7 +85,10 @@ def wrapped_func(*args, **kwarg): def publisher_id(service, host=None): if not host: - host = CONF.host + try: + host = CONF.host + except AttributeError: + host = CONF.default_publisher_id or socket.gethostname() return "%s.%s" % (service, host) @@ -153,29 +157,16 @@ def _get_drivers(): if _drivers is None: _drivers = {} for notification_driver in CONF.notification_driver: - add_driver(notification_driver) - + try: + driver = importutils.import_module(notification_driver) + _drivers[notification_driver] = driver + except ImportError: + LOG.exception(_("Failed to load notifier %s. " + "These notifications will not be sent.") % + notification_driver) return _drivers.values() -def add_driver(notification_driver): - """Add a notification driver at runtime.""" - # Make sure the driver list is initialized. - _get_drivers() - if isinstance(notification_driver, basestring): - # Load and add - try: - driver = importutils.import_module(notification_driver) - _drivers[notification_driver] = driver - except ImportError: - LOG.exception(_("Failed to load notifier %s. 
" - "These notifications will not be sent.") % - notification_driver) - else: - # Driver is already loaded; just add the object. - _drivers[notification_driver] = notification_driver - - def _reset_drivers(): """Used by unit tests to reset the drivers.""" global _drivers diff --git a/billingstack/openstack/common/notifier/rpc_notifier.py b/billingstack/openstack/common/notifier/rpc_notifier.py index 3c3e690..ac0e3ed 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier.py +++ b/billingstack/openstack/common/notifier/rpc_notifier.py @@ -16,7 +16,7 @@ from oslo.config import cfg from billingstack.openstack.common import context as req_context -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import log as logging from billingstack.openstack.common import rpc diff --git a/billingstack/openstack/common/notifier/rpc_notifier2.py b/billingstack/openstack/common/notifier/rpc_notifier2.py index b7bc56e..41f8d68 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier2.py +++ b/billingstack/openstack/common/notifier/rpc_notifier2.py @@ -18,7 +18,7 @@ from oslo.config import cfg from billingstack.openstack.common import context as req_context -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import log as logging from billingstack.openstack.common import rpc diff --git a/billingstack/openstack/common/processutils.py b/billingstack/openstack/common/processutils.py index 1fcb22d..e0c298a 100644 --- a/billingstack/openstack/common/processutils.py +++ b/billingstack/openstack/common/processutils.py @@ -27,7 +27,7 @@ from eventlet.green import subprocess from eventlet import greenthread -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import 
log as logging diff --git a/billingstack/openstack/common/rpc/__init__.py b/billingstack/openstack/common/rpc/__init__.py index 45b842e..9c40b70 100644 --- a/billingstack/openstack/common/rpc/__init__.py +++ b/billingstack/openstack/common/rpc/__init__.py @@ -29,7 +29,7 @@ from oslo.config import cfg -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import importutils from billingstack.openstack.common import local from billingstack.openstack.common import log as logging diff --git a/billingstack/openstack/common/rpc/amqp.py b/billingstack/openstack/common/rpc/amqp.py index feb164e..3e2f850 100644 --- a/billingstack/openstack/common/rpc/amqp.py +++ b/billingstack/openstack/common/rpc/amqp.py @@ -34,14 +34,28 @@ from eventlet import pools from eventlet import queue from eventlet import semaphore +from oslo.config import cfg from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import local from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import common as rpc_common +amqp_opts = [ + cfg.BoolOpt('amqp_durable_queues', + default=False, + deprecated_name='rabbit_durable_queues', + deprecated_group='DEFAULT', + help='Use durable queues in amqp.'), + cfg.BoolOpt('amqp_auto_delete', + default=False, + help='Auto-delete queues in amqp.'), +] + +cfg.CONF.register_opts(amqp_opts) + UNIQUE_ID = '_unique_id' LOG = logging.getLogger(__name__) diff --git a/billingstack/openstack/common/rpc/common.py b/billingstack/openstack/common/rpc/common.py index 1992401..b8bc17a 100644 --- a/billingstack/openstack/common/rpc/common.py +++ b/billingstack/openstack/common/rpc/common.py @@ -24,7 +24,7 @@ from oslo.config import cfg import six -from billingstack.openstack.common.gettextutils 
import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils from billingstack.openstack.common import local @@ -74,14 +74,14 @@ class RPCException(Exception): - message = _("An unknown RPC related exception occurred.") + msg_fmt = _("An unknown RPC related exception occurred.") def __init__(self, message=None, **kwargs): self.kwargs = kwargs if not message: try: - message = self.message % kwargs + message = self.msg_fmt % kwargs except Exception: # kwargs doesn't match a variable in the message @@ -90,7 +90,7 @@ def __init__(self, message=None, **kwargs): for name, value in kwargs.iteritems(): LOG.error("%s: %s" % (name, value)) # at least get the core message out if something happened - message = self.message + message = self.msg_fmt super(RPCException, self).__init__(message) @@ -104,7 +104,7 @@ class RemoteError(RPCException): contains all of the relevant info. """ - message = _("Remote error: %(exc_type)s %(value)s\n%(traceback)s.") + msg_fmt = _("Remote error: %(exc_type)s %(value)s\n%(traceback)s.") def __init__(self, exc_type=None, value=None, traceback=None): self.exc_type = exc_type @@ -121,7 +121,7 @@ class Timeout(RPCException): This exception is raised if the rpc_response_timeout is reached while waiting for a response from the remote side. """ - message = _('Timeout while waiting on RPC response - ' + msg_fmt = _('Timeout while waiting on RPC response - ' 'topic: "%(topic)s", RPC method: "%(method)s" ' 'info: "%(info)s"') @@ -144,25 +144,25 @@ def __init__(self, info=None, topic=None, method=None): class DuplicateMessageError(RPCException): - message = _("Found duplicate message(%(msg_id)s). Skipping it.") + msg_fmt = _("Found duplicate message(%(msg_id)s). 
Skipping it.") class InvalidRPCConnectionReuse(RPCException): - message = _("Invalid reuse of an RPC connection.") + msg_fmt = _("Invalid reuse of an RPC connection.") class UnsupportedRpcVersion(RPCException): - message = _("Specified RPC version, %(version)s, not supported by " + msg_fmt = _("Specified RPC version, %(version)s, not supported by " "this endpoint.") class UnsupportedRpcEnvelopeVersion(RPCException): - message = _("Specified RPC envelope version, %(version)s, " + msg_fmt = _("Specified RPC envelope version, %(version)s, " "not supported by this endpoint.") class RpcVersionCapError(RPCException): - message = _("Specified RPC version cap, %(version_cap)s, is too low") + msg_fmt = _("Specified RPC version cap, %(version_cap)s, is too low") class Connection(object): @@ -261,41 +261,20 @@ def consume_in_thread(self): def _safe_log(log_func, msg, msg_data): """Sanitizes the msg_data field before logging.""" - SANITIZE = {'set_admin_password': [('args', 'new_pass')], - 'run_instance': [('args', 'admin_password')], - 'route_message': [('args', 'message', 'args', 'method_info', - 'method_kwargs', 'password'), - ('args', 'message', 'args', 'method_info', - 'method_kwargs', 'admin_password')]} - - has_method = 'method' in msg_data and msg_data['method'] in SANITIZE - has_context_token = '_context_auth_token' in msg_data - has_token = 'auth_token' in msg_data - - if not any([has_method, has_context_token, has_token]): - return log_func(msg, msg_data) - - msg_data = copy.deepcopy(msg_data) - - if has_method: - for arg in SANITIZE.get(msg_data['method'], []): - try: - d = msg_data - for elem in arg[:-1]: - d = d[elem] - d[arg[-1]] = '' - except KeyError as e: - LOG.info(_('Failed to sanitize %(item)s. 
Key error %(err)s'), - {'item': arg, - 'err': e}) - - if has_context_token: - msg_data['_context_auth_token'] = '' - - if has_token: - msg_data['auth_token'] = '' - - return log_func(msg, msg_data) + SANITIZE = ['_context_auth_token', 'auth_token', 'new_pass'] + + def _fix_passwords(d): + """Sanitizes the password fields in the dictionary.""" + for k in d.iterkeys(): + if k.lower().find('password') != -1: + d[k] = '' + elif k.lower() in SANITIZE: + d[k] = '' + elif isinstance(d[k], dict): + _fix_passwords(d[k]) + return d + + return log_func(msg, _fix_passwords(copy.deepcopy(msg_data))) def serialize_remote_exception(failure_info, log_failure=True): diff --git a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py index 2caaa0e..3afb966 100644 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ b/billingstack/openstack/common/rpc/impl_kombu.py @@ -18,7 +18,6 @@ import itertools import socket import ssl -import sys import time import uuid @@ -31,15 +30,19 @@ from oslo.config import cfg from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import network_utils from billingstack.openstack.common.rpc import amqp as rpc_amqp from billingstack.openstack.common.rpc import common as rpc_common +from billingstack.openstack.common import sslutils kombu_opts = [ cfg.StrOpt('kombu_ssl_version', default='', - help='SSL version to use (valid only if SSL enabled)'), + help='SSL version to use (valid only if SSL enabled). ' + 'valid values are TLSv1, SSLv23 and SSLv3. 
SSLv2 may ' + 'be available on some distributions' + ), cfg.StrOpt('kombu_ssl_keyfile', default='', help='SSL key file (valid only if SSL enabled)'), @@ -83,12 +86,6 @@ default=0, help='maximum retries with trying to connect to RabbitMQ ' '(the default of 0 implies an infinite retry count)'), - cfg.IntOpt('rabbit_heartbeat', - default=0, - help='Seconds between connection keepalive heartbeats'), - cfg.BoolOpt('rabbit_durable_queues', - default=False, - help='use durable queues in RabbitMQ'), cfg.BoolOpt('rabbit_ha_queues', default=False, help='use H/A queues in RabbitMQ (x-ha-policy: all).' @@ -260,9 +257,9 @@ def __init__(self, conf, channel, topic, callback, tag, name=None, Other kombu options may be passed as keyword arguments """ # Default options - options = {'durable': conf.rabbit_durable_queues, + options = {'durable': conf.amqp_durable_queues, 'queue_arguments': _get_queue_arguments(conf), - 'auto_delete': False, + 'auto_delete': conf.amqp_auto_delete, 'exclusive': False} options.update(kwargs) exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) @@ -366,8 +363,8 @@ def __init__(self, conf, channel, topic, **kwargs): Kombu options may be passed as keyword args to override defaults """ - options = {'durable': conf.rabbit_durable_queues, - 'auto_delete': False, + options = {'durable': conf.amqp_durable_queues, + 'auto_delete': conf.amqp_auto_delete, 'exclusive': False} options.update(kwargs) exchange_name = rpc_amqp.get_control_exchange(conf) @@ -397,7 +394,7 @@ class NotifyPublisher(TopicPublisher): """Publisher class for 'notify'.""" def __init__(self, conf, channel, topic, **kwargs): - self.durable = kwargs.pop('durable', conf.rabbit_durable_queues) + self.durable = kwargs.pop('durable', conf.amqp_durable_queues) self.queue_arguments = _get_queue_arguments(conf) super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs) @@ -453,7 +450,6 @@ def __init__(self, conf, server_params=None): 'userid': self.conf.rabbit_userid, 
'password': self.conf.rabbit_password, 'virtual_host': self.conf.rabbit_virtual_host, - 'heartbeat': self.conf.rabbit_heartbeat, } for sp_key, value in server_params.iteritems(): @@ -482,7 +478,8 @@ def _fetch_ssl_params(self): # http://docs.python.org/library/ssl.html - ssl.wrap_socket if self.conf.kombu_ssl_version: - ssl_params['ssl_version'] = self.conf.kombu_ssl_version + ssl_params['ssl_version'] = sslutils.validate_ssl_version( + self.conf.kombu_ssl_version) if self.conf.kombu_ssl_keyfile: ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile if self.conf.kombu_ssl_certfile: @@ -565,13 +562,11 @@ def reconnect(self): log_info.update(params) if self.max_retries and attempt == self.max_retries: - LOG.error(_('Unable to connect to AMQP server on ' - '%(hostname)s:%(port)d after %(max_retries)d ' - 'tries: %(err_str)s') % log_info) - # NOTE(comstud): Copied from original code. There's - # really no better recourse because if this was a queue we - # need to consume on, we have no way to consume anymore. 
- sys.exit(1) + msg = _('Unable to connect to AMQP server on ' + '%(hostname)s:%(port)d after %(max_retries)d ' + 'tries: %(err_str)s') % log_info + LOG.error(msg) + raise rpc_common.RPCException(msg) if attempt == 1: sleep_time = self.interval_start or 1 diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py index 13997b8..b58e779 100644 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ b/billingstack/openstack/common/rpc/impl_qpid.py @@ -25,7 +25,7 @@ from oslo.config import cfg from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils from billingstack.openstack.common import log as logging @@ -181,11 +181,16 @@ def __init__(self, conf, session, msg_id, callback): 'callback' is the callback to call when messages are received """ - super(DirectConsumer, self).__init__(session, callback, - "%s/%s" % (msg_id, msg_id), - {"type": "direct"}, - msg_id, - {"exclusive": True}) + super(DirectConsumer, self).__init__( + session, callback, + "%s/%s" % (msg_id, msg_id), + {"type": "direct"}, + msg_id, + { + "auto-delete": conf.amqp_auto_delete, + "exclusive": True, + "durable": conf.amqp_durable_queues, + }) class TopicConsumer(ConsumerBase): @@ -203,9 +208,14 @@ def __init__(self, conf, session, topic, callback, name=None, """ exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) - super(TopicConsumer, self).__init__(session, callback, - "%s/%s" % (exchange_name, topic), - {}, name or topic, {}) + super(TopicConsumer, self).__init__( + session, callback, + "%s/%s" % (exchange_name, topic), + {}, name or topic, + { + "auto-delete": conf.amqp_auto_delete, + "durable": conf.amqp_durable_queues, + }) class FanoutConsumer(ConsumerBase): @@ -228,7 +238,7 @@ def __init__(self, conf, 
session, topic, callback): {"exclusive": True}) def reconnect(self, session): - topic = self.get_node_name() + topic = self.get_node_name().rpartition('_fanout')[0] params = { 'session': session, 'topic': topic, diff --git a/billingstack/openstack/common/rpc/impl_zmq.py b/billingstack/openstack/common/rpc/impl_zmq.py index 9663a31..d6624ee 100644 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ b/billingstack/openstack/common/rpc/impl_zmq.py @@ -27,7 +27,7 @@ from oslo.config import cfg from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import importutils from billingstack.openstack.common import jsonutils from billingstack.openstack.common.rpc import common as rpc_common diff --git a/billingstack/openstack/common/rpc/matchmaker.py b/billingstack/openstack/common/rpc/matchmaker.py index 722b6f8..fcb0965 100644 --- a/billingstack/openstack/common/rpc/matchmaker.py +++ b/billingstack/openstack/common/rpc/matchmaker.py @@ -23,7 +23,7 @@ import eventlet from oslo.config import cfg -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import log as logging diff --git a/billingstack/openstack/common/rpc/matchmaker_ring.py b/billingstack/openstack/common/rpc/matchmaker_ring.py index ecec28e..db3b1b4 100644 --- a/billingstack/openstack/common/rpc/matchmaker_ring.py +++ b/billingstack/openstack/common/rpc/matchmaker_ring.py @@ -23,7 +23,7 @@ from oslo.config import cfg -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import matchmaker as mm diff --git a/billingstack/openstack/common/rpc/proxy.py b/billingstack/openstack/common/rpc/proxy.py 
index cda9f61..6edf599 100644 --- a/billingstack/openstack/common/rpc/proxy.py +++ b/billingstack/openstack/common/rpc/proxy.py @@ -69,7 +69,7 @@ def _set_version(self, msg, vers): v = vers if vers else self.default_version if (self.version_cap and not rpc_common.version_is_compatible(self.version_cap, v)): - raise rpc_common.RpcVersionCapError(version=self.version_cap) + raise rpc_common.RpcVersionCapError(version_cap=self.version_cap) msg['version'] = v def _get_topic(self, topic): diff --git a/billingstack/openstack/common/rpc/service.py b/billingstack/openstack/common/rpc/service.py index 225fcc8..385b2be 100644 --- a/billingstack/openstack/common/rpc/service.py +++ b/billingstack/openstack/common/rpc/service.py @@ -17,7 +17,7 @@ # License for the specific language governing permissions and limitations # under the License. -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import log as logging from billingstack.openstack.common import rpc from billingstack.openstack.common.rpc import dispatcher as rpc_dispatcher @@ -32,10 +32,11 @@ class Service(service.Service): A service enables rpc by listening to queues based on topic and host. 
""" - def __init__(self, host, topic, manager=None): + def __init__(self, host, topic, manager=None, serializer=None): super(Service, self).__init__() self.host = host self.topic = topic + self.serializer = serializer if manager is None: self.manager = self else: @@ -48,7 +49,8 @@ def start(self): LOG.debug(_("Creating Consumer connection for Service %s") % self.topic) - dispatcher = rpc_dispatcher.RpcDispatcher([self.manager]) + dispatcher = rpc_dispatcher.RpcDispatcher([self.manager], + self.serializer) # Share this same connection for these Consumers self.conn.create_consumer(self.topic, dispatcher, fanout=False) diff --git a/billingstack/openstack/common/service.py b/billingstack/openstack/common/service.py index 8b2dc14..613340a 100644 --- a/billingstack/openstack/common/service.py +++ b/billingstack/openstack/common/service.py @@ -27,11 +27,12 @@ import time import eventlet +from eventlet import event import logging as std_logging from oslo.config import cfg from billingstack.openstack.common import eventlet_backdoor -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa from billingstack.openstack.common import importutils from billingstack.openstack.common import log as logging from billingstack.openstack.common import threadgroup @@ -51,20 +52,9 @@ def __init__(self): :returns: None """ - self._services = threadgroup.ThreadGroup() + self.services = Services() self.backdoor_port = eventlet_backdoor.initialize_if_enabled() - @staticmethod - def run_service(service): - """Start and wait for a service to finish. - - :param service: service to run and wait for. - :returns: None - - """ - service.start() - service.wait() - def launch_service(self, service): """Load and start the given service. 
@@ -73,7 +63,7 @@ def launch_service(self, service): """ service.backdoor_port = self.backdoor_port - self._services.add_thread(self.run_service, service) + self.services.add(service) def stop(self): """Stop all services which are currently running. @@ -81,7 +71,7 @@ def stop(self): :returns: None """ - self._services.stop() + self.services.stop() def wait(self): """Waits until all services have been stopped, and then returns. @@ -89,7 +79,16 @@ def wait(self): :returns: None """ - self._services.wait() + self.services.wait() + + def restart(self): + """Reload config files and restart service. + + :returns: None + + """ + cfg.CONF.reload_config_files() + self.services.restart() class SignalExit(SystemExit): @@ -103,31 +102,51 @@ def _handle_signal(self, signo, frame): # Allow the process to be killed again and die from natural causes signal.signal(signal.SIGTERM, signal.SIG_DFL) signal.signal(signal.SIGINT, signal.SIG_DFL) + signal.signal(signal.SIGHUP, signal.SIG_DFL) raise SignalExit(signo) - def wait(self): + def handle_signal(self): signal.signal(signal.SIGTERM, self._handle_signal) signal.signal(signal.SIGINT, self._handle_signal) + signal.signal(signal.SIGHUP, self._handle_signal) + + def _wait_for_exit_or_signal(self): + status = None + signo = 0 LOG.debug(_('Full set of CONF:')) CONF.log_opt_values(LOG, std_logging.DEBUG) - status = None try: super(ServiceLauncher, self).wait() except SignalExit as exc: signame = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT'}[exc.signo] + signal.SIGINT: 'SIGINT', + signal.SIGHUP: 'SIGHUP'}[exc.signo] LOG.info(_('Caught %s, exiting'), signame) status = exc.code + signo = exc.signo except SystemExit as exc: status = exc.code finally: - if rpc: - rpc.cleanup() self.stop() - return status + if rpc: + try: + rpc.cleanup() + except Exception: + # We're shutting down, so it doesn't matter at this point. 
+ LOG.exception(_('Exception during rpc cleanup.')) + + return status, signo + + def wait(self): + while True: + self.handle_signal() + status, signo = self._wait_for_exit_or_signal() + if signo != signal.SIGHUP: + return status + self.restart() class ServiceWrapper(object): @@ -145,9 +164,12 @@ def __init__(self): self.running = True rfd, self.writepipe = os.pipe() self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r') + self.handle_signal() + def handle_signal(self): signal.signal(signal.SIGTERM, self._handle_signal) signal.signal(signal.SIGINT, self._handle_signal) + signal.signal(signal.SIGHUP, self._handle_signal) def _handle_signal(self, signo, frame): self.sigcaught = signo @@ -156,6 +178,7 @@ def _handle_signal(self, signo, frame): # Allow the process to be killed again and die from natural causes signal.signal(signal.SIGTERM, signal.SIG_DFL) signal.signal(signal.SIGINT, signal.SIG_DFL) + signal.signal(signal.SIGHUP, signal.SIG_DFL) def _pipe_watcher(self): # This will block until the write end is closed when the parent @@ -166,16 +189,47 @@ def _pipe_watcher(self): sys.exit(1) - def _child_process(self, service): + def _child_process_handle_signal(self): # Setup child signal handlers differently def _sigterm(*args): signal.signal(signal.SIGTERM, signal.SIG_DFL) raise SignalExit(signal.SIGTERM) + def _sighup(*args): + signal.signal(signal.SIGHUP, signal.SIG_DFL) + raise SignalExit(signal.SIGHUP) + signal.signal(signal.SIGTERM, _sigterm) + signal.signal(signal.SIGHUP, _sighup) # Block SIGINT and let the parent send us a SIGTERM signal.signal(signal.SIGINT, signal.SIG_IGN) + def _child_wait_for_exit_or_signal(self, launcher): + status = None + signo = 0 + + try: + launcher.wait() + except SignalExit as exc: + signame = {signal.SIGTERM: 'SIGTERM', + signal.SIGINT: 'SIGINT', + signal.SIGHUP: 'SIGHUP'}[exc.signo] + LOG.info(_('Caught %s, exiting'), signame) + status = exc.code + signo = exc.signo + except SystemExit as exc: + status = exc.code + except 
BaseException: + LOG.exception(_('Unhandled exception')) + status = 2 + finally: + launcher.stop() + + return status, signo + + def _child_process(self, service): + self._child_process_handle_signal() + # Reopen the eventlet hub to make sure we don't share an epoll # fd with parent and/or siblings, which would be bad eventlet.hubs.use_hub() @@ -189,7 +243,8 @@ def _sigterm(*args): random.seed() launcher = Launcher() - launcher.run_service(service) + launcher.launch_service(service) + return launcher def _start_child(self, wrap): if len(wrap.forktimes) > wrap.workers: @@ -210,21 +265,13 @@ def _start_child(self, wrap): # NOTE(johannes): All exceptions are caught to ensure this # doesn't fallback into the loop spawning children. It would # be bad for a child to spawn more children. - status = 0 - try: - self._child_process(wrap.service) - except SignalExit as exc: - signame = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT'}[exc.signo] - LOG.info(_('Caught %s, exiting'), signame) - status = exc.code - except SystemExit as exc: - status = exc.code - except BaseException: - LOG.exception(_('Unhandled exception')) - status = 2 - finally: - wrap.service.stop() + launcher = self._child_process(wrap.service) + while True: + self._child_process_handle_signal() + status, signo = self._child_wait_for_exit_or_signal(launcher) + if signo != signal.SIGHUP: + break + launcher.restart() os._exit(status) @@ -270,12 +317,7 @@ def _wait_child(self): wrap.children.remove(pid) return wrap - def wait(self): - """Loop waiting on children to die and respawning as necessary.""" - - LOG.debug(_('Full set of CONF:')) - CONF.log_opt_values(LOG, std_logging.DEBUG) - + def _respawn_children(self): while self.running: wrap = self._wait_child() if not wrap: @@ -284,14 +326,30 @@ def wait(self): # (see bug #1095346) eventlet.greenthread.sleep(.01) continue - while self.running and len(wrap.children) < wrap.workers: self._start_child(wrap) - if self.sigcaught: - signame = {signal.SIGTERM: 
'SIGTERM', - signal.SIGINT: 'SIGINT'}[self.sigcaught] - LOG.info(_('Caught %s, stopping children'), signame) + def wait(self): + """Loop waiting on children to die and respawning as necessary.""" + + LOG.debug(_('Full set of CONF:')) + CONF.log_opt_values(LOG, std_logging.DEBUG) + + while True: + self.handle_signal() + self._respawn_children() + if self.sigcaught: + signame = {signal.SIGTERM: 'SIGTERM', + signal.SIGINT: 'SIGINT', + signal.SIGHUP: 'SIGHUP'}[self.sigcaught] + LOG.info(_('Caught %s, stopping children'), signame) + if self.sigcaught != signal.SIGHUP: + break + + for pid in self.children: + os.kill(pid, signal.SIGHUP) + self.running = True + self.sigcaught = None for pid in self.children: try: @@ -313,15 +371,74 @@ class Service(object): def __init__(self, threads=1000): self.tg = threadgroup.ThreadGroup(threads) + # signal that the service is done shutting itself down: + self._done = event.Event() + + def reset(self): + # NOTE(Fengqian): docs for Event.reset() recommend against using it + self._done = event.Event() + def start(self): pass def stop(self): self.tg.stop() + self.tg.wait() + # Signal that service cleanup is done: + if not self._done.ready(): + self._done.send() + + def wait(self): + self._done.wait() + + +class Services(object): + + def __init__(self): + self.services = [] + self.tg = threadgroup.ThreadGroup() + self.done = event.Event() + + def add(self, service): + self.services.append(service) + self.tg.add_thread(self.run_service, service, self.done) + + def stop(self): + # wait for graceful shutdown of services: + for service in self.services: + service.stop() + service.wait() + + # Each service has performed cleanup, now signal that the run_service + # wrapper threads can now die: + if not self.done.ready(): + self.done.send() + + # reap threads: + self.tg.stop() def wait(self): self.tg.wait() + def restart(self): + self.stop() + self.done = event.Event() + for restart_service in self.services: + restart_service.reset() + 
self.tg.add_thread(self.run_service, restart_service, self.done) + + @staticmethod + def run_service(service, done): + """Service start wrapper. + + :param service: service to run + :param done: event to wait on until a shutdown is triggered + :returns: None + + """ + service.start() + done.wait() + def launch(service, workers=None): if workers: diff --git a/billingstack/openstack/common/sslutils.py b/billingstack/openstack/common/sslutils.py index af20a22..a3ae3c7 100644 --- a/billingstack/openstack/common/sslutils.py +++ b/billingstack/openstack/common/sslutils.py @@ -1,6 +1,6 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 -# Copyright 2013 IBM +# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -19,7 +19,7 @@ from oslo.config import cfg -from billingstack.openstack.common.gettextutils import _ +from billingstack.openstack.common.gettextutils import _ # noqa ssl_opts = [ @@ -78,3 +78,23 @@ def wrap(sock): ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED return ssl.wrap_socket(sock, **ssl_kwargs) + + +_SSL_PROTOCOLS = { + "tlsv1": ssl.PROTOCOL_TLSv1, + "sslv23": ssl.PROTOCOL_SSLv23, + "sslv3": ssl.PROTOCOL_SSLv3 +} + +try: + _SSL_PROTOCOLS["sslv2"] = ssl.PROTOCOL_SSLv2 +except AttributeError: + pass + + +def validate_ssl_version(version): + key = version.lower() + try: + return _SSL_PROTOCOLS[key] + except KeyError: + raise RuntimeError(_("Invalid SSL version : %s") % version) diff --git a/billingstack/openstack/common/threadgroup.py b/billingstack/openstack/common/threadgroup.py index e0b867d..2eef8fd 100644 --- a/billingstack/openstack/common/threadgroup.py +++ b/billingstack/openstack/common/threadgroup.py @@ -14,7 +14,7 @@ # License for the specific language governing permissions and limitations # under the License. 
-from eventlet import greenlet +import eventlet from eventlet import greenpool from eventlet import greenthread @@ -105,7 +105,7 @@ def wait(self): for x in self.timers: try: x.wait() - except greenlet.GreenletExit: + except eventlet.greenlet.GreenletExit: pass except Exception as ex: LOG.exception(ex) @@ -115,7 +115,7 @@ def wait(self): continue try: x.wait() - except greenlet.GreenletExit: + except eventlet.greenlet.GreenletExit: pass except Exception as ex: LOG.exception(ex) From 5897c3cf5a28562fa8cc1350d19d354b21467297 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 28 Jul 2013 16:19:37 +0000 Subject: [PATCH 171/182] Extract merchant id from url Change-Id: Ide09cbf2cec70b190795a79a29517f208e37fbc2 --- billingstack/api/auth.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/billingstack/api/auth.py b/billingstack/api/auth.py index aebfcd3..2807fab 100644 --- a/billingstack/api/auth.py +++ b/billingstack/api/auth.py @@ -43,10 +43,17 @@ def pipeline_factory(loader, global_conf, **local_conf): class NoAuthContextMiddleware(wsgi.Middleware): + def merchant_id(self, request): + parts = [p for p in request.path_info.split('/') if p] + if parts[0] == 'merchants' and len(parts) >= 2: + return parts[1] + def process_request(self, request): + merchant_id = self.merchant_id(request) + # NOTE(kiall): This makes the assumption that disabling authentication # means you wish to allow full access to everyone. - context = RequestContext(is_admin=True) + context = RequestContext(is_admin=True, tenant=merchant_id) # Store the context where oslo-log exepcts to find it. local.store.context = context From b0f909200f5e79607b2697818af19b93514cb855 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 28 Jul 2013 18:28:59 +0000 Subject: [PATCH 172/182] V2 API using Pecan and WSME This change removes the v1 API completely and adds v2 using the Pecan framework and WSME for (de)serialization. 
Also add a small hack where we subclass RestController for now to support PATCH blueprint v2-pecan * Not added (Needs be determined) Invoice Lines * Added resources Currency Language InvoiceStates PGProvider Merchant Customer PaymentMethods PaymentGatewayConfiguration Plan Product Subscription Invoice Usage Change-Id: I3d252b05d5d49664e3038fa0b1fa8a6e210d03cd --- billingstack/api/__init__.py | 10 - billingstack/api/app.py | 91 +++ billingstack/api/auth.py | 62 -- billingstack/api/base.py | 106 +-- billingstack/api/hooks.py | 40 + billingstack/api/middleware/__init__.py | 0 billingstack/api/middleware/errors.py | 80 -- billingstack/api/service.py | 55 -- billingstack/api/v1/__init__.py | 55 -- billingstack/api/v1/resources.py | 728 ------------------ billingstack/{wsgi.py => api/v2/__init__.py} | 19 +- .../controllers/__init__.py} | 22 +- billingstack/api/v2/controllers/currency.py | 67 ++ billingstack/api/v2/controllers/customer.py | 74 ++ billingstack/api/v2/controllers/invoice.py | 73 ++ .../api/v2/controllers/invoice_state.py | 68 ++ billingstack/api/v2/controllers/language.py | 67 ++ billingstack/api/v2/controllers/merchant.py | 85 ++ billingstack/api/v2/controllers/payment.py | 139 ++++ billingstack/api/v2/controllers/plan.py | 116 +++ billingstack/api/v2/controllers/product.py | 74 ++ billingstack/api/v2/controllers/root.py | 42 + .../api/v2/controllers/subscription.py | 75 ++ billingstack/api/v2/controllers/usage.py | 73 ++ billingstack/api/{v1 => v2}/models.py | 0 billingstack/tests/api/base.py | 77 +- billingstack/tests/api/v1/__init__.py | 0 billingstack/tests/api/v1/base.py | 0 billingstack/tests/api/v2/__init__.py | 5 + .../tests/api/{v1 => v2}/test_currency.py | 6 +- .../tests/api/{v1 => v2}/test_customer.py | 8 +- .../api/{v1 => v2}/test_invoice_state.py | 8 +- .../tests/api/{v1 => v2}/test_language.py | 6 +- .../tests/api/{v1 => v2}/test_merchant.py | 8 +- .../api/{v1 => v2}/test_payment_method.py | 8 +- .../tests/api/{v1 => v2}/test_plan.py | 6 +- 
.../tests/api/{v1 => v2}/test_product.py | 6 +- billingstack/tests/base.py | 11 + bin/billingstack-api | 36 - etc/billingstack/api-paste.ini.sample | 36 - setup.cfg | 2 +- tools/pip-requires | 4 +- 42 files changed, 1208 insertions(+), 1240 deletions(-) create mode 100644 billingstack/api/app.py delete mode 100644 billingstack/api/auth.py create mode 100644 billingstack/api/hooks.py delete mode 100644 billingstack/api/middleware/__init__.py delete mode 100644 billingstack/api/middleware/errors.py delete mode 100644 billingstack/api/service.py delete mode 100644 billingstack/api/v1/__init__.py delete mode 100644 billingstack/api/v1/resources.py rename billingstack/{wsgi.py => api/v2/__init__.py} (56%) rename billingstack/api/{versions.py => v2/controllers/__init__.py} (53%) create mode 100644 billingstack/api/v2/controllers/currency.py create mode 100644 billingstack/api/v2/controllers/customer.py create mode 100644 billingstack/api/v2/controllers/invoice.py create mode 100644 billingstack/api/v2/controllers/invoice_state.py create mode 100644 billingstack/api/v2/controllers/language.py create mode 100644 billingstack/api/v2/controllers/merchant.py create mode 100644 billingstack/api/v2/controllers/payment.py create mode 100644 billingstack/api/v2/controllers/plan.py create mode 100644 billingstack/api/v2/controllers/product.py create mode 100644 billingstack/api/v2/controllers/root.py create mode 100644 billingstack/api/v2/controllers/subscription.py create mode 100644 billingstack/api/v2/controllers/usage.py rename billingstack/api/{v1 => v2}/models.py (100%) delete mode 100644 billingstack/tests/api/v1/__init__.py delete mode 100644 billingstack/tests/api/v1/base.py create mode 100644 billingstack/tests/api/v2/__init__.py rename billingstack/tests/api/{v1 => v2}/test_currency.py (92%) rename billingstack/tests/api/{v1 => v2}/test_customer.py (92%) rename billingstack/tests/api/{v1 => v2}/test_invoice_state.py (92%) rename billingstack/tests/api/{v1 => 
v2}/test_language.py (92%) rename billingstack/tests/api/{v1 => v2}/test_merchant.py (88%) rename billingstack/tests/api/{v1 => v2}/test_payment_method.py (94%) rename billingstack/tests/api/{v1 => v2}/test_plan.py (93%) rename billingstack/tests/api/{v1 => v2}/test_product.py (93%) delete mode 100644 bin/billingstack-api delete mode 100644 etc/billingstack/api-paste.ini.sample diff --git a/billingstack/api/__init__.py b/billingstack/api/__init__.py index c4609c6..0defd31 100644 --- a/billingstack/api/__init__.py +++ b/billingstack/api/__init__.py @@ -17,25 +17,15 @@ # under the License. # # Copied: Moniker -import flask -from billingstack.openstack.common import jsonutils as json - from oslo.config import cfg API_SERVICE_OPTS = [ cfg.IntOpt('api_port', default=9091, help='The port for the billing API server'), cfg.IntOpt('api_listen', default='0.0.0.0', help='Bind to address'), - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('api_paste_config', default='api-paste.ini', - help='File name for the paste.deploy config for the api'), cfg.StrOpt('auth_strategy', default='noauth', help='The strategy to use for auth. Supports noauth or ' 'keystone'), ] cfg.CONF.register_opts(API_SERVICE_OPTS, 'service:api') - -# Allows us to serialize datetime's etc -flask.helpers.json = json diff --git a/billingstack/api/app.py b/billingstack/api/app.py new file mode 100644 index 0000000..3819883 --- /dev/null +++ b/billingstack/api/app.py @@ -0,0 +1,91 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +import logging +import os +import pecan +from oslo.config import cfg +from wsgiref import simple_server + +from billingstack import service +from billingstack.api import hooks +from billingstack.openstack.common import log + +cfg.CONF.import_opt('state_path', 'billingstack.paths') + +LOG = log.getLogger(__name__) + + +def get_config(): + conf = { + 'app': { + 'root': 'billingstack.api.v2.controllers.root.RootController', + 'modules': ['designate.api.v2'], + } + } + return pecan.configuration.conf_from_dict(conf) + + +def setup_app(pecan_config=None, extra_hooks=None): + app_hooks = [ + hooks.NoAuthHook() + ] + + if extra_hooks: + app_hooks.extend(extra_hooks) + + pecan_config = pecan_config or get_config() + + pecan.configuration.set_config(dict(pecan_config), overwrite=True) + + app = pecan.make_app( + pecan_config.app.root, + debug=cfg.CONF.debug, + hooks=app_hooks, + force_canonical=getattr(pecan_config.app, 'force_canonical', True) + ) + + return app + + +class VersionSelectorApplication(object): + def __init__(self): + self.v2 = setup_app() + + def __call__(self, environ, start_response): + return self.v2(environ, start_response) + + +def start(): + service.prepare_service() + + root = VersionSelectorApplication() + + host = cfg.CONF['service:api'].api_listen + port = cfg.CONF['service:api'].api_port + + srv = simple_server.make_server(host, port, root) + + LOG.info('Starting server in PID %s' % os.getpid()) + LOG.info("Configuration:") + cfg.CONF.log_opt_values(LOG, logging.INFO) + + if host == '0.0.0.0': + LOG.info('serving on 0.0.0.0:%s, view at 
http://127.0.0.1:%s' % + (port, port)) + else: + LOG.info("serving on http://%s:%s" % (host, port)) + + srv.serve_forever() diff --git a/billingstack/api/auth.py b/billingstack/api/auth.py deleted file mode 100644 index 2807fab..0000000 --- a/billingstack/api/auth.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker - -from oslo.config import cfg - -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.context import RequestContext -from billingstack import wsgi - -LOG = logging.getLogger(__name__) - - -def pipeline_factory(loader, global_conf, **local_conf): - """ - A paste pipeline replica that keys off of auth_strategy. - - Code nabbed from cinder. 
- """ - pipeline = local_conf[cfg.CONF['service:api'].auth_strategy] - pipeline = pipeline.split() - filters = [loader.get_filter(n) for n in pipeline[:-1]] - app = loader.get_app(pipeline[-1]) - filters.reverse() - for filter in filters: - app = filter(app) - return app - - -class NoAuthContextMiddleware(wsgi.Middleware): - def merchant_id(self, request): - parts = [p for p in request.path_info.split('/') if p] - if parts[0] == 'merchants' and len(parts) >= 2: - return parts[1] - - def process_request(self, request): - merchant_id = self.merchant_id(request) - - # NOTE(kiall): This makes the assumption that disabling authentication - # means you wish to allow full access to everyone. - context = RequestContext(is_admin=True, tenant=merchant_id) - - # Store the context where oslo-log exepcts to find it. - local.store.context = context - - # Attach the context to the request environment - request.environ['context'] = context diff --git a/billingstack/api/base.py b/billingstack/api/base.py index 8ba2c79..08bd938 100644 --- a/billingstack/api/base.py +++ b/billingstack/api/base.py @@ -14,15 +14,12 @@ # License for the specific language governing permissions and limitations # under the License. -import functools -import mimetypes +import pecan.rest -from flask import request, Blueprint from wsme.types import Base, Enum, UserType, text, Unset, wsproperty from oslo.config import cfg -from billingstack.api import utils from billingstack.openstack.common import log @@ -43,6 +40,11 @@ ] +class RestController(pecan.rest.RestController): + def _handle_patch(self, method, remainder): + return self._handle_post(method, remainder) + + class Property(UserType): """ A Property that just passes the value around... @@ -57,6 +59,27 @@ def fromnativetype(self, value): property_type = Property() +def _query_to_criterion(query, storage_func=None, **kw): + """ + Iterate over the query checking against the valid signatures (later). + + :param query: A list of queries. 
+ :param storage_func: The name of the storage function to very against. + """ + translation = { + 'customer': 'customer_id' + } + + criterion = {} + for q in query: + key = translation.get(q.field, q.field) + criterion[key] = q.as_dict() + + criterion.update(kw) + + return criterion + + operation_kind = Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt') @@ -133,78 +156,3 @@ def from_db(cls, values): Return a class of this object from values in the from_db """ return cls(**values) - - -class Rest(Blueprint): - """ - Helper to do stuff - """ - def get(self, rule, status_code=200, **kw): - return self._mroute('GET', rule, status_code, **kw) - - def post(self, rule, status_code=202, **kw): - return self._mroute('POST', rule, status_code, **kw) - - def patch(self, rule, status_code=202, **kw): - return self._mroute('PATCH', rule, status_code, **kw) - - def put(self, rule, status_code=202, **kw): - return self._mroute('PUT', rule, status_code, **kw) - - def delete(self, rule, status_code=204, **kw): - return self._mroute('DELETE', rule, status_code, **kw) - - def _mroute(self, methods, rule, status_code=None, **kw): - if type(methods) is str: - methods = [methods] - - return self.route(rule, methods=methods, status_code=status_code, - **kw) - - def guess_response_type(self, type_suffix=None): - """ - Get the MIME type based on keywords / request - """ - if type_suffix: - response_type = mimetypes.guess_type("res." + type_suffix)[0] - request.response_type = response_type - - def route(self, rule, sig_args=[], sig_kw={}, **options): - """ - Helper function that sets up the route as well as adding CORS.. - """ - status = options.pop('status_code', None) - - def decorator(func): - endpoint = options.pop('endpoint', func.__name__) - - if 'body' in options and 'body' not in sig_kw: - sig_kw['body'] = options['body'] - - # NOTE: Wrap the function with CORS support. 
- @utils.crossdomain(origin=cfg.CONF.cors_allowed_origin, - max_age=cfg.CONF.cors_max_age, - headers=",".join(CORS_ALLOW_HEADERS)) - @functools.wraps(func) - def handler(**kw): - # extract response content type - self.guess_response_type(kw.pop('response_type', None)) - - # NOTE: Extract fields (column selection) - fields = list(set(request.args.getlist('fields'))) - fields.sort() - request.fields_selector = fields - - if hasattr(func, '_wsme_definition'): - func._wsme_definition.status_code = status - - return func(**kw) - - #_rule = "/" + rule - # NOTE: Add 2 set of rules, 1 with response content type and one wo - self.add_url_rule(rule, endpoint, handler, **options) - rtype_rule = rule + '.' - self.add_url_rule(rtype_rule, endpoint, handler, **options) - - return func - return decorator diff --git a/billingstack/api/hooks.py b/billingstack/api/hooks.py new file mode 100644 index 0000000..e68269e --- /dev/null +++ b/billingstack/api/hooks.py @@ -0,0 +1,40 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from pecan import hooks + +from billingstack.openstack.common.context import RequestContext + + +class NoAuthHook(hooks.PecanHook): + """ + Simple auth - all requests will be is_admin=True + """ + def merchant_id(self, path): + """ + Get merchant id from url + """ + parts = [p for p in path.split('/') if p] + try: + index = parts.index('merchants') + 1 + return parts[index] + except ValueError: + return + except IndexError: + return + + def before(self, state): + merchant_id = self.merchant_id(state.request.path_url) + state.request.ctxt = RequestContext(tenant=merchant_id, is_admin=True) diff --git a/billingstack/api/middleware/__init__.py b/billingstack/api/middleware/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/api/middleware/errors.py b/billingstack/api/middleware/errors.py deleted file mode 100644 index 5cd9d68..0000000 --- a/billingstack/api/middleware/errors.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -import flask -import webob.dec -from billingstack import exceptions -from billingstack import wsgi -from billingstack.openstack.common.rpc import common as rpc_common -from billingstack.openstack.common import log -from billingstack.openstack.common import jsonutils as json - -LOG = log.getLogger(__name__) - - -class FaultWrapperMiddleware(wsgi.Middleware): - @webob.dec.wsgify - def __call__(self, request): - try: - return request.get_response(self.application) - except exceptions.Base, e: - # Handle Moniker Exceptions - status = e.error_code if hasattr(e, 'error_code') else 500 - - # Start building up a response - response = { - 'code': status - } - - if hasattr(e, 'error_type'): - response['type'] = e.error_type - - if hasattr(e, 'errors'): - response['errors'] = e.errors - - response['message'] = e.get_message() - - return self._handle_exception(request, e, status, response) - except rpc_common.Timeout, e: - # Special case for RPC timeout's - response = { - 'code': 504, - 'type': 'timeout', - } - - return self._handle_exception(request, e, 504, response) - except Exception, e: - # Handle all other exception types - return self._handle_exception(request, e) - - def _handle_exception(self, request, e, status=500, response={}): - # Log the exception ASAP - LOG.exception(e) - - headers = [ - ('Content-Type', 'application/json'), - ] - - # Set a response code, if one is missing. - if 'code' not in response: - response['code'] = status - - # TODO(kiall): Send a fault notification - - # Return the new response - return flask.Response(status=status, headers=headers, - response=json.dumps(response)) diff --git a/billingstack/api/service.py b/billingstack/api/service.py deleted file mode 100644 index fbdfabd..0000000 --- a/billingstack/api/service.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2012 Managed I.T. 
-# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -from oslo.config import cfg -from paste import deploy - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import wsgi -from billingstack import exceptions -from billingstack import utils -#from billingstack import policy - - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - - -LOG = logging.getLogger(__name__) - - -class Service(wsgi.Service): - def __init__(self, backlog=128, threads=1000): - - api_paste_config = cfg.CONF['service:api'].api_paste_config - config_paths = utils.find_config(api_paste_config) - - if len(config_paths) == 0: - msg = 'Unable to determine appropriate api-paste-config file' - raise exceptions.ConfigurationError(msg) - - LOG.info('Using api-paste-config found at: %s' % config_paths[0]) - - #policy.init_policy() - - application = deploy.loadapp("config:%s" % config_paths[0], - name='bs_api') - - super(Service, self).__init__(application=application, - host=cfg.CONF['service:api'].api_listen, - port=cfg.CONF['service:api'].api_port, - backlog=backlog, - threads=threads) diff --git a/billingstack/api/v1/__init__.py b/billingstack/api/v1/__init__.py deleted file mode 100644 index 5550aba..0000000 --- a/billingstack/api/v1/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2012 Managed I.T. 
-# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -import flask -from oslo.config import cfg -from stevedore import named -from billingstack.openstack.common import log as logging -from billingstack.api.v1.resources import bp as v1_bp - -LOG = logging.getLogger(__name__) - - -cfg.CONF.register_opts([ - cfg.ListOpt('enabled-extensions-v1', default=[], - help='Enabled API Extensions'), -], group='service:api') - - -def factory(global_config, **local_conf): - app = flask.Flask('billingstack.api.v1') - - app.config.update( - PROPAGATE_EXCEPTIONS=True - ) - - app.register_blueprint(v1_bp) - - # TODO(kiall): Ideally, we want to make use of the Plugin class here. - # This works for the moment though. - def _register_blueprint(ext): - app.register_blueprint(ext.plugin) - - # Add any (enabled) optional extensions - extensions = cfg.CONF['service:api'].enabled_extensions_v1 - - if len(extensions) > 0: - extmgr = named.NamedExtensionManager('billingstack.api.v1.extensions', - names=extensions) - extmgr.map(_register_blueprint) - - return app diff --git a/billingstack/api/v1/resources.py b/billingstack/api/v1/resources.py deleted file mode 100644 index f29af74..0000000 --- a/billingstack/api/v1/resources.py +++ /dev/null @@ -1,728 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from flask import request -from flask import Response - -from billingstack.api.base import Rest, Query -from billingstack.api.v1 import models -from billingstack.biller.rpcapi import biller_api -from billingstack.central.rpcapi import central_api -from billingstack.rater.rpcapi import rater_api - -from wsmeext.flask import signature - - -bp = Rest('v1', __name__) - - -def _query_to_criterion(query, storage_func=None, **kw): - """ - Iterate over the query checking against the valid signatures (later). - - :param query: A list of queries. - :param storage_func: The name of the storage function to very against. 
- """ - translation = { - 'customer': 'customer_id' - } - - criterion = {} - for q in query: - key = translation.get(q.field, q.field) - criterion[key] = q.as_dict() - - criterion.update(kw) - - return criterion - - -# Currencies -@bp.post('/currencies') -@signature(models.Currency, body=models.Currency) -def create_currency(body): - row = central_api.create_currency( - request.environ['context'], body.to_db()) - return models.Currency.from_db(row) - - -@bp.get('/currencies') -@signature([models.Currency], [Query]) -def list_currencies(q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_currencies( - request.environ['context'], criterion=criterion) - - return map(models.Currency.from_db, rows) - - -@bp.get('/currencies/') -@signature(models.Currency, str) -def get_currency(currency_id): - row = central_api.get_currency(request.environ['context'], - currency_id) - - return models.Currency.from_db(row) - - -@bp.put('/currencies/') -@signature(models.Currency, str, body=models.Currency) -def update_currency(currency_id, body): - row = central_api.update_currency( - request.environ['context'], - currency_id, - body.to_db()) - - return models.Currency.from_db(row) - - -@bp.delete('/currencies/') -def delete_currency(currency_id): - central_api.delete_currency(request.environ['context'], currency_id) - return Response(status=204) - - -# Language -@bp.post('/languages') -@signature(models.Language, body=models.Language) -def create_language(body): - row = central_api.create_language(request.environ['context'], - body.to_db()) - - return models.Language.from_db(row) - - -@bp.get('/languages') -@signature([models.Language], [Query]) -def list_languages(q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_languages( - request.environ['context'], criterion=criterion) - - return map(models.Language.from_db, rows) - - -@bp.get('/languages/') -@signature(models.Language, str) -def get_language(language_id): - row = 
central_api.get_language(request.environ['context'], - language_id) - - return models.Language.from_db(row) - - -@bp.put('/languages/') -@signature(models.Language, str, body=models.Language) -def update_language(language_id, body): - row = central_api.update_language( - request.environ['context'], - language_id, - body.to_db()) - - return models.Language.from_db(row) - - -@bp.delete('/languages/') -def delete_language(language_id): - central_api.delete_language(request.environ['context'], language_id) - return Response(status=204) - - -# PGP / PGM -@bp.get('/payment-gateway-providers') -@signature([models.PGProvider], [Query]) -def list_pg_providers(q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_pg_providers( - request.environ['context'], criterion=criterion) - - return map(models.PGProvider.from_db, rows) - - -# invoice_states -@bp.post('/invoice-states') -@signature(models.InvoiceState, body=models.InvoiceState) -def create_invoice_state(body): - row = biller_api.create_invoice_state( - request.environ['context'], body.to_db()) - - return models.InvoiceState.from_db(row) - - -@bp.get('/invoice-states') -@signature([models.InvoiceState], [Query]) -def list_invoice_states(q=[]): - criterion = _query_to_criterion(q) - - rows = biller_api.list_invoice_states( - request.environ['context'], criterion=criterion) - - return map(models.InvoiceState.from_db, rows) - - -@bp.get('/invoice-states/') -@signature(models.InvoiceState, str,) -def get_invoice_state(state_id): - row = biller_api.get_invoice_state(request.environ['context'], - state_id) - - return models.InvoiceState.from_db(row) - - -@bp.put('/invoice-states/') -@signature(models.InvoiceState, str, body=models.InvoiceState) -def update_invoice_state(state_id, body): - row = biller_api.update_invoice_state( - request.environ['context'], - state_id, - body.to_db()) - - return models.InvoiceState.from_db(row) - - -@bp.delete('/invoice-states/') -def delete_invoice_state(state_id): - 
biller_api.delete_invoice_state( - request.environ['context'], - state_id) - return Response(status=204) - - -# merchants -@bp.post('/merchants') -@signature(models.Merchant, body=models.Merchant) -def create_merchant(body): - row = central_api.create_merchant(request.environ['context'], - body.to_db()) - - return models.Merchant.from_db(row) - - -@bp.get('/merchants') -@signature([models.Merchant], [Query]) -def list_merchants(q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_merchants( - request.environ['context'], criterion=criterion) - - return map(models.Merchant.from_db, rows) - - -@bp.get('/merchants/') -@signature(models.Merchant, str) -def get_merchant(merchant_id): - row = central_api.get_merchant(request.environ['context'], - merchant_id) - - return models.Merchant.from_db(row) - - -@bp.put('/merchants/') -@signature(models.Merchant, str, body=models.Merchant) -def update_merchant(merchant_id, body): - row = central_api.update_merchant( - request.environ['context'], - merchant_id, - body.to_db()) - - return models.Merchant.from_db(row) - - -@bp.delete('/merchants/') -def delete_merchant(merchant_id): - central_api.delete_merchant(request.environ['context'], merchant_id) - return Response(status=204) - - -# Invoices -@bp.post('/merchants//payment-gateways') -@signature(models.PGConfig, str, body=models.PGConfig) -def create_payment_gateway(merchant_id, body): - row = central_api.create_pg_config( - request.environ['context'], - merchant_id, - body.to_db()) - - return models.PGConfig.from_db(row) - - -@bp.get('/merchants//payment-gateways') -@signature([models.PGConfig], str, [Query]) -def list_payment_gateways(merchant_id, q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id) - - rows = central_api.list_pg_configs( - request.environ['context'], criterion=criterion) - - return map(models.PGConfig.from_db, rows) - - -@bp.get('/merchants//payment-gateways/') -@signature(models.PGConfig, str, str) -def 
get_payment_gateway(merchant_id, pg_config_id): - row = central_api.get_pg_config(request.environ['context'], pg_config_id) - - return models.PGConfig.from_db(row) - - -@bp.put('/merchants//payment-gateways/') -@signature(models.PGConfig, str, str, body=models.PGConfig) -def update_payment_gateway(merchant_id, pg_config_id, body): - row = central_api.update_pg_config( - request.environ['context'], - pg_config_id, - body.to_db()) - - return models.PGConfig.from_db(row) - - -@bp.delete('/merchants//payment-gateways/') -def delete_pg_config(merchant_id, pg_config_id): - central_api.delete_pg_config( - request.environ['context'], - pg_config_id) - return Response(status=204) - - -# customers -@bp.post('/merchants//customers') -@signature(models.Customer, str, body=models.Customer) -def create_customer(merchant_id, body): - row = central_api.create_customer( - request.environ['context'], - merchant_id, - body.to_db()) - - return models.Customer.from_db(row) - - -@bp.get('/merchants//customers') -@signature([models.Customer], str, [Query]) -def list_customers(merchant_id, q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id) - - rows = central_api.list_customers( - request.environ['context'], criterion=criterion) - - return map(models.Customer.from_db, rows) - - -@bp.get('/merchants//customers/') -@signature(models.Customer, str, str) -def get_customer(merchant_id, customer_id): - row = central_api.get_customer(request.environ['context'], - customer_id) - - return models.Customer.from_db(row) - - -@bp.put('/merchants//customers/') -@signature(models.Customer, str, str, body=models.Customer) -def update_customer(merchant_id, customer_id, body): - row = central_api.update_customer( - request.environ['context'], - customer_id, - body.to_db()) - - return models.Customer.from_db(row) - - -@bp.delete('/merchants//customers/') -def delete_customer(merchant_id, customer_id): - central_api.delete_customer(request.environ['context'], customer_id) - return 
Response(status=204) - - -# PaymentMethods -@bp.post('/merchants//customers//payment-methods') -@signature(models.PaymentMethod, str, str, body=models.PaymentMethod) -def create_payment_method(merchant_id, customer_id, body): - row = central_api.create_payment_method( - request.environ['context'], - customer_id, - body.to_db()) - - return models.PaymentMethod.from_db(row) - - -@bp.get('/merchants//customers//payment-methods') -@signature([models.PaymentMethod], str, str, [Query]) -def list_payment_methods(merchant_id, customer_id, q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id, - customer_id=customer_id) - - rows = central_api.list_payment_methods( - request.environ['context'], criterion=criterion) - - return map(models.PaymentMethod.from_db, rows) - - -@bp.get('/merchants//customers//payment-methods/' - '') -@signature(models.PaymentMethod, str, str, str) -def get_payment_method(merchant_id, customer_id, pm_id): - row = central_api.get_payment_method(request.environ['context'], pm_id) - - return models.PaymentMethod.from_db(row) - - -@bp.put('/merchants//customers//payment-methods/' - '') -@signature(models.PaymentMethod, str, str, str, body=models.PaymentMethod) -def update_payment_method(merchant_id, customer_id, pm_id, body): - row = central_api.update_payment_method(request.environ['context'], pm_id, - body.to_db()) - - return models.PaymentMethod.from_db(row) - - -@bp.delete('/merchants//customers//payment-methods/' - '') -def delete_payment_method(merchant_id, customer_id, pm_id): - central_api.delete_payment_method(request.environ['context'], pm_id) - return Response(status=204) - - -# Plans -@bp.post('/merchants//plans') -@signature(models.Plan, str, body=models.Plan) -def create_plan(merchant_id, body): - row = central_api.create_plan( - request.environ['context'], - merchant_id, - body.to_db()) - - return models.Plan.from_db(row) - - -@bp.get('/merchants//plans') -@signature([models.Plan], str, [Query]) -def list_plans(merchant_id, 
q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id) - - rows = central_api.list_plans( - request.environ['context'], criterion=criterion) - - return map(models.Plan.from_db, rows) - - -@bp.get('/merchants//plans/') -@signature(models.Plan, str, str) -def get_plan(merchant_id, plan_id): - row = central_api.get_plan(request.environ['context'], - plan_id) - - return models.Plan.from_db(row) - - -@bp.put('/merchants//plans/') -@signature(models.Plan, str, str, body=models.Plan) -def update_plan(merchant_id, plan_id, body): - row = central_api.update_plan( - request.environ['context'], - plan_id, - body.to_db()) - - return models.Plan.from_db(row) - - -@bp.delete('/merchants//plans/') -def delete_plan(merchant_id, plan_id): - central_api.delete_plan(request.environ['context'], plan_id) - return Response(status=204) - - -# Plan Item -@bp.put('/merchants//plans//items/') -@signature(models.PlanItem, str, str, str) -def add_plan_item(merchant_id, plan_id, product_id): - values = { - 'plan_id': plan_id, - 'product_id': product_id - } - - row = central_api.create_plan_item(request.environ['context'], values) - - return models.PlanItem.from_db(row) - - -@bp.patch('/merchants//plans//items/') -@signature(models.PlanItem, str, str, str, body=models.PlanItem) -def update_plan_item(merchant_id, plan_id, product_id, body): - row = central_api.update_plan_item( - request.environ['context'], plan_id, product_id, body.to_db()) - - return models.PlanItem.from_db(row) - - -@bp.delete('/merchants//plans//items/') -def delete_plan_item(merchant_id, plan_id, product_id): - central_api.delete_plan_item(request.environ['context'], - plan_id, product_id) - return Response(status=204) - - -# Products -@bp.post('/merchants//products') -@signature(models.Product, str, body=models.Product) -def create_product(merchant_id, body): - row = central_api.create_product( - request.environ['context'], - merchant_id, - body.to_db()) - - return models.Product.from_db(row) - - 
-@bp.get('/merchants//products') -@signature([models.Product], str, [Query]) -def list_products(merchant_id, q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id) - - rows = central_api.list_products( - request.environ['context'], criterion=criterion) - - return map(models.Product.from_db, rows) - - -@bp.get('/merchants//products/') -@signature(models.Product, str, str) -def get_product(merchant_id, product_id): - row = central_api.get_product(request.environ['context'], - product_id) - - return models.Product.from_db(row) - - -@bp.put('/merchants//products/') -@signature(models.Product, str, str, body=models.Product) -def update_product(merchant_id, product_id, body): - row = central_api.update_product( - request.environ['context'], - product_id, - body.to_db()) - - return models.Product.from_db(row) - - -@bp.delete('/merchants//products/') -def delete_product(merchant_id, product_id): - central_api.delete_product(request.environ['context'], product_id) - return Response(status=204) - - -# Invoices -@bp.post('/merchants//invoices') -@signature(models.Invoice, str, body=models.Invoice) -def create_invoice(merchant_id, body): - row = biller_api.create_invoice( - request.environ['context'], - merchant_id, - body.to_db()) - - return models.Invoice.from_db(row) - - -@bp.get('/merchants//invoices') -@signature([models.InvoiceState], str, [Query]) -def list_invoices(merchant_id, q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id) - - rows = biller_api.list_invoices( - request.environ['context'], criterion=criterion) - - return map(models.Invoice.from_db, rows) - - -@bp.get('/merchants//invoices/') -@signature(models.Invoice, str, str) -def get_invoice(merchant_id, invoice_id): - row = biller_api.get_invoice(request.environ['context'], - invoice_id) - - return models.Invoice.from_db(row) - - -@bp.put('/merchants//invoices/') -@signature(models.Invoice, str, str, body=models.Invoice) -def update_invoice(merchant_id, invoice_id, body): - row 
= biller_api.update_invoice( - request.environ['context'], - invoice_id, - body.to_db()) - - return models.Invoice.from_db(row) - - -@bp.delete('/merchants//invoices/') -def delete_invoice(merchant_id, invoice_id): - biller_api.delete_invoice(request.environ['context'], invoice_id) - return Response(status=204) - - -# Products -@bp.post('/merchants//invoices//lines') -@signature(models.InvoiceLine, str, str, body=models.InvoiceLine) -def create_invoice_line(merchant_id, invoice_id, body): - row = biller_api.create_invoice_line( - request.environ['context'], - invoice_id, - body.to_db()) - - return models.Product.from_db(row) - - -@bp.get('/merchants//invoices//lines') -@signature([models.InvoiceLine], str, str, [Query]) -def list_invoice_lines(merchant_id, invoice_id, q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id, - invoice_id=invoice_id) - - rows = biller_api.list_invoice_lines( - request.environ['context'], criterion=criterion) - - return map(models.Product.from_db, rows) - - -@bp.get('/merchants//invoices//lines/') -@signature(models.InvoiceLine, str, str, str) -def get_invoice_line(merchant_id, invoice_id, line_id): - row = biller_api.get_invoice_line(request.environ['context'], - line_id) - - return models.Product.from_db(row) - - -@bp.put('/merchants//invoices//lines/') -@signature(models.InvoiceLine, str, str, str, body=models.InvoiceLine) -def update_invoice_line(merchant_id, invoice_id, line_id, body): - row = biller_api.update_invoice_line( - request.environ['context'], - line_id, - body.as_dict()) - - return models.Product.from_db(row) - - -@bp.delete('/merchants//invoices//lines/') -def delete_invoice_line(merchant_id, invoice_id, line_id): - biller_api.delete_invoice_line(request.environ['context'], line_id) - return Response(status=204) - - -# Subscription -@bp.post('/merchants//subscriptions') -@signature(models.Subscription, str, body=models.Subscription) -def create_subscription(merchant_id, body): - row = 
central_api.create_subscription( - request.environ['context'], - body.to_db()) - - return models.Subscription.from_db(row) - - -@bp.get('/merchants//subscriptions') -@signature([models.Subscription], str, [Query]) -def list_subscriptions(merchant_id, q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id) - - rows = central_api.list_subscriptions( - request.environ['context'], criterion=criterion) - - return map(models.Subscription.from_db, rows) - - -@bp.get('/merchants//subscriptions/') -@signature(models.Subscription, str, str) -def get_subscription(merchant_id, subscription_id): - row = central_api.get_subscription(request.environ['context'], - subscription_id) - - return models.Subscription.from_db(row) - - -@bp.put('/merchants//subscriptions/') -@signature(models.Subscription, str, str, body=models.Subscription) -def update_subscription(merchant_id, subscription_id, body): - row = central_api.update_subscription( - request.environ['context'], - subscription_id, - body.to_db()) - - return models.Subscription.from_db(row) - - -@bp.delete('/merchants//subscriptions/') -def delete_subscription(merchant_id, subscription_id): - central_api.delete_subscription( - request.environ['context'], - subscription_id) - return Response(status=204) - - -# Usage -@bp.post('/merchants//usage') -@signature(models.Usage, str, body=models.Usage) -def create_usage(merchant_id, body): - values = body.to_db() - - values['merchant_id'] = merchant_id - row = rater_api.create_usage(request.environ['context'], values) - - return models.Usage.from_db(row) - - -@bp.get('/merchants//usage') -@signature([models.Usage], str, [Query]) -def list_usages(merchant_id, q=[]): - criterion = _query_to_criterion(q, merchant_id=merchant_id) - - rows = rater_api.list_usages( - request.environ['context'], criterion=criterion) - - return map(models.Usage.from_db, rows) - - -@bp.get('/merchants//usage/') -@signature([models.Usage], str, str) -def get_usage(merchant_id, usage_id): - row = 
rater_api.get_usage(request.environ['context'], - usage_id) - - return models.Usage.from_db(row) - - -@bp.put('/merchants//usage/') -@signature(models.Usage, str, str, body=models.Usage) -def update_usage(merchant_id, usage_id, body): - row = rater_api.update_usage( - request.environ['context'], - usage_id, - body.to_db()) - - return models.Usage.from_db(row) - - -@bp.delete('/merchants//usage/') -def delete_usage(merchant_id, usage_id): - rater_api.delete_usage( - request.environ['context'], - usage_id) - return Response(status=204) diff --git a/billingstack/wsgi.py b/billingstack/api/v2/__init__.py similarity index 56% rename from billingstack/wsgi.py rename to billingstack/api/v2/__init__.py index 890c185..71751cb 100644 --- a/billingstack/wsgi.py +++ b/billingstack/api/v2/__init__.py @@ -1,6 +1,6 @@ -# Copyright 2012 Managed I.T. +# -*- encoding: utf-8 -*- # -# Author: Kiall Mac Innes +# Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -13,17 +13,6 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
-# -# Copied: Moniker -from billingstack.openstack.common import wsgi - - -class Middleware(wsgi.Middleware): - @classmethod - def factory(cls, global_config, **local_conf): - """ Used for paste app factories in paste.deploy config files """ - - def _factory(app): - return cls(app, **local_conf) +from oslo.config import cfg - return _factory +cfg.CONF.import_opt('state_path', 'billingstack.paths') diff --git a/billingstack/api/versions.py b/billingstack/api/v2/controllers/__init__.py similarity index 53% rename from billingstack/api/versions.py rename to billingstack/api/v2/controllers/__init__.py index 5b94e41..f7ed5c6 100644 --- a/billingstack/api/versions.py +++ b/billingstack/api/v2/controllers/__init__.py @@ -1,6 +1,6 @@ -# Copyright 2012 Hewlett-Packard Development Company, L.P. All Rights Reserved. +# -*- encoding: utf-8 -*- # -# Author: Kiall Mac Innes +# Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -13,21 +13,3 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -# -# Copied: Moniker -import flask - - -def factory(global_config, **local_conf): - app = flask.Flask('billingstack.api.versions') - - @app.route('/', methods=['GET']) - def version_list(): - return flask.jsonify({ - "versions": [{ - "id": "v1", - "status": "CURRENT" - }] - }) - - return app diff --git a/billingstack/api/v2/controllers/currency.py b/billingstack/api/v2/controllers/currency.py new file mode 100644 index 0000000..6f7176d --- /dev/null +++ b/billingstack/api/v2/controllers/currency.py @@ -0,0 +1,67 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.central.rpcapi import central_api + + +class CurrencyController(RestController): + def __init__(self, id_): + self.id_ = id_ + + @wsme_pecan.wsexpose(models.Currency) + def get_all(self): + row = central_api.get_currency(request.ctxt, self.id_) + + return models.Currency.from_db(row) + + @wsme.validate(models.Currency) + @wsme_pecan.wsexpose(models.Currency, body=models.Currency) + def patch(self, body): + row = central_api.update_currency(request.ctxt, self.id_, body.to_db()) + return models.Currency.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_currency(request.ctxt, self.id_) + + +class CurrenciesController(RestController): + @expose() + def _lookup(self, currency_id, *remainder): + return CurrencyController(currency_id), remainder + + @wsme.validate(models.Currency) + @wsme_pecan.wsexpose(models.Currency, body=models.Currency, + status_code=202) + def post(self, body): + row = central_api.create_currency(request.ctxt, body.to_db()) + + return models.Currency.from_db(row) + + @wsme_pecan.wsexpose([models.Currency], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion(q) + + rows = central_api.list_currencies( + request.ctxt, criterion=criterion) + + return map(models.Currency.from_db, rows) diff --git a/billingstack/api/v2/controllers/customer.py 
b/billingstack/api/v2/controllers/customer.py new file mode 100644 index 0000000..ea16ebd --- /dev/null +++ b/billingstack/api/v2/controllers/customer.py @@ -0,0 +1,74 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.api.v2.controllers.payment import PaymentMethodsController +from billingstack.central.rpcapi import central_api + + +class CustomerController(RestController): + payment_methods = PaymentMethodsController() + + def __init__(self, id_): + self.id_ = id_ + request.context['customer_id'] = id_ + + @wsme_pecan.wsexpose(models.Customer) + def get_all(self): + row = central_api.get_customer(request.ctxt, self.id_) + + return models.Customer.from_db(row) + + @wsme.validate(models.Customer) + @wsme_pecan.wsexpose(models.Customer, body=models.Customer) + def patch(self, body): + row = central_api.update_customer(request.ctxt, self.id_, body.to_db()) + return models.Customer.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_customer(request.ctxt, self.id_) + + +class CustomersController(RestController): + @expose() + def _lookup(self, customer_id, *remainder): + return CustomerController(customer_id), remainder + + 
@wsme.validate(models.Customer) + @wsme_pecan.wsexpose(models.Customer, body=models.Customer, + status_code=202) + def post(self, body): + row = central_api.create_customer( + request.ctxt, + request.context['merchant_id'], + body.to_db()) + + return models.Customer.from_db(row) + + @wsme_pecan.wsexpose([models.Customer], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion(q) + + rows = central_api.list_customers( + request.ctxt, criterion=criterion) + + return map(models.Customer.from_db, rows) diff --git a/billingstack/api/v2/controllers/invoice.py b/billingstack/api/v2/controllers/invoice.py new file mode 100644 index 0000000..3bc1b0e --- /dev/null +++ b/billingstack/api/v2/controllers/invoice.py @@ -0,0 +1,73 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.biller.rpcapi import biller_api + + +class InvoiceController(RestController): + def __init__(self, id_): + self.id_ = id_ + request.context['invoice_id'] = id_ + + @wsme_pecan.wsexpose(models.Invoice) + def get_all(self): + row = biller_api.get_invoice(request.ctxt, self.id_) + + return models.Invoice.from_db(row) + + @wsme.validate(models.Invoice) + @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice) + def patch(self, body): + row = biller_api.update_invoice(request.ctxt, self.id_, body.to_db()) + + return models.Invoice.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + biller_api.delete_invoice(request.ctxt, self.id_) + + +class InvoicesController(RestController): + @expose() + def _lookup(self, invoice_id, *remainder): + return InvoiceController(invoice_id), remainder + + @wsme.validate(models.Invoice) + @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice, status_code=202) + def post(self, body): + row = biller_api.create_invoice( + request.ctxt, + request.context['merchant_id'], + body.to_db()) + + return models.Invoice.from_db(row) + + @wsme_pecan.wsexpose([models.Invoice], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion( + q, + merchant_id=request.context['merchant_id']) + + rows = biller_api.list_invoices( + request.ctxt, criterion=criterion) + + return map(models.Invoice.from_db, rows) diff --git a/billingstack/api/v2/controllers/invoice_state.py b/billingstack/api/v2/controllers/invoice_state.py new file mode 100644 index 0000000..0852a6a --- /dev/null +++ b/billingstack/api/v2/controllers/invoice_state.py @@ -0,0 +1,68 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this 
file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.biller.rpcapi import biller_api + + +class InvoiceStateController(RestController): + def __init__(self, id_): + self.id_ = id_ + + @wsme_pecan.wsexpose(models.InvoiceState) + def get_all(self): + row = biller_api.get_invoice_state(request.ctxt, self.id_) + + return models.InvoiceState.from_db(row) + + @wsme.validate(models.InvoiceState) + @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState) + def patch(self, body): + row = biller_api.update_invoice_state( + request.ctxt, self.id_, body.to_db()) + return models.InvoiceState.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + biller_api.delete_invoice_state(request.ctxt, self.id_) + + +class InvoiceStatesController(RestController): + @expose() + def _lookup(self, invoice_state_id, *remainder): + return InvoiceStateController(invoice_state_id), remainder + + @wsme.validate(models.InvoiceState) + @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState, + status_code=202) + def post(self, body): + row = biller_api.create_invoice_state(request.ctxt, body.to_db()) + + return models.InvoiceState.from_db(row) + + @wsme_pecan.wsexpose([models.InvoiceState], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion(q) + + rows = biller_api.list_invoice_states( + request.ctxt, 
criterion=criterion) + + return map(models.InvoiceState.from_db, rows) diff --git a/billingstack/api/v2/controllers/language.py b/billingstack/api/v2/controllers/language.py new file mode 100644 index 0000000..691f0d8 --- /dev/null +++ b/billingstack/api/v2/controllers/language.py @@ -0,0 +1,67 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.central.rpcapi import central_api + + +class LanguageController(RestController): + def __init__(self, id_): + self.id_ = id_ + + @wsme_pecan.wsexpose(models.Language) + def get_all(self): + row = central_api.get_language(request.ctxt, self.id_) + + return models.Language.from_db(row) + + @wsme.validate(models.InvoiceState) + @wsme_pecan.wsexpose(models.Language, body=models.Language) + def patch(self, body): + row = central_api.update_language(request.ctxt, self.id_, body.to_db()) + return models.Language.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_language(request.ctxt, self.id_) + + +class LanguagesController(RestController): + @expose() + def _lookup(self, language_id, *remainder): + return LanguageController(language_id), remainder + + @wsme.validate(models.InvoiceState) + 
@wsme_pecan.wsexpose(models.Language, body=models.Language, + status_code=202) + def post(self, body): + row = central_api.create_language(request.ctxt, body.to_db()) + + return models.Language.from_db(row) + + @wsme_pecan.wsexpose([models.Language], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion(q) + + rows = central_api.list_languages( + request.ctxt, criterion=criterion) + + return map(models.Language.from_db, rows) diff --git a/billingstack/api/v2/controllers/merchant.py b/billingstack/api/v2/controllers/merchant.py new file mode 100644 index 0000000..e42ea74 --- /dev/null +++ b/billingstack/api/v2/controllers/merchant.py @@ -0,0 +1,85 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.central.rpcapi import central_api +from billingstack.api.v2.controllers.customer import CustomersController +from billingstack.api.v2.controllers.payment import PGConfigsController +from billingstack.api.v2.controllers.plan import PlansController +from billingstack.api.v2.controllers.product import ProductsController +from billingstack.api.v2.controllers.subscription import \ + SubscriptionsController +from billingstack.api.v2.controllers.invoice import InvoicesController +from billingstack.api.v2.controllers.usage import UsagesController + + +class MerchantController(RestController): + customers = CustomersController() + payment_gateway_configurations = PGConfigsController() + plans = PlansController() + products = ProductsController() + subscriptions = SubscriptionsController() + + invoices = InvoicesController() + usage = UsagesController() + + def __init__(self, id_): + self.id_ = id_ + request.context['merchant_id'] = id_ + + @wsme_pecan.wsexpose(models.Merchant) + def get_all(self): + row = central_api.get_merchant(request.ctxt, self.id_) + + return models.Merchant.from_db(row) + + @wsme.validate(models.InvoiceState) + @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant) + def patch(self, body): + row = central_api.update_merchant(request.ctxt, self.id_, body.to_db()) + return models.Merchant.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_merchant(request.ctxt, self.id_) + + +class MerchantsController(RestController): + @expose() + def _lookup(self, merchant_id, *remainder): + return MerchantController(merchant_id), remainder + + @wsme.validate(models.Merchant) + @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant, + status_code=202) + def post(self, body): + row = 
central_api.create_merchant(request.ctxt, body.to_db()) + + return models.Merchant.from_db(row) + + @wsme_pecan.wsexpose([models.Merchant], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion(q) + + rows = central_api.list_merchants( + request.ctxt, criterion=criterion) + + return map(models.Merchant.from_db, rows) diff --git a/billingstack/api/v2/controllers/payment.py b/billingstack/api/v2/controllers/payment.py new file mode 100644 index 0000000..bbf7ad9 --- /dev/null +++ b/billingstack/api/v2/controllers/payment.py @@ -0,0 +1,139 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.central.rpcapi import central_api + + +class PGProviders(RestController): + @wsme_pecan.wsexpose([models.PGProvider], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion(q) + + rows = central_api.list_pg_providers( + request.ctxt, criterion=criterion) + + return map(models.PGProvider.from_db, rows) + + +class PGConfigController(RestController): + def __init__(self, id_): + self.id_ = id_ + + @wsme_pecan.wsexpose(models.PGConfig) + def get_all(self): + row = central_api.get_pg_config(request.ctxt, self.id_) + + return models.PGConfig.from_db(row) + + @wsme.validate(models.PGConfig) + @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig) + def patch(self, body): + row = central_api.update_pg_config( + request.ctxt, + self.id_, + body.to_db()) + + return models.PGConfig.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_pg_config(request.ctxt, self.id_) + + +class PGConfigsController(RestController): + @expose() + def _lookup(self, method_id, *remainder): + return PGConfigController(method_id), remainder + + @wsme.validate(models.PGConfig) + @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig, + status_code=202) + def post(self, body): + row = central_api.create_pg_config( + request.ctxt, + request.context['merchant_id'], + body.to_db()) + + return models.PGConfig.from_db(row) + + @wsme_pecan.wsexpose([models.PGConfig], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion( + q, merchant_id=request.context['merchant_id']) + + rows = central_api.list_pg_configs( + request.ctxt, criterion=criterion) + + return map(models.PGConfig.from_db, rows) + + +class PaymentMethodController(RestController): + def __init__(self, id_): + self.id_ = id_ + 
request.context['payment_method_id'] = id_ + + @wsme_pecan.wsexpose(models.PaymentMethod) + def get_all(self): + row = central_api.get_payment_method(request.ctxt, self.id_) + + return models.PaymentMethod.from_db(row) + + @wsme.validate(models.PaymentMethod) + @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) + def patch(self, body): + row = central_api.update_payment_method( + request.ctxt, + self.id_, + body.to_db()) + + return models.PaymentMethod.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_payment_method(request.ctxt, self.id_) + + +class PaymentMethodsController(RestController): + @expose() + def _lookup(self, method_id, *remainder): + return PaymentMethodController(method_id), remainder + + @wsme.validate(models.PaymentMethod) + @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod, + status_code=202) + def post(self, body): + row = central_api.create_payment_method( + request.ctxt, + request.context['customer_id'], + body.to_db()) + + return models.PaymentMethod.from_db(row) + + @wsme_pecan.wsexpose([models.PaymentMethod], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion( + q, merchant_id=request.context['merchant_id'], + customer_id=request.context['customer_id']) + + rows = central_api.list_payment_methods( + request.ctxt, criterion=criterion) + + return map(models.PaymentMethod.from_db, rows) diff --git a/billingstack/api/v2/controllers/plan.py b/billingstack/api/v2/controllers/plan.py new file mode 100644 index 0000000..519d8a8 --- /dev/null +++ b/billingstack/api/v2/controllers/plan.py @@ -0,0 +1,116 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.central.rpcapi import central_api + + +class ItemController(RestController): + def __init__(self, id_): + self.id_ = id_ + + @wsme.validate(models.PlanItem) + @wsme_pecan.wsexpose(models.PlanItem, body=models.PlanItem) + def put(self, body): + values = { + 'plan_id': request.context['plan_id'], + 'product_id': self.id_ + } + + row = central_api.create_plan_item(request.ctxt, values) + + return models.PlanItem.from_db(row) + + @wsme.validate(models.PlanItem) + @wsme_pecan.wsexpose(models.PlanItem, body=models.PlanItem) + def patch(self, body): + row = central_api.update_plan_item( + request.ctxt, + request.context['plan_id'], + self.id_, + body.to_db()) + + return models.PlanItem.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self, id_): + central_api.delete_plan_item( + request.ctxt, + request.context['plan_id'], + id_) + + +class ItemsController(RestController): + @expose() + def _lookup(self, id_, *remainder): + return ItemController(id_), remainder + + +class PlanController(RestController): + items = ItemsController() + + def __init__(self, id_): + self.id_ = id_ + request.context['plan_id'] = id_ + + @wsme_pecan.wsexpose(models.Plan) + def get_all(self): + row = central_api.get_plan(request.ctxt, self.id_) + + return models.Plan.from_db(row) + + @wsme.validate(models.Plan) + 
@wsme_pecan.wsexpose(models.Plan, body=models.Plan) + def patch(self, body): + row = central_api.update_plan(request.ctxt, self.id_, body.to_db()) + + return models.Plan.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_plan(request.ctxt, self.id_) + + +class PlansController(RestController): + @expose() + def _lookup(self, plan_id, *remainder): + return PlanController(plan_id), remainder + + @wsme.validate(models.Plan) + @wsme_pecan.wsexpose(models.Plan, body=models.Plan, status_code=202) + def post(self, body): + row = central_api.create_plan( + request.ctxt, + request.context['merchant_id'], + body.to_db()) + + return models.Plan.from_db(row) + + @wsme_pecan.wsexpose([models.Plan], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion( + q, + merchant_id=request.context['merchant_id']) + + rows = central_api.list_plans( + request.ctxt, criterion=criterion) + + return map(models.Plan.from_db, rows) diff --git a/billingstack/api/v2/controllers/product.py b/billingstack/api/v2/controllers/product.py new file mode 100644 index 0000000..dae1ef3 --- /dev/null +++ b/billingstack/api/v2/controllers/product.py @@ -0,0 +1,74 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.central.rpcapi import central_api + + +class ProductController(RestController): + def __init__(self, id_): + self.id_ = id_ + request.context['product_id'] = id_ + + @wsme_pecan.wsexpose(models.Product) + def get_all(self): + row = central_api.get_product(request.ctxt, self.id_) + + return models.Product.from_db(row) + + @wsme.validate(models.Product) + @wsme_pecan.wsexpose(models.Product, body=models.Product) + def patch(self, body): + row = central_api.update_product(request.ctxt, self.id_, body.to_db()) + + return models.Product.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_product(request.ctxt, self.id_) + + +class ProductsController(RestController): + @expose() + def _lookup(self, product_id, *remainder): + return ProductController(product_id), remainder + + @wsme.validate(models.Product) + @wsme_pecan.wsexpose(models.Product, body=models.Product, + status_code=202) + def post(self, body): + row = central_api.create_product( + request.ctxt, + request.context['merchant_id'], + body.to_db()) + + return models.Product.from_db(row) + + @wsme_pecan.wsexpose([models.Product], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion( + q, + merchant_id=request.context['merchant_id']) + + rows = central_api.list_products( + request.ctxt, criterion=criterion) + + return map(models.Product.from_db, rows) diff --git a/billingstack/api/v2/controllers/root.py b/billingstack/api/v2/controllers/root.py new file mode 100644 index 0000000..a75a04a --- /dev/null +++ b/billingstack/api/v2/controllers/root.py @@ -0,0 +1,42 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in 
compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from billingstack.openstack.common import log +from billingstack.api.v2.controllers.currency import CurrenciesController +from billingstack.api.v2.controllers.language import LanguagesController +from billingstack.api.v2.controllers.merchant import MerchantsController +from billingstack.api.v2.controllers.invoice_state import \ + InvoiceStatesController +from billingstack.api.v2.controllers.payment import PGProviders + + +LOG = log.getLogger(__name__) + + +class V2Controller(object): + # Central + currencies = CurrenciesController() + languages = LanguagesController() + merchants = MerchantsController() + + # Biller + invoice_states = InvoiceStatesController() + + # Collector + payment_gateway_providers = PGProviders() + + +class RootController(object): + v2 = V2Controller() diff --git a/billingstack/api/v2/controllers/subscription.py b/billingstack/api/v2/controllers/subscription.py new file mode 100644 index 0000000..fc9cf98 --- /dev/null +++ b/billingstack/api/v2/controllers/subscription.py @@ -0,0 +1,75 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.central.rpcapi import central_api + + +class SubscriptionController(RestController): + def __init__(self, id_): + self.id_ = id_ + request.context['subscription_id'] = id_ + + @wsme_pecan.wsexpose(models.Subscription) + def get_all(self): + row = central_api.get_subscription(request.ctxt, self.id_) + + return models.Subscription.from_db(row) + + @wsme.validate(models.Subscription) + @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription) + def patch(self, body): + row = central_api.update_subscription(request.ctxt, self.id_, + body.to_db()) + + return models.Subscription.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + central_api.delete_subscription(request.ctxt, self.id_) + + +class SubscriptionsController(RestController): + @expose() + def _lookup(self, subscription_id, *remainder): + return SubscriptionController(subscription_id), remainder + + @wsme.validate(models.Subscription) + @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription, + status_code=202) + def post(self, body): + row = central_api.create_subscription( + request.ctxt, + request.context['merchant_id'], + body.to_db()) + + return models.Subscription.from_db(row) + + @wsme_pecan.wsexpose([models.Subscription], [Query]) + def get_all(self, q=[]): + criterion = _query_to_criterion( + q, + merchant_id=request.context['merchant_id']) + + rows = central_api.list_subscriptions( + request.ctxt, criterion=criterion) + + return map(models.Subscription.from_db, rows) diff --git a/billingstack/api/v2/controllers/usage.py b/billingstack/api/v2/controllers/usage.py new file mode 100644 index 0000000..3b00e73 --- /dev/null +++ 
b/billingstack/api/v2/controllers/usage.py @@ -0,0 +1,73 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from pecan import expose, request +import wsme +import wsmeext.pecan as wsme_pecan + + +from billingstack.api.base import Query, _query_to_criterion, RestController +from billingstack.api.v2 import models +from billingstack.rater.rpcapi import rater_api + + +class UsageController(RestController): + def __init__(self, id_): + self.id_ = id_ + request.context['usage_id'] = id_ + + @wsme_pecan.wsexpose(models.Usage) + def get_all(self): + row = rater_api.get_usage(request.ctxt, self.id_) + + return models.Usage.from_db(row) + + @wsme.validate(models.Usage) + @wsme_pecan.wsexpose(models.Usage, body=models.Usage) + def patch(self, body): + row = rater_api.update_usage(request.ctxt, self.id_, body.to_db()) + + return models.Usage.from_db(row) + + @wsme_pecan.wsexpose(None, status_code=204) + def delete(self): + rater_api.delete_usage(request.ctxt, self.id_) + + +class UsagesController(RestController): + @expose() + def _lookup(self, usage_id, *remainder): + return UsageController(usage_id), remainder + + @wsme.validate(models.Usage) + @wsme_pecan.wsexpose(models.Usage, body=models.Usage, status_code=202) + def post(self, body): + row = rater_api.create_usage( + request.ctxt, + request.context['merchant_id'], + body.to_db()) + + return models.Usage.from_db(row) + + @wsme_pecan.wsexpose([models.Usage], [Query]) + 
def get_all(self, q=[]): + criterion = _query_to_criterion( + q, + merchant_id=request.context['merchant_id']) + + rows = rater_api.list_usages( + request.ctxt, criterion=criterion) + + return map(models.Usage.from_db, rows) diff --git a/billingstack/api/v1/models.py b/billingstack/api/v2/models.py similarity index 100% rename from billingstack/api/v1/models.py rename to billingstack/api/v2/models.py diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index a6e4035..294e605 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -16,9 +16,8 @@ """ Base classes for API tests. """ -from billingstack.api.v1 import factory -from billingstack.api.middleware.errors import FaultWrapperMiddleware -from billingstack.api.auth import NoAuthContextMiddleware +import pecan.testing + from billingstack.openstack.common import jsonutils as json from billingstack.openstack.common import log from billingstack.tests.base import ServiceTestCase @@ -44,15 +43,9 @@ def _ensure_slash(self, path): def make_path(self, path): path = self._ensure_slash(path) if self.PATH_PREFIX: - path = path + self._ensure_slash(self.PATH_PREFIX) + path = self._ensure_slash(self.PATH_PREFIX) + path return path - def load_content(self, response): - try: - response.json = json.loads(response.data) - except ValueError: - response.json = None - def _query(self, queries): query_params = {'q.field': [], 'q.value': [], @@ -77,17 +70,15 @@ def get(self, path, headers=None, q=[], status_code=200, LOG.debug('GET: %s %r', path, all_params) - response = self.client.get(path, - content_type=content_type, - query_string=all_params, - headers=headers) + response = self.app.get( + path, + params=all_params, + headers=headers) - LOG.debug('GOT RESPONSE: %s', response.data) + LOG.debug('GOT RESPONSE: %s', response.body) self.assertEqual(response.status_code, status_code) - self.load_content(response) - return response def post(self, path, data, headers=None, 
content_type="application/json", @@ -97,18 +88,16 @@ def post(self, path, data, headers=None, content_type="application/json", LOG.debug('POST: %s %s', path, data) content = json.dumps(data) - response = self.client.post( + response = self.app.post( path, - data=content, + content, content_type=content_type, headers=headers) - LOG.debug('POST RESPONSE: %r' % response.data) + LOG.debug('POST RESPONSE: %r' % response.body) self.assertEqual(response.status_code, status_code) - self.load_content(response) - return response def put(self, path, data, headers=None, content_type="application/json", @@ -118,17 +107,34 @@ def put(self, path, data, headers=None, content_type="application/json", LOG.debug('PUT: %s %s', path, data) content = json.dumps(data) - response = self.client.put( + response = self.app.put( path, - data=content, + content, content_type=content_type, headers=headers) - LOG.debug('PUT RESPONSE: %r' % response.data) + LOG.debug('PUT RESPONSE: %r' % response.body) self.assertEqual(response.status_code, status_code) - self.load_content(response) + return response + + def patch_(self, path, data, headers=None, content_type="application/json", + q=[], status_code=200, **params): + path = self.make_path(path) + + LOG.debug('PUT: %s %s', path, data) + + content = json.dumps(data) + response = self.app.patch( + path, + content, + content_type=content_type, + headers=headers) + + LOG.debug('PATCH RESPONSE: %r', response.body) + + self.assertEqual(response.status_code, status_code) return response @@ -138,9 +144,7 @@ def delete(self, path, status_code=204, headers=None, q=[], **params): LOG.debug('DELETE: %s %r', path, all_params) - response = self.client.delete(path, query_string=all_params) - - #LOG.debug('DELETE RESPONSE: %r' % response.body) + response = self.app.delete(path, params=all_params) self.assertEqual(response.status_code, status_code) @@ -151,6 +155,7 @@ class FunctionalTest(ServiceTestCase, APITestMixin): """ billingstack.api base test """ + def 
setUp(self): super(FunctionalTest, self).setUp() @@ -159,7 +164,13 @@ def setUp(self): self.start_service('central') self.setSamples() - self.app = factory({}) - self.app.wsgi_app = FaultWrapperMiddleware(self.app.wsgi_app) - self.app.wsgi_app = NoAuthContextMiddleware(self.app.wsgi_app) - self.client = self.app.test_client() + self.app = self.make_app() + + def make_app(self): + self.config = { + 'app': { + 'root': 'billingstack.api.v2.controllers.root.RootController', + 'modules': ['billingstack.api'], + } + } + return pecan.testing.load_test_app(self.config) diff --git a/billingstack/tests/api/v1/__init__.py b/billingstack/tests/api/v1/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/api/v1/base.py b/billingstack/tests/api/v1/base.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/api/v2/__init__.py b/billingstack/tests/api/v2/__init__.py new file mode 100644 index 0000000..40d04f0 --- /dev/null +++ b/billingstack/tests/api/v2/__init__.py @@ -0,0 +1,5 @@ +from billingstack.tests.api.base import FunctionalTest + + +class V2Test(FunctionalTest): + PATH_PREFIX = '/v2' diff --git a/billingstack/tests/api/v1/test_currency.py b/billingstack/tests/api/v2/test_currency.py similarity index 92% rename from billingstack/tests/api/v1/test_currency.py rename to billingstack/tests/api/v2/test_currency.py index 34a7ec0..cdbd814 100644 --- a/billingstack/tests/api/v1/test_currency.py +++ b/billingstack/tests/api/v2/test_currency.py @@ -19,12 +19,12 @@ import logging -from billingstack.tests.api.base import FunctionalTest +from billingstack.tests.api.v2 import V2Test LOG = logging.getLogger(__name__) -class TestCurrency(FunctionalTest): +class TestCurrency(V2Test): __test__ = True path = "currencies" @@ -53,7 +53,7 @@ def test_update_currency(self): _, currency = self.create_currency(fixture=1) url = self.item_path(currency['name']) - resp = self.put(url, currency) + resp = self.patch_(url, currency) 
self.assertData(resp.json, currency) diff --git a/billingstack/tests/api/v1/test_customer.py b/billingstack/tests/api/v2/test_customer.py similarity index 92% rename from billingstack/tests/api/v1/test_customer.py rename to billingstack/tests/api/v2/test_customer.py index 5deca31..791a3c5 100644 --- a/billingstack/tests/api/v1/test_customer.py +++ b/billingstack/tests/api/v2/test_customer.py @@ -17,11 +17,11 @@ Test Customers. """ -from billingstack.tests.api.base import FunctionalTest -from billingstack.api.v1.models import Customer +from billingstack.tests.api.v2 import V2Test +from billingstack.api.v2.models import Customer -class TestCustomer(FunctionalTest): +class TestCustomer(V2Test): __test__ = True path = "merchants/%s/customers" @@ -69,7 +69,7 @@ def test_update_customer(self): expected['name'] = 'test' url = self.item_path(self.merchant['id'], customer['id']) - resp = self.put(url, customer) + resp = self.patch_(url, customer) self.assertData(resp.json, customer) diff --git a/billingstack/tests/api/v1/test_invoice_state.py b/billingstack/tests/api/v2/test_invoice_state.py similarity index 92% rename from billingstack/tests/api/v1/test_invoice_state.py rename to billingstack/tests/api/v2/test_invoice_state.py index 472ae7c..c1d3672 100644 --- a/billingstack/tests/api/v1/test_invoice_state.py +++ b/billingstack/tests/api/v2/test_invoice_state.py @@ -19,14 +19,14 @@ import logging -from billingstack.tests.api.base import FunctionalTest +from billingstack.tests.api.v2 import V2Test LOG = logging.getLogger(__name__) -class TestInvoiceState(FunctionalTest): +class TestInvoiceState(V2Test): __test__ = True - path = "invoice-states" + path = "invoice_states" def setUp(self): super(TestInvoiceState, self).setUp() @@ -59,7 +59,7 @@ def test_update_invoice_state(self): _, state = self.create_invoice_state() url = self.item_path(state['name']) - resp = self.put(url, state) + resp = self.patch_(url, state) self.assertData(resp.json, state) diff --git 
a/billingstack/tests/api/v1/test_language.py b/billingstack/tests/api/v2/test_language.py similarity index 92% rename from billingstack/tests/api/v1/test_language.py rename to billingstack/tests/api/v2/test_language.py index 8ed1195..6e60e7d 100644 --- a/billingstack/tests/api/v1/test_language.py +++ b/billingstack/tests/api/v2/test_language.py @@ -19,12 +19,12 @@ import logging -from billingstack.tests.api.base import FunctionalTest +from billingstack.tests.api.v2 import V2Test LOG = logging.getLogger(__name__) -class TestLanguage(FunctionalTest): +class TestLanguage(V2Test): __test__ = True path = "languages" @@ -53,7 +53,7 @@ def test_update_language(self): _, language = self.create_language(fixture=1) url = self.item_path(language['name']) - resp = self.put(url, language) + resp = self.patch_(url, language) self.assertData(resp.json, language) diff --git a/billingstack/tests/api/v1/test_merchant.py b/billingstack/tests/api/v2/test_merchant.py similarity index 88% rename from billingstack/tests/api/v1/test_merchant.py rename to billingstack/tests/api/v2/test_merchant.py index d53e2eb..419a65f 100644 --- a/billingstack/tests/api/v1/test_merchant.py +++ b/billingstack/tests/api/v2/test_merchant.py @@ -17,11 +17,11 @@ Test Merchants """ -from billingstack.tests.api.base import FunctionalTest -from billingstack.api.v1.models import Merchant +from billingstack.tests.api.v2 import V2Test +from billingstack.api.v2.models import Merchant -class TestMerchant(FunctionalTest): +class TestMerchant(V2Test): __test__ = True def fixture(self): @@ -51,7 +51,7 @@ def test_get_merchant(self): def test_update_merchant(self): expected = Merchant.from_db(self.merchant).as_dict() - resp = self.put('merchants/' + self.merchant['id'], expected) + resp = self.patch_('merchants/' + self.merchant['id'], expected) self.assertData(expected, resp.json) diff --git a/billingstack/tests/api/v1/test_payment_method.py b/billingstack/tests/api/v2/test_payment_method.py similarity index 94% rename 
from billingstack/tests/api/v1/test_payment_method.py rename to billingstack/tests/api/v2/test_payment_method.py index ef9b724..3db943b 100644 --- a/billingstack/tests/api/v1/test_payment_method.py +++ b/billingstack/tests/api/v2/test_payment_method.py @@ -19,14 +19,14 @@ import logging -from billingstack.tests.api.base import FunctionalTest +from billingstack.tests.api.v2 import V2Test LOG = logging.getLogger(__name__) -class TestPaymentMethod(FunctionalTest): +class TestPaymentMethod(V2Test): __test__ = True - path = "merchants/%s/customers/%s/payment-methods" + path = "merchants/%s/customers/%s/payment_methods" def setUp(self): super(TestPaymentMethod, self).setUp() @@ -82,7 +82,7 @@ def test_update_payment_method(self): self.customer['id'], method['id']) expected = dict(fixture, name='test2') - resp = self.put(url, expected) + resp = self.patch_(url, expected) self.assertData(expected, resp.json) def test_delete_payment_method(self): diff --git a/billingstack/tests/api/v1/test_plan.py b/billingstack/tests/api/v2/test_plan.py similarity index 93% rename from billingstack/tests/api/v1/test_plan.py rename to billingstack/tests/api/v2/test_plan.py index 10172ae..5cc0360 100644 --- a/billingstack/tests/api/v1/test_plan.py +++ b/billingstack/tests/api/v2/test_plan.py @@ -17,10 +17,10 @@ Test Plans """ -from billingstack.tests.api.base import FunctionalTest +from billingstack.tests.api.v2 import V2Test -class TestPlan(FunctionalTest): +class TestPlan(V2Test): __test__ = True path = "merchants/%s/plans" @@ -54,7 +54,7 @@ def test_update_plan(self): plan['name'] = 'test' url = self.item_path(self.merchant['id'], plan['id']) - resp = self.put(url, plan) + resp = self.patch_(url, plan) self.assertData(resp.json, plan) diff --git a/billingstack/tests/api/v1/test_product.py b/billingstack/tests/api/v2/test_product.py similarity index 93% rename from billingstack/tests/api/v1/test_product.py rename to billingstack/tests/api/v2/test_product.py index c794ccd..3c3ffab 100644 
--- a/billingstack/tests/api/v1/test_product.py +++ b/billingstack/tests/api/v2/test_product.py @@ -19,12 +19,12 @@ import logging -from billingstack.tests.api.base import FunctionalTest +from billingstack.tests.api.v2 import V2Test LOG = logging.getLogger(__name__) -class TestProduct(FunctionalTest): +class TestProduct(V2Test): __test__ = True path = "merchants/%s/products" @@ -57,7 +57,7 @@ def test_update_product(self): product['name'] = 'test' url = self.item_path(self.merchant['id'], product['id']) - resp = self.put(url, product) + resp = self.patch_(url, product) self.assertData(resp.json, product) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index dd482fe..5e91f7c 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -305,6 +305,17 @@ def get_fixture(self, name, fixture=0, values={}): _values.update(values) return _values + def path_get(self, project_file=None): + root = os.path.abspath(os.path.join(os.path.dirname(__file__), + '..', + '..', + ) + ) + if project_file: + return os.path.join(root, project_file) + else: + return root + class Services(dict): def __getattr__(self, name): diff --git a/bin/billingstack-api b/bin/billingstack-api deleted file mode 100644 index c80bb1c..0000000 --- a/bin/billingstack-api +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -import sys -import eventlet - -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service -from billingstack import utils -from billingstack.api import service as api_service -from billingstack.service import prepare_service - - -prepare_service(sys.argv) - -logging.setup('wsme') - -launcher = service.launch(api_service.Service(), - cfg.CONF['service:api'].workers) -launcher.wait() diff --git a/etc/billingstack/api-paste.ini.sample b/etc/billingstack/api-paste.ini.sample deleted file mode 100644 index a55b033..0000000 --- a/etc/billingstack/api-paste.ini.sample +++ /dev/null @@ -1,36 +0,0 @@ -[composite:bs_api] -use = egg:Paste#urlmap -/: bs_api_versions -/v1: bs_core_api_v1 - -[app:bs_api_versions] -paste.app_factory = billingstack.api.versions:factory - -[composite:bs_core_api_v1] -use = call:billingstack.api.auth:pipeline_factory -noauth = noauthcontext faultwrapper bs_core_app_v1 -keystone = authtoken keystonecontext faultwrapper bs_core_app_v1 - -[app:bs_core_app_v1] -paste.app_factory = billingstack.api.v1:factory - -[filter:faultwrapper] -paste.filter_factory = billingstack.api.middleware.errors:FaultWrapperMiddleware.factory - -[filter:noauthcontext] -paste.filter_factory = billingstack.api.auth:NoAuthContextMiddleware.factory - -#[filter:keystonecontext] -#paste.filter_factory = billingstack.api.auth:KeystoneContextMiddleware.factory - -[filter:authtoken] -paste.filter_factory = keystoneclient.middleware.auth_token:filter_factory -service_protocol = http -service_host = 127.0.0.1 -service_port = 5000 -auth_host = 127.0.0.1 -auth_port = 35357 -auth_protocol = http -admin_tenant_name = %SERVICE_TENANT_NAME% -admin_user = %SERVICE_USER% -admin_password = %SERVICE_PASSWORD% diff --git a/setup.cfg b/setup.cfg index 9158949..e1b193e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -27,10 +27,10 @@ packages = scripts = bin/billingstack-db-manage bin/billingstack-manage - 
bin/billingstack-api [entry_points] console_scripts = + billingstack-api = billingstack.api.app:start billingstack-biller = billingstack.biller.service:launch billingstack-central = billingstack.central.service:launch billingstack-collector = billingstack.collector.service:launch diff --git a/tools/pip-requires b/tools/pip-requires index 115534b..6fe90f7 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -5,7 +5,7 @@ argparse cliff>=1.4 eventlet>=0.12.0 extras -flask==0.9 +pecan iso8601>=0.1.4 netaddr oslo.config>=1.1.0 @@ -15,4 +15,4 @@ pycountry routes>=1.12.3 stevedore>=0.9 webob>=1.2.3,<1.3 -wsme>=0.5b2 +https://bitbucket.org/cdevienne/wsme/get/tip.zip#egg=WSME From f9ab0b1633173a28ebf6928528b254de683a5b61 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sun, 4 Aug 2013 15:42:48 +0200 Subject: [PATCH 173/182] Move Payment logic into Collector process. * Adds storage layer to Collector * All code from Central related to payments, credit cards etc moved Change-Id: Ibd2d7d54ded239d7a3b8423c25b3291ca54792b2 --- billingstack/api/v2/controllers/payment.py | 36 +- billingstack/api/v2/models.py | 8 +- billingstack/central/rpcapi.py | 57 -- billingstack/central/service.py | 33 -- .../storage/impl_sqlalchemy/__init__.py | 147 +----- .../central/storage/impl_sqlalchemy/models.py | 109 +--- billingstack/collector/rpcapi.py | 57 ++ billingstack/collector/service.py | 50 +- billingstack/collector/storage/__init__.py | 108 ++++ .../collector/storage/impl_sqlalchemy.py | 258 +++++++++ billingstack/manage/provider.py | 2 +- billingstack/payment_gateway/__init__.py | 2 +- billingstack/rater/storage/__init__.py | 7 - billingstack/sqlalchemy/utils.py | 14 + billingstack/tests/api/base.py | 3 + .../tests/api/v2/test_payment_method.py | 32 +- billingstack/tests/base.py | 26 +- .../tests/central/storage/__init__.py | 249 +++++++++ billingstack/tests/central/storage/base.py | 493 ------------------ .../tests/central/storage/test_sqlalchemy.py | 2 +- 
.../tests/collector/storage/__init__.py | 293 +++++++++++ .../collector/storage/test_sqlalchemy.py | 29 ++ setup.cfg | 4 + tools/resync_storage.py | 2 +- 24 files changed, 1122 insertions(+), 899 deletions(-) create mode 100644 billingstack/collector/storage/__init__.py create mode 100644 billingstack/collector/storage/impl_sqlalchemy.py delete mode 100644 billingstack/tests/central/storage/base.py create mode 100644 billingstack/tests/collector/storage/test_sqlalchemy.py diff --git a/billingstack/api/v2/controllers/payment.py b/billingstack/api/v2/controllers/payment.py index bbf7ad9..8ad9a2f 100644 --- a/billingstack/api/v2/controllers/payment.py +++ b/billingstack/api/v2/controllers/payment.py @@ -19,7 +19,7 @@ from billingstack.api.base import Query, _query_to_criterion, RestController from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api +from billingstack.collector.rpcapi import collector_api class PGProviders(RestController): @@ -27,7 +27,7 @@ class PGProviders(RestController): def get_all(self, q=[]): criterion = _query_to_criterion(q) - rows = central_api.list_pg_providers( + rows = collector_api.list_pg_providers( request.ctxt, criterion=criterion) return map(models.PGProvider.from_db, rows) @@ -39,14 +39,14 @@ def __init__(self, id_): @wsme_pecan.wsexpose(models.PGConfig) def get_all(self): - row = central_api.get_pg_config(request.ctxt, self.id_) + row = collector_api.get_pg_config(request.ctxt, self.id_) return models.PGConfig.from_db(row) @wsme.validate(models.PGConfig) @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig) def patch(self, body): - row = central_api.update_pg_config( + row = collector_api.update_pg_config( request.ctxt, self.id_, body.to_db()) @@ -55,7 +55,7 @@ def patch(self, body): @wsme_pecan.wsexpose(None, status_code=204) def delete(self): - central_api.delete_pg_config(request.ctxt, self.id_) + collector_api.delete_pg_config(request.ctxt, self.id_) class 
PGConfigsController(RestController): @@ -67,10 +67,12 @@ def _lookup(self, method_id, *remainder): @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig, status_code=202) def post(self, body): - row = central_api.create_pg_config( + values = body.to_db() + values['merchant_id'] = request.context['merchant_id'] + + row = collector_api.create_pg_config( request.ctxt, - request.context['merchant_id'], - body.to_db()) + values) return models.PGConfig.from_db(row) @@ -79,7 +81,7 @@ def get_all(self, q=[]): criterion = _query_to_criterion( q, merchant_id=request.context['merchant_id']) - rows = central_api.list_pg_configs( + rows = collector_api.list_pg_configs( request.ctxt, criterion=criterion) return map(models.PGConfig.from_db, rows) @@ -92,14 +94,14 @@ def __init__(self, id_): @wsme_pecan.wsexpose(models.PaymentMethod) def get_all(self): - row = central_api.get_payment_method(request.ctxt, self.id_) + row = collector_api.get_payment_method(request.ctxt, self.id_) return models.PaymentMethod.from_db(row) @wsme.validate(models.PaymentMethod) @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) def patch(self, body): - row = central_api.update_payment_method( + row = collector_api.update_payment_method( request.ctxt, self.id_, body.to_db()) @@ -108,7 +110,7 @@ def patch(self, body): @wsme_pecan.wsexpose(None, status_code=204) def delete(self): - central_api.delete_payment_method(request.ctxt, self.id_) + collector_api.delete_payment_method(request.ctxt, self.id_) class PaymentMethodsController(RestController): @@ -120,10 +122,10 @@ def _lookup(self, method_id, *remainder): @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod, status_code=202) def post(self, body): - row = central_api.create_payment_method( - request.ctxt, - request.context['customer_id'], - body.to_db()) + values = body.to_db() + values['customer_id'] = request.context['customer_id'] + + row = collector_api.create_payment_method(request.ctxt, values) return 
models.PaymentMethod.from_db(row) @@ -133,7 +135,7 @@ def get_all(self, q=[]): q, merchant_id=request.context['merchant_id'], customer_id=request.context['customer_id']) - rows = central_api.list_payment_methods( + rows = collector_api.list_payment_methods( request.ctxt, criterion=criterion) return map(models.PaymentMethod.from_db, rows) diff --git a/billingstack/api/v2/models.py b/billingstack/api/v2/models.py index 144cee4..3831206 100644 --- a/billingstack/api/v2/models.py +++ b/billingstack/api/v2/models.py @@ -155,8 +155,11 @@ class Usage(Base): class PGConfig(Base): name = text title = text + + merchant_id = text provider_id = text + is_default = bool properties = DictType(key_type=text, value_type=property_type) @@ -165,6 +168,7 @@ class PaymentMethod(Base): identifier = text expires = text + merchant_id = text customer_id = text provider_config_id = text @@ -186,15 +190,11 @@ class Merchant(Account): def to_db(self): values = self.as_dict() change_suffixes(values, self._keys, shorten=False) - if 'default_gateway' in values: - values['default_gateway_id'] = values.pop('default_gateway') return values @classmethod def from_db(cls, values): change_suffixes(values, cls._keys) - if 'default_gateway_id' in values: - values['default_gateway'] = values.pop('default_gateway_id') return cls(**values) diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py index 173a788..cbca8be 100644 --- a/billingstack/central/rpcapi.py +++ b/billingstack/central/rpcapi.py @@ -86,63 +86,6 @@ def update_contact_info(self, ctxt, id_, values): def delete_contact_info(self, ctxt, id_): return self.call(ctxt, self.make_msg('delete_contact_info', id_=id_)) - # PGP - def list_pg_providers(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_providers', - criterion=criterion)) - - def get_pg_provider(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_provider', id_=id_)) - - # PGM - def list_pg_methods(self, ctxt, criterion=None): - 
return self.call(ctxt, self.make_msg('list_pg_methods', - criterion=criterion)) - - def get_pg_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_method', id_=id_)) - - def delete_pg_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_pg_method', id_=id_)) - - # PGC - def create_pg_config(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_pg_config', - merchant_id=merchant_id, values=values)) - - def list_pg_configs(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_configs', - criterion=criterion)) - - def get_pg_config(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_config', id_=id_)) - - def update_pg_config(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_pg_config', id_=id_, - values=values)) - - def delete_pg_config(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_pg_config', id_=id_)) - - # PaymentMethod - def create_payment_method(self, ctxt, customer_id, values): - return self.call(ctxt, self.make_msg('create_payment_method', - customer_id=customer_id, values=values)) - - def list_payment_methods(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_payment_methods', - criterion=criterion)) - - def get_payment_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_payment_method', id_=id_)) - - def update_payment_method(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_payment_method', id_=id_, - values=values)) - - def delete_payment_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_payment_method', id_=id_)) - # Merchant def create_merchant(self, ctxt, values): return self.call(ctxt, self.make_msg('create_merchant', values=values)) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index 6f038db..f2dd63d 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -100,39 
+100,6 @@ def list_pg_providers(self, ctxt, **kw): def get_pg_provider(self, ctxt, pgp_id): return self.storage_conn.get_pg_provider(ctxt, pgp_id) - # PGC - def create_pg_config(self, ctxt, merchant_id, values): - return self.storage_conn.create_pg_config(ctxt, merchant_id, values) - - def list_pg_configs(self, ctxt, **kw): - return self.storage_conn.list_pg_configs(ctxt, **kw) - - def get_pg_config(self, ctxt, id_): - return self.storage_conn.get_pg_config(ctxt, id_) - - def update_pg_config(self, ctxt, id_, values): - return self.storage_conn.update_pg_config(ctxt, id_, values) - - def delete_pg_config(self, ctxt, id_): - return self.storage_conn.delete_pg_config(ctxt, id_) - - # PM - def create_payment_method(self, ctxt, customer_id, values): - return self.storage_conn.create_payment_method( - ctxt, customer_id, values) - - def list_payment_methods(self, ctxt, **kw): - return self.storage_conn.list_payment_methods(ctxt, **kw) - - def get_payment_method(self, ctxt, id_, **kw): - return self.storage_conn.get_payment_method(ctxt, id_) - - def update_payment_method(self, ctxt, id_, values): - return self.storage_conn.update_payment_method(ctxt, id_, values) - - def delete_payment_method(self, ctxt, id_): - return self.storage_conn.delete_payment_method(ctxt, id_) - # Merchant def create_merchant(self, ctxt, values): return self.storage_conn.create_merchant(ctxt, values) diff --git a/billingstack/central/storage/impl_sqlalchemy/__init__.py b/billingstack/central/storage/impl_sqlalchemy/__init__.py index a5802b9..60b6434 100644 --- a/billingstack/central/storage/impl_sqlalchemy/__init__.py +++ b/billingstack/central/storage/impl_sqlalchemy/__init__.py @@ -33,16 +33,6 @@ cfg.CONF.register_opts(SQLOPTS, group='central:sqlalchemy') -def filter_merchant_by_join(query, cls, criterion): - if criterion and 'merchant_id' in criterion: - merchant_id = criterion.pop('merchant_id') - if not hasattr(cls, 'merchant_id'): - raise RuntimeError('No merchant_id attribute on %s' % cls) 
- - query = query.join(cls).filter(cls.merchant_id == merchant_id) - return query - - class SQLAlchemyEngine(StorageEngine): __plugin_name__ = 'sqlalchemy' @@ -176,132 +166,6 @@ def update_contact_info(self, ctxt, id_, values): def delete_contact_info(self, ctxt, id_): self._delete(models.ContactInfo, id_) - # Payment Gateway Providers - def pg_provider_register(self, ctxt, values): - """ - Register a Provider and it's Methods - """ - values = values.copy() - methods = values.pop('methods', []) - - query = self.session.query(models.PGProvider)\ - .filter_by(name=values['name']) - - try: - provider = query.one() - except exc.NoResultFound: - provider = models.PGProvider() - - provider.update(values) - - self._set_provider_methods(ctxt, provider, methods) - - self._save(provider) - return self._dict(provider, extra=['methods']) - - def list_pg_providers(self, ctxt, **kw): - """ - List available PG Providers - """ - rows = self._list(models.PGProvider, **kw) - return [self._dict(r, extra=['methods']) for r in rows] - - def get_pg_provider(self, ctxt, pgp_id): - row = self._get(models.PGProvider, pgp_id) - return self._dict(row, extra=['methods']) - - def pg_provider_deregister(self, ctxt, id_): - self._delete(models.PGProvider, id_) - - def _get_provider_methods(self, provider): - """ - Used internally to form a "Map" of the Providers methods - """ - methods = {} - for m in provider.methods: - methods[m.key()] = m - return methods - - def _set_provider_methods(self, ctxt, provider, config_methods): - """Helper method for setting the Methods for a Provider""" - existing = self._get_provider_methods(provider) - for method in config_methods: - self._set_method(provider, method, existing) - - def _set_method(self, provider, method, existing): - key = models.PGMethod.make_key(method) - - if key in existing: - existing[key].update(method) - else: - row = models.PGMethod(**method) - provider.methods.append(row) - - # Payment Gateway Configuration - def create_pg_config(self, 
ctxt, merchant_id, values): - merchant = self._get(models.Merchant, merchant_id) - - row = models.PGConfig(**values) - row.merchant = merchant - - self._save(row) - return dict(row) - - def list_pg_configs(self, ctxt, **kw): - rows = self._list(models.PGConfig, **kw) - return map(dict, rows) - - def get_pg_config(self, ctxt, id_): - row = self._get(models.PGConfig, id_) - return dict(row) - - def update_pg_config(self, ctxt, id_, values): - row = self._update(models.PGConfig, id_, values) - return dict(row) - - def delete_pg_config(self, ctxt, id_): - self._delete(models.PGConfig, id_) - - # PaymentMethod - def create_payment_method(self, ctxt, customer_id, values): - """ - Configure a PaymentMethod like a CreditCard - """ - customer = self._get_id_or_name(models.Customer, customer_id) - - # NOTE: Attempt to see if there's a default gateway if none is - # specified - if not values.get('provider_config_id') and \ - customer.merchant.default_gateway: - values['provider_config_id'] = customer.merchant.default_gateway_id - - row = models.PaymentMethod(**values) - row.customer = customer - - self._save(row) - return self._dict(row) - - def list_payment_methods(self, ctxt, criterion=None, **kw): - query = self.session.query(models.PaymentMethod) - - query = filter_merchant_by_join(query, models.Customer, criterion) - - rows = self._list(query=query, cls=models.PaymentMethod, - criterion=criterion, **kw) - - return [self._dict(row) for row in rows] - - def get_payment_method(self, ctxt, id_, **kw): - row = self._get_id_or_name(models.PaymentMethod, id_) - return self._dict(row) - - def update_payment_method(self, ctxt, id_, values): - row = self._update(models.PaymentMethod, id_, values) - return self._dict(row) - - def delete_payment_method(self, ctxt, id_): - self._delete(models.PaymentMethod, id_) - # Merchant def create_merchant(self, ctxt, values): row = models.Merchant(**values) @@ -595,10 +459,15 @@ def list_subscriptions(self, ctxt, criterion=None, **kw): """ 
query = self.session.query(models.Subscription) - query = filter_merchant_by_join(query, models.Customer, criterion) + # NOTE: Filter needs to be joined for merchant_id + query = db_utils.filter_merchant_by_join( + query, models.Customer, criterion) - rows = self._list(query=query, cls=models.Subscription, - criterion=criterion, **kw) + rows = self._list( + query=query, + cls=models.Subscription, + criterion=criterion, + **kw) return map(self._subscription, rows) diff --git a/billingstack/central/storage/impl_sqlalchemy/models.py b/billingstack/central/storage/impl_sqlalchemy/models.py index 1d9afab..72f578f 100644 --- a/billingstack/central/storage/impl_sqlalchemy/models.py +++ b/billingstack/central/storage/impl_sqlalchemy/models.py @@ -46,62 +46,6 @@ class Language(BASE): title = Column(Unicode(100), nullable=False) -class PGProvider(BASE, BaseMixin): - """ - A Payment Gateway - The thing that processes a Payment Method - - This is registered either by the Admin or by the PaymentGateway plugin - """ - __tablename__ = 'pg_provider' - - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(100)) - description = Column(Unicode(255)) - - properties = Column(JSON) - - methods = relationship( - 'PGMethod', - backref='provider', - lazy='joined') - - def method_map(self): - return self.attrs_map(['provider_methods']) - - -class PGMethod(BASE, BaseMixin): - """ - This represents a PaymentGatewayProviders method with some information - like name, type etc to describe what is in other settings known as a - "CreditCard" - - Example: - A Visa card: {"type": "creditcard", "visa"} - """ - __tablename__ = 'pg_method' - - name = Column(Unicode(100), nullable=False) - title = Column(Unicode(100)) - description = Column(Unicode(255)) - - type = Column(Unicode(100), nullable=False) - properties = Column(JSON) - - # NOTE: This is so a PGMethod can be "owned" by a Provider, meaning that - # other Providers should not be able to use it. 
- provider_id = Column(UUID, ForeignKey( - 'pg_provider.id', - ondelete='CASCADE', - onupdate='CASCADE')) - - @staticmethod - def make_key(data): - return '%(type)s:%(name)s' % data - - def key(self): - return self.make_key(self) - - class ContactInfo(BASE, BaseMixin): """ Contact Information about an entity like a User, Customer etc... @@ -146,20 +90,10 @@ class Merchant(BASE, BaseMixin): title = Column(Unicode(60)) customers = relationship('Customer', backref='merchant') - payment_gateways = relationship( - 'PGConfig', backref='merchant', - primaryjoin='merchant.c.id==pg_config.c.merchant_id') plans = relationship('Plan', backref='merchant') products = relationship('Product', backref='merchant') - default_gateway = relationship( - 'PGConfig', uselist=False, - primaryjoin='merchant.c.id==pg_config.c.merchant_id') - default_gateway_id = Column(UUID, ForeignKey('pg_config.id', - use_alter=True, name='default_gateway'), - nullable=True) - currency = relationship('Currency', uselist=False, backref='merchants') currency_name = Column(Unicode(10), ForeignKey('currency.name'), nullable=False) @@ -169,27 +103,6 @@ class Merchant(BASE, BaseMixin): nullable=False) -class PGConfig(BASE, BaseMixin): - """ - A Merchant's configuration of a PaymentGateway like api keys, url and more - """ - __tablename__ = 'pg_config' - - name = Column(Unicode(100), nullable=False) - title = Column(Unicode(100)) - - properties = Column(JSON) - - # Link to the Merchant - merchant_id = Column(UUID, ForeignKey('merchant.id'), nullable=False) - - provider = relationship('PGProvider', - backref='merchant_configurations') - provider_id = Column(UUID, ForeignKey('pg_provider.id', - onupdate='CASCADE'), - nullable=False) - - class Customer(BASE, BaseMixin): """ A Customer is linked to a Merchant and can have Users related to it @@ -200,8 +113,6 @@ class Customer(BASE, BaseMixin): merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), nullable=False) - payment_methods = 
relationship('PaymentMethod', backref='customer') - contact_info = relationship( 'CustomerInfo', backref='customer', @@ -225,22 +136,6 @@ class Customer(BASE, BaseMixin): language_name = Column(Unicode(10), ForeignKey('language.name')) -class PaymentMethod(BASE, BaseMixin): - name = Column(Unicode(255), nullable=False) - - identifier = Column(Unicode(255), nullable=False) - expires = Column(Unicode(255)) - - properties = Column(JSON) - - customer_id = Column(UUID, ForeignKey('customer.id', onupdate='CASCADE'), - nullable=False) - - provider_config = relationship('PGConfig', backref='payment_methods') - provider_config_id = Column(UUID, ForeignKey('pg_config.id', - onupdate='CASCADE'), nullable=False) - - class Plan(BASE, BaseMixin): """ A Product collection like a "Virtual Web Cluster" with 10 servers @@ -330,6 +225,4 @@ class Subscription(BASE, BaseMixin): customer_id = Column(UUID, ForeignKey('customer.id', ondelete='CASCADE'), nullable=False) - payment_method = relationship('PaymentMethod', backref='subscriptions') - payment_method_id = Column(UUID, ForeignKey('payment_method.id', - ondelete='CASCADE', onupdate='CASCADE')) + payment_method_id = Column(UUID) diff --git a/billingstack/collector/rpcapi.py b/billingstack/collector/rpcapi.py index f678f6e..cb58cd8 100644 --- a/billingstack/collector/rpcapi.py +++ b/billingstack/collector/rpcapi.py @@ -33,5 +33,62 @@ def __init__(self): topic=cfg.CONF.collector_topic, default_version=self.BASE_RPC_VERSION) + # PGP + def list_pg_providers(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_pg_providers', + criterion=criterion)) + + def get_pg_provider(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_pg_provider', id_=id_)) + + # PGM + def list_pg_methods(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_pg_methods', + criterion=criterion)) + + def get_pg_method(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_pg_method', id_=id_)) + + def 
delete_pg_method(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_pg_method', id_=id_)) + + # PGC + def create_pg_config(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_pg_config', + values=values)) + + def list_pg_configs(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_pg_configs', + criterion=criterion)) + + def get_pg_config(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_pg_config', id_=id_)) + + def update_pg_config(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_pg_config', id_=id_, + values=values)) + + def delete_pg_config(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_pg_config', id_=id_)) + + # PaymentMethod + def create_payment_method(self, ctxt, values): + return self.call(ctxt, self.make_msg('create_payment_method', + values=values)) + + def list_payment_methods(self, ctxt, criterion=None): + return self.call(ctxt, self.make_msg('list_payment_methods', + criterion=criterion)) + + def get_payment_method(self, ctxt, id_): + return self.call(ctxt, self.make_msg('get_payment_method', id_=id_)) + + def update_payment_method(self, ctxt, id_, values): + return self.call(ctxt, self.make_msg('update_payment_method', id_=id_, + values=values)) + + def delete_payment_method(self, ctxt, id_): + return self.call(ctxt, self.make_msg('delete_payment_method', id_=id_)) + collector_api = CollectorAPI() diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py index dc5e910..d88aed3 100644 --- a/billingstack/collector/service.py +++ b/billingstack/collector/service.py @@ -23,6 +23,7 @@ from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service from billingstack.openstack.common import service as os_service +from billingstack.storage.utils import get_connection from billingstack.central.rpcapi import CentralAPI from billingstack import service as bs_service @@ 
-47,26 +48,49 @@ def __init__(self, *args, **kwargs): # Get a storage connection self.central_api = CentralAPI() + def start(self): + self.storage_conn = get_connection('collector') + super(Service, self).start() + def wait(self): super(Service, self).wait() self.conn.consumer_thread.wait() - def get_pg_provider(self, ctxt, pg_info): - """ - Work out a PGC config either from pg_info or via ctxt fetching it - from central. - Return the appropriate PGP for this info. + # PGP + def list_pg_providers(self, ctxt, **kw): + return self.storage_conn.list_pg_providers(ctxt, **kw) + + # PGC + def create_pg_config(self, ctxt, values): + return self.storage_conn.create_pg_config(ctxt, values) + + def list_pg_configs(self, ctxt, **kw): + return self.storage_conn.list_pg_configs(ctxt, **kw) + + def get_pg_config(self, ctxt, id_): + return self.storage_conn.get_pg_config(ctxt, id_) + + def update_pg_config(self, ctxt, id_, values): + return self.storage_conn.update_pg_config(ctxt, id_, values) + + def delete_pg_config(self, ctxt, id_): + return self.storage_conn.delete_pg_config(ctxt, id_) + + # PM + def create_payment_method(self, ctxt, values): + return self.storage_conn.create_payment_method(ctxt, values) + + def list_payment_methods(self, ctxt, **kw): + return self.storage_conn.list_payment_methods(ctxt, **kw) - :param ctxt: Request context - :param pg_info: Payment Gateway Config... 
- """ + def get_payment_method(self, ctxt, id_, **kw): + return self.storage_conn.get_payment_method(ctxt, id_) - def create_account(self, ctxt, values, pg_config=None): - """ - Create an Account on the underlying provider + def update_payment_method(self, ctxt, id_, values): + return self.storage_conn.update_payment_method(ctxt, id_, values) - :param values: The account values - """ + def delete_payment_method(self, ctxt, id_): + return self.storage_conn.delete_payment_method(ctxt, id_) def launch(): diff --git a/billingstack/collector/storage/__init__.py b/billingstack/collector/storage/__init__.py new file mode 100644 index 0000000..1fa53f1 --- /dev/null +++ b/billingstack/collector/storage/__init__.py @@ -0,0 +1,108 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from billingstack.storage import base + + +class StorageEngine(base.StorageEngine): + """Base class for the collector storage""" + __plugin_ns__ = 'billingstack.collector.storage' + + +class Connection(base.Connection): + """Define the base API for collector storage""" + def pg_provider_register(self): + """ + Register a Provider and it's Methods + """ + raise NotImplementedError + + def list_pg_providers(self, ctxt, **kw): + """ + List available PG Providers + """ + raise NotImplementedError + + def get_pg_provider(self, ctxt, id_): + """ + Get a PaymentGateway Provider + """ + raise NotImplementedError + + def pg_provider_deregister(self, ctxt, id_): + """ + De-register a PaymentGateway Provider (Plugin) and all it's methods + """ + raise NotImplementedError + + def create_pg_config(self, ctxt, values): + """ + Create a PaymentGateway Configuration + """ + raise NotImplementedError + + def list_pg_configs(self, ctxt, **kw): + """ + List PaymentGateway Configurations + """ + raise NotImplementedError + + def get_pg_config(self, ctxt, id_): + """ + Get a PaymentGateway Configuration + """ + raise NotImplementedError + + def update_pg_config(self, ctxt, id_, values): + """ + Update a PaymentGateway Configuration + """ + raise NotImplementedError + + def delete_pg_config(self, ctxt, id_): + """ + Delete a PaymentGateway Configuration + """ + raise NotImplementedError + + def create_payment_method(self, ctxt, values): + """ + Configure a PaymentMethod like a CreditCard + """ + raise NotImplementedError + + def list_payment_methods(self, ctxt, criterion=None, **kw): + """ + List a Customer's PaymentMethods + """ + raise NotImplementedError + + def get_payment_method(self, ctxt, id_, **kw): + """ + Get a Customer's PaymentMethod + """ + raise NotImplementedError + + def update_payment_method(self, ctxt, id_, values): + """ + Update a Customer's PaymentMethod + """ + raise NotImplementedError + + def delete_payment_method(self, ctxt, id_): + """ + Delete a Customer's 
PaymentMethod + """ + raise NotImplementedError diff --git a/billingstack/collector/storage/impl_sqlalchemy.py b/billingstack/collector/storage/impl_sqlalchemy.py new file mode 100644 index 0000000..8db8695 --- /dev/null +++ b/billingstack/collector/storage/impl_sqlalchemy.py @@ -0,0 +1,258 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from oslo.config import cfg + + +from sqlalchemy import Column, ForeignKey +from sqlalchemy import Unicode +from sqlalchemy.orm import exc, relationship +from sqlalchemy.ext.declarative import declarative_base + +from billingstack.collector.storage import Connection, StorageEngine +from billingstack.openstack.common import log as logging +from billingstack.sqlalchemy.types import JSON, UUID +from billingstack.sqlalchemy import api, model_base, session, utils + + +LOG = logging.getLogger(__name__) + + +BASE = declarative_base(cls=model_base.ModelBase) + + +cfg.CONF.register_group(cfg.OptGroup( + name='collector:sqlalchemy', + title='Config for collector sqlalchemy plugin')) + +cfg.CONF.register_opts(session.SQLOPTS, group='collector:sqlalchemy') + + +class PGProvider(BASE, model_base.BaseMixin): + """ + A Payment Gateway - The thing that processes a Payment Method + + This is registered either by the Admin or by the PaymentGateway plugin + """ + __tablename__ = 'pg_provider' + + name = Column(Unicode(60), nullable=False) + title = Column(Unicode(100)) + description = 
Column(Unicode(255)) + + properties = Column(JSON) + + methods = relationship( + 'PGMethod', + backref='provider', + lazy='joined') + + def method_map(self): + return self.attrs_map(['provider_methods']) + + +class PGMethod(BASE, model_base.BaseMixin): + """ + This represents a PaymentGatewayProviders method with some information + like name, type etc to describe what is in other settings known as a + "CreditCard" + + Example: + A Visa card: {"type": "creditcard", "visa"} + """ + __tablename__ = 'pg_method' + + name = Column(Unicode(100), nullable=False) + title = Column(Unicode(100)) + description = Column(Unicode(255)) + + type = Column(Unicode(100), nullable=False) + properties = Column(JSON) + + # NOTE: This is so a PGMethod can be "owned" by a Provider, meaning that + # other Providers should not be able to use it. + provider_id = Column(UUID, ForeignKey( + 'pg_provider.id', + ondelete='CASCADE', + onupdate='CASCADE')) + + @staticmethod + def make_key(data): + return '%(type)s:%(name)s' % data + + def key(self): + return self.make_key(self) + + +class PGConfig(BASE, model_base.BaseMixin): + """ + A Merchant's configuration of a PaymentGateway like api keys, url and more + """ + __tablename__ = 'pg_config' + + name = Column(Unicode(100), nullable=False) + title = Column(Unicode(100)) + + properties = Column(JSON) + + # Link to the Merchant + merchant_id = Column(UUID, nullable=False) + + provider = relationship('PGProvider', + backref='merchant_configurations') + provider_id = Column(UUID, ForeignKey('pg_provider.id', + onupdate='CASCADE'), + nullable=False) + + +class PaymentMethod(BASE, model_base.BaseMixin): + name = Column(Unicode(255), nullable=False) + + identifier = Column(Unicode(255), nullable=False) + expires = Column(Unicode(255)) + + properties = Column(JSON) + + customer_id = Column(UUID, nullable=False) + + provider_config = relationship('PGConfig', backref='payment_methods', + lazy='joined') + provider_config_id = Column(UUID, 
ForeignKey('pg_config.id', + onupdate='CASCADE'), nullable=False) + + +class SQLAlchemyEngine(StorageEngine): + __plugin_name__ = 'sqlalchemy' + + def get_connection(self): + return Connection() + + +class Connection(Connection, api.HelpersMixin): + def __init__(self): + self.setup('collector:sqlalchemy') + + def base(self): + return BASE + + # Payment Gateway Providers + def pg_provider_register(self, ctxt, values): + values = values.copy() + methods = values.pop('methods', []) + + query = self.session.query(PGProvider)\ + .filter_by(name=values['name']) + + try: + provider = query.one() + except exc.NoResultFound: + provider = PGProvider() + + provider.update(values) + + self._set_provider_methods(ctxt, provider, methods) + + self._save(provider) + return self._dict(provider, extra=['methods']) + + def list_pg_providers(self, ctxt, **kw): + rows = self._list(PGProvider, **kw) + return [self._dict(r, extra=['methods']) for r in rows] + + def get_pg_provider(self, ctxt, id_, **kw): + row = self._get(PGProvider, id_) + return self._dict(row, extra=['methods']) + + def pg_provider_deregister(self, ctxt, id_): + self._delete(PGProvider, id_) + + def _get_provider_methods(self, provider): + """ + Used internally to form a "Map" of the Providers methods + """ + methods = {} + for m in provider.methods: + methods[m.key()] = m + return methods + + def _set_provider_methods(self, ctxt, provider, config_methods): + """Helper method for setting the Methods for a Provider""" + existing = self._get_provider_methods(provider) + for method in config_methods: + self._set_method(provider, method, existing) + + def _set_method(self, provider, method, existing): + key = PGMethod.make_key(method) + + if key in existing: + existing[key].update(method) + else: + row = PGMethod(**method) + provider.methods.append(row) + + # Payment Gateway Configuration + def create_pg_config(self, ctxt, values): + row = PGConfig(**values) + + self._save(row) + return dict(row) + + def 
list_pg_configs(self, ctxt, **kw): + rows = self._list(PGConfig, **kw) + return map(dict, rows) + + def get_pg_config(self, ctxt, id_, **kw): + row = self._get(PGConfig, id_, **kw) + return dict(row) + + def update_pg_config(self, ctxt, id_, values): + row = self._update(PGConfig, id_, values) + return dict(row) + + def delete_pg_config(self, ctxt, id_): + self._delete(PGConfig, id_) + + # PaymentMethod + def create_payment_method(self, ctxt, values): + row = PaymentMethod(**values) + + self._save(row) + return self._dict(row) + + def list_payment_methods(self, ctxt, criterion=None, **kw): + query = self.session.query(PaymentMethod) + + # NOTE: Filter needs to be joined for merchant_id + query = utils.filter_merchant_by_join( + query, PGConfig, criterion) + + rows = self._list( + cls=PaymentMethod, + query=query, + criterion=criterion, + **kw) + + return [self._dict(row) for row in rows] + + def get_payment_method(self, ctxt, id_, **kw): + row = self._get_id_or_name(PaymentMethod, id_) + return self._dict(row) + + def update_payment_method(self, ctxt, id_, values): + row = self._update(PaymentMethod, id_, values) + return self._dict(row) + + def delete_payment_method(self, ctxt, id_): + self._delete(PaymentMethod, id_) diff --git a/billingstack/manage/provider.py b/billingstack/manage/provider.py index faa5ecb..d23e1c3 100644 --- a/billingstack/manage/provider.py +++ b/billingstack/manage/provider.py @@ -31,7 +31,7 @@ def execute(self, parsed_args): class ProvidersList(DatabaseCommand, ListCommand): def execute(self, parsed_args): context = get_admin_context() - conn = self.get_connection('central') + conn = self.get_connection('collector') data = conn.list_pg_providers(context) diff --git a/billingstack/payment_gateway/__init__.py b/billingstack/payment_gateway/__init__.py index 265a015..a9dcf32 100644 --- a/billingstack/payment_gateway/__init__.py +++ b/billingstack/payment_gateway/__init__.py @@ -47,7 +47,7 @@ def _register(ep, context, conn): def 
register_providers(context): - conn = get_connection('central') + conn = get_connection('collector') em = ExtensionManager(Provider.__plugin_ns__) em.map(_register, context, conn) diff --git a/billingstack/rater/storage/__init__.py b/billingstack/rater/storage/__init__.py index 078b25c..6402efe 100644 --- a/billingstack/rater/storage/__init__.py +++ b/billingstack/rater/storage/__init__.py @@ -13,7 +13,6 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. -from oslo.config import cfg from billingstack.storage import base @@ -38,9 +37,3 @@ def update_usage(self, ctxt, id_, values): def delete_usage(self, ctxt, id_): raise NotImplementedError - - -def get_connection(): - name = cfg.CONF['service:rater'].storage_driver - plugin = StorageEngine.get_plugin(name, invoke_on_load=True) - return plugin.get_connection() diff --git a/billingstack/sqlalchemy/utils.py b/billingstack/sqlalchemy/utils.py index 50e0465..e8ad070 100644 --- a/billingstack/sqlalchemy/utils.py +++ b/billingstack/sqlalchemy/utils.py @@ -42,3 +42,17 @@ def is_valid_id(id_): return True else: return False + + +def filter_merchant_by_join(query, cls, criterion, pop=True): + if criterion and 'merchant_id' in criterion: + if not hasattr(cls, 'merchant_id'): + raise RuntimeError('No merchant_id attribute on %s' % cls) + + query = query.join(cls).filter( + cls.merchant_id == criterion['merchant_id']) + + if pop: + criterion.pop('merchant_id') + + return query diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py index 294e605..5dace2d 100644 --- a/billingstack/tests/api/base.py +++ b/billingstack/tests/api/base.py @@ -162,6 +162,9 @@ def setUp(self): # NOTE: Needs to be started after the db schema is created self.start_storage('central') self.start_service('central') + + self.start_storage('collector') + self.start_service('collector') self.setSamples() self.app = 
self.make_app() diff --git a/billingstack/tests/api/v2/test_payment_method.py b/billingstack/tests/api/v2/test_payment_method.py index 3db943b..cf3849e 100644 --- a/billingstack/tests/api/v2/test_payment_method.py +++ b/billingstack/tests/api/v2/test_payment_method.py @@ -30,11 +30,16 @@ class TestPaymentMethod(V2Test): def setUp(self): super(TestPaymentMethod, self).setUp() + self.start_storage('collector') + self.start_service('collector') _, self.provider = self.pg_provider_register() _, self.customer = self.create_customer(self.merchant['id']) - _, self.pg_config = self.create_pg_config( - self.merchant['id'], values={'provider_id': self.provider['id']}) + + values = { + 'provider_id': self.provider['id'], + 'merchant_id': self.merchant['id']} + _, self.pg_config = self.create_pg_config(values=values) def test_create_payment_method(self): fixture = self.get_fixture('payment_method') @@ -48,9 +53,10 @@ def test_create_payment_method(self): def test_list_payment_methods(self): values = { - 'provider_config_id': self.pg_config['id'] + 'provider_config_id': self.pg_config['id'], + 'customer_id': self.customer['id'] } - self.create_payment_method(self.customer['id'], values=values) + self.create_payment_method(values=values) url = self.path % (self.merchant['id'], self.customer['id']) resp = self.get(url) @@ -59,10 +65,10 @@ def test_list_payment_methods(self): def test_get_payment_method(self): values = { - 'provider_config_id': self.pg_config['id'] + 'provider_config_id': self.pg_config['id'], + 'customer_id': self.customer['id'] } - _, method = self.create_payment_method( - self.customer['id'], values=values) + _, method = self.create_payment_method(values=values) url = self.item_path(self.merchant['id'], self.customer['id'], method['id']) @@ -73,10 +79,10 @@ def test_get_payment_method(self): def test_update_payment_method(self): values = { - 'provider_config_id': self.pg_config['id'] + 'provider_config_id': self.pg_config['id'], + 'customer_id': 
self.customer['id'] } - fixture, method = self.create_payment_method( - self.customer['id'], values=values) + fixture, method = self.create_payment_method(values=values) url = self.item_path(self.merchant['id'], self.customer['id'], method['id']) @@ -87,10 +93,10 @@ def test_update_payment_method(self): def test_delete_payment_method(self): values = { - 'provider_config_id': self.pg_config['id'] + 'provider_config_id': self.pg_config['id'], + 'customer_id': self.customer['id'] } - _, method = self.create_payment_method( - self.customer['id'], values=values) + _, method = self.create_payment_method(values=values) url = self.item_path(self.merchant['id'], self.customer['id'], method['id']) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py index 5e91f7c..71db82b 100644 --- a/billingstack/tests/base.py +++ b/billingstack/tests/base.py @@ -117,8 +117,12 @@ def migrate(self): def post_init(self): if self.fixture.database_connection == "sqlite://": conn = self.fixture.connection.engine.connect() - self._as_string = "".join( - l for l in conn.connection.iterdump()) + try: + self._as_string = "".join( + l for l in conn.connection.iterdump()) + except Exception: + print "".join(l for l in conn.connection.iterdump()) + raise self.fixture.connection.engine.dispose() else: cleandb = paths.state_path_rel(self.sqlite_clean_db) @@ -338,8 +342,8 @@ def setUp(self): # NOTE: No services up by default self.services = Services() - def get_admin_context(self): - return get_admin_context() + def get_admin_context(self, **kw): + return get_admin_context(**kw) def get_context(self, **kw): return RequestContext(**kw) @@ -431,7 +435,7 @@ def pg_provider_register(self, fixture=0, values={}, **kw): fixture['methods'] = [self.get_fixture('pg_method')] ctxt = kw.pop('context', self.admin_ctxt) - data = self.services.central.storage_conn.pg_provider_register( + data = self.services.collector.storage_conn.pg_provider_register( ctxt, fixture, **kw) return fixture, data @@ -445,12 
+449,12 @@ def create_merchant(self, fixture=0, values={}, **kw): return fixture, self.services.central.create_merchant( ctxt, fixture, **kw) - def create_pg_config(self, merchant_id, fixture=0, values={}, + def create_pg_config(self, fixture=0, values={}, **kw): fixture = self.get_fixture('pg_config', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_pg_config( - ctxt, merchant_id, fixture, **kw) + return fixture, self.services.collector.create_pg_config( + ctxt, fixture, **kw) def create_customer(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('customer', fixture, values) @@ -459,11 +463,11 @@ def create_customer(self, merchant_id, fixture=0, values={}, **kw): return fixture, self.services.central.create_customer( ctxt, merchant_id, fixture, **kw) - def create_payment_method(self, customer_id, fixture=0, values={}, **kw): + def create_payment_method(self, fixture=0, values={}, **kw): fixture = self.get_fixture('payment_method', fixture, values) ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_payment_method( - ctxt, customer_id, fixture, **kw) + return fixture, self.services.collector.create_payment_method( + ctxt, fixture, **kw) def user_add(self, merchant_id, fixture=0, values={}, **kw): fixture = self.get_fixture('user', fixture, values) diff --git a/billingstack/tests/central/storage/__init__.py b/billingstack/tests/central/storage/__init__.py index e69de29..bb6ed54 100644 --- a/billingstack/tests/central/storage/__init__.py +++ b/billingstack/tests/central/storage/__init__.py @@ -0,0 +1,249 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from billingstack.openstack.common import log as logging +from billingstack.central.storage.impl_sqlalchemy import models + + +LOG = logging.getLogger(__name__) + + +UUID = 'caf771fc-6b05-4891-bee1-c2a48621f57b' + + +class DriverMixin(object): + def create_language(self, fixture=0, values={}, **kw): + fixture = self.get_fixture('language', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_language(ctxt, fixture, **kw) + + def create_currency(self, fixture=0, values={}, **kw): + fixture = self.get_fixture('currency', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_currency(ctxt, fixture, **kw) + + def create_merchant(self, fixture=0, values={}, **kw): + fixture = self.get_fixture('merchant', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + + self._account_defaults(fixture) + + return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) + + def create_customer(self, merchant_id, fixture=0, values={}, **kw): + fixture = self.get_fixture('customer', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + self._account_defaults(fixture) + return fixture, self.storage_conn.create_customer( + ctxt, merchant_id, fixture, **kw) + + def create_product(self, merchant_id, fixture=0, values={}, **kw): + fixture = self.get_fixture('product', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_product( + ctxt, merchant_id, fixture, **kw) + + def create_plan(self, merchant_id, 
fixture=0, values={}, **kw): + fixture = self.get_fixture('plan', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_plan( + ctxt, merchant_id, fixture, **kw) + + # Currencies + def test_create_currency(self): + self.assertDuplicate(self.create_currency) + + # Languages + def test_create_language(self): + self.assertDuplicate(self.create_language) + + def test_set_properties(self): + fixture, data = self.create_product(self.merchant['id']) + + metadata = {"random": True} + self.storage_conn.set_properties(data['id'], metadata, + cls=models.Product) + + metadata.update({'foo': 1, 'bar': 2}) + self.storage_conn.set_properties(data['id'], metadata, + cls=models.Product) + + actual = self.storage_conn.get_product(self.admin_ctxt, data['id']) + self.assertLen(6, actual['properties']) + + # Merchant + def test_create_merchant(self): + fixture, data = self.create_merchant() + self.assertData(fixture, data) + + def test_get_merchant(self): + _, expected = self.create_merchant() + actual = self.storage_conn.get_merchant( + self.admin_ctxt, expected['id']) + self.assertData(expected, actual) + + def test_get_merchant_missing(self): + self.assertMissing(self.storage_conn.get_merchant, + self.admin_ctxt, UUID) + + def test_update_merchant(self): + fixture, data = self.create_merchant() + + fixture['name'] = 'test' + updated = self.storage_conn.update_merchant( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_merchant_missing(self): + self.assertMissing(self.storage_conn.update_merchant, + self.admin_ctxt, UUID, {}) + + def test_delete_merchant(self): + self.storage_conn.delete_merchant(self.admin_ctxt, self.merchant['id']) + self.assertMissing(self.storage_conn.get_merchant, + self.admin_ctxt, self.merchant['id']) + + def test_delete_merchant_missing(self): + self.assertMissing(self.storage_conn.delete_merchant, + self.admin_ctxt, UUID) + + # Customer + def 
test_create_customer(self): + fixture, data = self.create_customer(self.merchant['id']) + assert data['default_info'] == {} + assert data['contact_info'] == [] + self.assertData(fixture, data) + + def test_create_customer_with_contact_info(self): + contact_fixture = self.get_fixture('contact_info') + customer_fixture, data = self.create_customer( + self.merchant['id'], + values={'contact_info': contact_fixture}) + self.assertData(customer_fixture, data) + self.assertData(contact_fixture, data['default_info']) + self.assertData(contact_fixture, data['contact_info'][0]) + + def test_get_customer(self): + _, expected = self.create_customer(self.merchant['id']) + actual = self.storage_conn.get_customer( + self.admin_ctxt, expected['id']) + self.assertData(expected, actual) + + def test_get_customer_missing(self): + self.assertMissing(self.storage_conn.get_customer, + self.admin_ctxt, UUID) + + def test_update_customer(self): + fixture, data = self.create_customer(self.merchant['id']) + + fixture['name'] = 'test' + updated = self.storage_conn.update_customer( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_customer_missing(self): + self.assertMissing(self.storage_conn.update_customer, + self.admin_ctxt, UUID, {}) + + def test_delete_customer(self): + _, data = self.create_customer(self.merchant['id']) + self.storage_conn.delete_customer(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_customer, + self.admin_ctxt, data['id']) + + def test_delete_customer_missing(self): + self.assertMissing(self.storage_conn.delete_customer, + self.admin_ctxt, UUID) + + # Products + def test_create_product(self): + f, data = self.create_product(self.merchant['id']) + self.assertData(f, data) + + def test_get_product(self): + f, expected = self.create_product(self.merchant['id']) + actual = self.storage_conn.get_product(self.admin_ctxt, expected['id']) + self.assertData(expected, actual) + + def 
test_get_product_missing(self): + self.assertMissing(self.storage_conn.get_product, + self.admin_ctxt, UUID) + + def test_update_product(self): + fixture, data = self.create_product(self.merchant['id']) + + fixture['name'] = 'test' + updated = self.storage_conn.update_product( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_product_missing(self): + self.assertMissing(self.storage_conn.update_product, + self.admin_ctxt, UUID, {}) + + def test_delete_product(self): + fixture, data = self.create_product(self.merchant['id']) + self.storage_conn.delete_product(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_product, + self.admin_ctxt, data['id']) + + def test_delete_product_missing(self): + self.assertMissing(self.storage_conn.delete_product, + self.admin_ctxt, UUID) + + # Plan + def test_create_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + self.assertData(fixture, data) + + def test_get_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + actual = self.storage_conn.get_plan(self.admin_ctxt, data['id']) + + # FIXME(ekarlso): This should test the actual items also? But atm + # there's am error that if the value is int when getting added it's + # string when returned... 
+ self.assertEqual(data['name'], actual['name']) + self.assertEqual(data['title'], actual['title']) + self.assertEqual(data['description'], actual['description']) + + def test_get_plan_missing(self): + self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, UUID) + + def test_update_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + + fixture['name'] = 'test' + updated = self.storage_conn.update_plan( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_plan_missing(self): + self.assertMissing(self.storage_conn.update_plan, + self.admin_ctxt, UUID, {}) + + def test_delete_plan(self): + fixture, data = self.create_plan(self.merchant['id']) + self.storage_conn.delete_plan(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_plan, + self.admin_ctxt, data['id']) + + def test_delete_plan_missing(self): + self.assertMissing(self.storage_conn.delete_plan, + self.admin_ctxt, UUID) diff --git a/billingstack/tests/central/storage/base.py b/billingstack/tests/central/storage/base.py deleted file mode 100644 index 1a4c3b6..0000000 --- a/billingstack/tests/central/storage/base.py +++ /dev/null @@ -1,493 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from billingstack.openstack.common import log as logging -from billingstack.central.storage.impl_sqlalchemy import models - - -LOG = logging.getLogger(__name__) - - -UUID = 'caf771fc-6b05-4891-bee1-c2a48621f57b' - - -class DriverMixin(object): - def create_language(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('language', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_language(ctxt, fixture, **kw) - - def create_currency(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('currency', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_currency(ctxt, fixture, **kw) - - def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): - methods = [self.get_fixture('pg_method')] or methods - if not 'methods' in values: - values['methods'] = methods - - fixture = self.get_fixture('pg_provider', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - data = self.storage_conn.pg_provider_register( - ctxt, fixture.copy(), **kw) - - return fixture, data - - def create_merchant(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('merchant', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - self._account_defaults(fixture) - - return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) - - def create_pg_config(self, merchant_id, fixture=0, values={}, - **kw): - fixture = self.get_fixture('pg_config', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_pg_config( - ctxt, merchant_id, fixture, **kw) - - def create_customer(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('customer', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - self._account_defaults(fixture) - return fixture, self.storage_conn.create_customer( - ctxt, merchant_id, fixture, **kw) - - def create_payment_method(self, 
customer_id, fixture=0, - values={}, **kw): - fixture = self.get_fixture('payment_method', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_payment_method( - ctxt, customer_id, fixture, **kw) - - def create_product(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('product', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_product( - ctxt, merchant_id, fixture, **kw) - - def create_plan(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('plan', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_plan( - ctxt, merchant_id, fixture, **kw) - - # Currencies - def test_create_currency(self): - self.assertDuplicate(self.create_currency) - - # Languages - def test_create_language(self): - self.assertDuplicate(self.create_language) - - def test_set_properties(self): - fixture, data = self.create_product(self.merchant['id']) - - metadata = {"random": True} - self.storage_conn.set_properties(data['id'], metadata, - cls=models.Product) - - metadata.update({'foo': 1, 'bar': 2}) - self.storage_conn.set_properties(data['id'], metadata, - cls=models.Product) - - actual = self.storage_conn.get_product(self.admin_ctxt, data['id']) - self.assertLen(6, actual['properties']) - - # Payment Gateways - def test_pg_provider_register(self): - fixture, actual = self.pg_provider_register() - self.assertEqual(fixture['name'], actual['name']) - self.assertEqual(fixture['title'], actual['title']) - self.assertEqual(fixture['description'], actual['description']) - self.assertData(fixture['methods'][0], actual['methods'][0]) - - def test_pg_provider_register_different_methods(self): - # Add a Global method - method1 = {'type': 'creditcard', 'name': 'mastercard'} - method2 = {'type': 'creditcard', 'name': 'amex'} - method3 = {'type': 'creditcard', 'name': 'visa'} - - provider = {'name': 
'noop', 'methods': [method1, method2, method3]} - - provider = self.storage_conn.pg_provider_register( - self.admin_ctxt, provider) - - # TODO(ekarls): Make this more extensive? - self.assertLen(3, provider['methods']) - - def test_get_pg_provider(self): - _, expected = self.pg_provider_register() - actual = self.storage_conn.get_pg_provider(self.admin_ctxt, - expected['id']) - self.assertData(expected, actual) - - def test_get_pg_provider_missing(self): - self.assertMissing(self.storage_conn.get_pg_provider, - self.admin_ctxt, UUID) - - def test_pg_provider_deregister(self): - _, data = self.pg_provider_register() - self.storage_conn.pg_provider_deregister(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.pg_provider_deregister, - self.admin_ctxt, data['id']) - - def test_pg_provider_deregister_missing(self): - self.assertMissing(self.storage_conn.pg_provider_deregister, - self.admin_ctxt, UUID) - - # Payment Gateway Configuration - def test_create_pg_config(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - self.assertData(fixture, data) - - def test_get_pg_config(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - def test_get_pg_config_missing(self): - self.assertMissing(self.storage_conn.get_pg_config, - self.admin_ctxt, UUID) - - def test_update_pg_config(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - fixture['properties'] = {"api": 1} - updated = self.storage_conn.update_pg_config( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_pg_config_missing(self): - _, provider = self.pg_provider_register() - - values 
= {'provider_id': provider['id']} - - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - self.assertMissing(self.storage_conn.update_pg_config, - self.admin_ctxt, UUID, {}) - - def test_delete_pg_config(self): - _, provider = self.pg_provider_register() - - values = {'provider_id': provider['id']} - - fixture, data = self.create_pg_config( - self.merchant['id'], values=values) - - self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_pg_config, - self.admin_ctxt, data['id']) - - def test_delete_pg_config_missing(self): - self.assertMissing(self.storage_conn.delete_pg_config, - self.admin_ctxt, UUID) - - # PaymentMethod - def test_create_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - _, customer = self.create_customer(self.merchant['id']) - - # Setup PaymentMethod - values = { - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method( - customer['id'], values=values) - self.assertData(fixture, data) - - def test_get_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - _, customer = self.create_customer(self.merchant['id']) - - # Setup PaymentMethod - values = { - 'provider_config_id': config['id']} - - _, expected = self.create_payment_method( - customer['id'], values=values) - actual = self.storage_conn.get_payment_method(self.admin_ctxt, - expected['id']) - self.assertData(expected, actual) - - # TODO(ekarlso): Make this test more extensive? 
- def test_list_payment_methods(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - - values = { - 'provider_config_id': config['id']} - - # Add two Customers with some methods - _, customer1 = self.create_customer(self.merchant['id']) - self.create_payment_method( - customer1['id'], values=values) - rows = self.storage_conn.list_payment_methods( - self.admin_ctxt, - criterion={'customer_id': customer1['id']}) - self.assertLen(1, rows) - - _, customer2 = self.create_customer(self.merchant['id']) - self.create_payment_method( - customer2['id'], values=values) - self.create_payment_method( - customer2['id'], values=values) - rows = self.storage_conn.list_payment_methods( - self.admin_ctxt, - criterion={'customer_id': customer2['id']}) - self.assertLen(2, rows) - - def test_get_payment_method_missing(self): - self.assertMissing(self.storage_conn.get_payment_method, - self.admin_ctxt, UUID) - - def test_update_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - _, customer = self.create_customer(self.merchant['id']) - - # Setup PaymentMethod - values = { - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method( - customer['id'], values=values) - - fixture['identifier'] = 1 - updated = self.storage_conn.update_payment_method(self.admin_ctxt, - data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_payment_method_missing(self): - self.assertMissing(self.storage_conn.update_payment_method, - self.admin_ctxt, UUID, {}) - - def test_delete_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - _, config = self.create_pg_config( - self.merchant['id'], values={'provider_id': provider['id']}) - _, customer = 
self.create_customer(self.merchant['id']) - - # Setup PaymentMethod - values = { - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method( - customer['id'], values=values) - - self.storage_conn.delete_payment_method(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_payment_method, - self.admin_ctxt, data['id']) - - def test_delete_payment_method_missing(self): - self.assertMissing(self.storage_conn.delete_payment_method, - self.admin_ctxt, UUID) - - # Merchant - def test_create_merchant(self): - fixture, data = self.create_merchant() - self.assertData(fixture, data) - - def test_get_merchant(self): - _, expected = self.create_merchant() - actual = self.storage_conn.get_merchant( - self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_merchant_missing(self): - self.assertMissing(self.storage_conn.get_merchant, - self.admin_ctxt, UUID) - - def test_update_merchant(self): - fixture, data = self.create_merchant() - - fixture['name'] = 'test' - updated = self.storage_conn.update_merchant( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_merchant_missing(self): - self.assertMissing(self.storage_conn.update_merchant, - self.admin_ctxt, UUID, {}) - - def test_delete_merchant(self): - self.storage_conn.delete_merchant(self.admin_ctxt, self.merchant['id']) - self.assertMissing(self.storage_conn.get_merchant, - self.admin_ctxt, self.merchant['id']) - - def test_delete_merchant_missing(self): - self.assertMissing(self.storage_conn.delete_merchant, - self.admin_ctxt, UUID) - - # Customer - def test_create_customer(self): - fixture, data = self.create_customer(self.merchant['id']) - assert data['default_info'] == {} - assert data['contact_info'] == [] - self.assertData(fixture, data) - - def test_create_customer_with_contact_info(self): - contact_fixture = self.get_fixture('contact_info') - customer_fixture, data = self.create_customer( - 
self.merchant['id'], - values={'contact_info': contact_fixture}) - self.assertData(customer_fixture, data) - self.assertData(contact_fixture, data['default_info']) - self.assertData(contact_fixture, data['contact_info'][0]) - - def test_get_customer(self): - _, expected = self.create_customer(self.merchant['id']) - actual = self.storage_conn.get_customer( - self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_customer_missing(self): - self.assertMissing(self.storage_conn.get_customer, - self.admin_ctxt, UUID) - - def test_update_customer(self): - fixture, data = self.create_customer(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_customer( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_customer_missing(self): - self.assertMissing(self.storage_conn.update_customer, - self.admin_ctxt, UUID, {}) - - def test_delete_customer(self): - _, data = self.create_customer(self.merchant['id']) - self.storage_conn.delete_customer(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_customer, - self.admin_ctxt, data['id']) - - def test_delete_customer_missing(self): - self.assertMissing(self.storage_conn.delete_customer, - self.admin_ctxt, UUID) - - # Products - def test_create_product(self): - f, data = self.create_product(self.merchant['id']) - self.assertData(f, data) - - def test_get_product(self): - f, expected = self.create_product(self.merchant['id']) - actual = self.storage_conn.get_product(self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_product_missing(self): - self.assertMissing(self.storage_conn.get_product, - self.admin_ctxt, UUID) - - def test_update_product(self): - fixture, data = self.create_product(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_product( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def 
test_update_product_missing(self): - self.assertMissing(self.storage_conn.update_product, - self.admin_ctxt, UUID, {}) - - def test_delete_product(self): - fixture, data = self.create_product(self.merchant['id']) - self.storage_conn.delete_product(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_product, - self.admin_ctxt, data['id']) - - def test_delete_product_missing(self): - self.assertMissing(self.storage_conn.delete_product, - self.admin_ctxt, UUID) - - # Plan - def test_create_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - self.assertData(fixture, data) - - def test_get_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - actual = self.storage_conn.get_plan(self.admin_ctxt, data['id']) - - # FIXME(ekarlso): This should test the actual items also? But atm - # there's am error that if the value is int when getting added it's - # string when returned... - self.assertEqual(data['name'], actual['name']) - self.assertEqual(data['title'], actual['title']) - self.assertEqual(data['description'], actual['description']) - - def test_get_plan_missing(self): - self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, UUID) - - def test_update_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_plan( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_plan_missing(self): - self.assertMissing(self.storage_conn.update_plan, - self.admin_ctxt, UUID, {}) - - def test_delete_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - self.storage_conn.delete_plan(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_plan, - self.admin_ctxt, data['id']) - - def test_delete_plan_missing(self): - self.assertMissing(self.storage_conn.delete_plan, - self.admin_ctxt, UUID) diff --git a/billingstack/tests/central/storage/test_sqlalchemy.py 
b/billingstack/tests/central/storage/test_sqlalchemy.py index c9e59cf..38b7653 100644 --- a/billingstack/tests/central/storage/test_sqlalchemy.py +++ b/billingstack/tests/central/storage/test_sqlalchemy.py @@ -17,7 +17,7 @@ # Copied: billingstack from billingstack.openstack.common import log as logging from billingstack.tests.base import TestCase -from billingstack.tests.central.storage.base import DriverMixin +from billingstack.tests.central.storage import DriverMixin LOG = logging.getLogger(__name__) diff --git a/billingstack/tests/collector/storage/__init__.py b/billingstack/tests/collector/storage/__init__.py index e69de29..88bf34d 100644 --- a/billingstack/tests/collector/storage/__init__.py +++ b/billingstack/tests/collector/storage/__init__.py @@ -0,0 +1,293 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from billingstack.openstack.common import log as logging +from billingstack.openstack.common.uuidutils import generate_uuid + + +LOG = logging.getLogger(__name__) + + +UUID = generate_uuid() +MERCHANT_UUID = generate_uuid() +CUSTOMER_UUID = generate_uuid() + + +class DriverMixin(object): + def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): + methods = [self.get_fixture('pg_method')] or methods + if not 'methods' in values: + values['methods'] = methods + + fixture = self.get_fixture('pg_provider', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + + data = self.storage_conn.pg_provider_register( + ctxt, fixture.copy(), **kw) + + return fixture, data + + def create_pg_config(self, fixture=0, values={}, + **kw): + fixture = self.get_fixture('pg_config', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_pg_config( + ctxt, fixture, **kw) + + def create_payment_method(self, fixture=0, + values={}, **kw): + fixture = self.get_fixture('payment_method', fixture, values) + ctxt = kw.pop('context', self.admin_ctxt) + return fixture, self.storage_conn.create_payment_method( + ctxt, fixture, **kw) + + # Payment Gateways + def test_pg_provider_register(self): + fixture, actual = self.pg_provider_register() + self.assertEqual(fixture['name'], actual['name']) + self.assertEqual(fixture['title'], actual['title']) + self.assertEqual(fixture['description'], actual['description']) + self.assertData(fixture['methods'][0], actual['methods'][0]) + + def test_pg_provider_register_different_methods(self): + # Add a Global method + method1 = {'type': 'creditcard', 'name': 'mastercard'} + method2 = {'type': 'creditcard', 'name': 'amex'} + method3 = {'type': 'creditcard', 'name': 'visa'} + + provider = {'name': 'noop', 'methods': [method1, method2, method3]} + + provider = self.storage_conn.pg_provider_register( + self.admin_ctxt, provider) + + # TODO(ekarls): Make this more extensive? 
+ self.assertLen(3, provider['methods']) + + def test_get_pg_provider(self): + _, expected = self.pg_provider_register() + actual = self.storage_conn.get_pg_provider(self.admin_ctxt, + expected['id']) + self.assertData(expected, actual) + + def test_get_pg_provider_missing(self): + self.assertMissing(self.storage_conn.get_pg_provider, + self.admin_ctxt, UUID) + + def test_pg_provider_deregister(self): + _, data = self.pg_provider_register() + self.storage_conn.pg_provider_deregister(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.pg_provider_deregister, + self.admin_ctxt, data['id']) + + def test_pg_provider_deregister_missing(self): + self.assertMissing(self.storage_conn.pg_provider_deregister, + self.admin_ctxt, UUID) + + # Payment Gateway Configuration + def test_create_pg_config(self): + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id']} + fixture, data = self.create_pg_config(values=values) + + self.assertData(fixture, data) + + def test_get_pg_config(self): + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id']} + + fixture, data = self.create_pg_config(values=values) + + def test_get_pg_config_missing(self): + self.assertMissing(self.storage_conn.get_pg_config, + self.admin_ctxt, UUID) + + def test_update_pg_config(self): + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id']} + fixture, data = self.create_pg_config(values=values) + + fixture['properties'] = {"api": 1} + updated = self.storage_conn.update_pg_config( + self.admin_ctxt, data['id'], fixture) + + self.assertData(fixture, updated) + + def test_update_pg_config_missing(self): + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id']} + + fixture, data = self.create_pg_config(values=values) + + 
self.assertMissing(self.storage_conn.update_pg_config, + self.admin_ctxt, UUID, {}) + + def test_delete_pg_config(self): + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id']} + + fixture, data = self.create_pg_config(values=values) + + self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_pg_config, + self.admin_ctxt, data['id']) + + def test_delete_pg_config_missing(self): + self.assertMissing(self.storage_conn.delete_pg_config, + self.admin_ctxt, UUID) + + # PaymentMethod + def test_create_payment_method(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id'] + } + _, config = self.create_pg_config(values=values) + + # Setup PaymentMethod + values = { + 'customer_id': CUSTOMER_UUID, + 'provider_config_id': config['id']} + + fixture, data = self.create_payment_method(values=values) + self.assertData(fixture, data) + + def test_get_payment_method(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id'] + } + _, config = self.create_pg_config(values=values) + + # Setup PaymentMethod + values = { + 'customer_id': CUSTOMER_UUID, + 'provider_config_id': config['id']} + + _, expected = self.create_payment_method(values=values) + actual = self.storage_conn.get_payment_method(self.admin_ctxt, + expected['id']) + self.assertData(expected, actual) + + # TODO(ekarlso): Make this test more extensive? 
+ def test_list_payment_methods(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id'] + } + _, config = self.create_pg_config(values=values) + + # Add two Customers with some methods + customer1_id = generate_uuid() + values = { + 'customer_id': customer1_id, + 'provider_config_id': config['id']} + self.create_payment_method(values=values) + rows = self.storage_conn.list_payment_methods( + self.admin_ctxt, + criterion={'customer_id': customer1_id}) + self.assertLen(1, rows) + + customer2_id = generate_uuid() + values = { + 'customer_id': customer2_id, + 'provider_config_id': config['id']} + self.create_payment_method(values=values) + self.create_payment_method(values=values) + rows = self.storage_conn.list_payment_methods( + self.admin_ctxt, + criterion={'customer_id': customer2_id}) + self.assertLen(2, rows) + + def test_get_payment_method_missing(self): + self.assertMissing(self.storage_conn.get_payment_method, + self.admin_ctxt, UUID) + + def test_update_payment_method(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id'] + } + _, config = self.create_pg_config(values=values) + + # Setup PaymentMethod + values = { + 'customer_id': CUSTOMER_UUID, + 'provider_config_id': config['id']} + + fixture, data = self.create_payment_method(values=values) + + fixture['identifier'] = 1 + updated = self.storage_conn.update_payment_method( + self.admin_ctxt, + data['id'], + fixture) + + self.assertData(fixture, updated) + + def test_update_payment_method_missing(self): + self.assertMissing(self.storage_conn.update_payment_method, + self.admin_ctxt, UUID, {}) + + def test_delete_payment_method(self): + # Setup pgp / pgm / pgc + _, provider = self.pg_provider_register() + + values = { + 'merchant_id': MERCHANT_UUID, + 'provider_id': provider['id'] + } + _, config = 
self.create_pg_config(values=values) + + # Setup PaymentMethod + values = { + 'customer_id': CUSTOMER_UUID, + 'provider_config_id': config['id']} + + fixture, data = self.create_payment_method(values=values) + + self.storage_conn.delete_payment_method(self.admin_ctxt, data['id']) + self.assertMissing(self.storage_conn.get_payment_method, + self.admin_ctxt, data['id']) + + def test_delete_payment_method_missing(self): + self.assertMissing(self.storage_conn.delete_payment_method, + self.admin_ctxt, UUID) diff --git a/billingstack/tests/collector/storage/test_sqlalchemy.py b/billingstack/tests/collector/storage/test_sqlalchemy.py new file mode 100644 index 0000000..df654d2 --- /dev/null +++ b/billingstack/tests/collector/storage/test_sqlalchemy.py @@ -0,0 +1,29 @@ +# Copyright 2012 Managed I.T. +# +# Author: Kiall Mac Innes +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +# Copied: billingstack +from billingstack.openstack.common import log as logging +from billingstack.tests.base import TestCase +from billingstack.tests.collector.storage import DriverMixin + +LOG = logging.getLogger(__name__) + + +class SqlalchemyStorageTest(DriverMixin, TestCase): + def setUp(self): + super(SqlalchemyStorageTest, self).setUp() + fixture = self.start_storage('collector') + self.storage_conn = fixture.connection diff --git a/setup.cfg b/setup.cfg index e1b193e..e0938f0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,6 +39,10 @@ console_scripts = billingstack.central.storage = sqlalchemy = billingstack.central.storage.impl_sqlalchemy:SQLAlchemyEngine +billingstack.collector.storage = + sqlalchemy = billingstack.collector.storage.impl_sqlalchemy:SQLAlchemyEngine + + billingstack.biller.storage = sqlalchemy = billingstack.biller.storage.impl_sqlalchemy:SQLAlchemyEngine diff --git a/tools/resync_storage.py b/tools/resync_storage.py index cb53509..dc87337 100644 --- a/tools/resync_storage.py +++ b/tools/resync_storage.py @@ -9,7 +9,7 @@ from billingstack.storage.utils import get_connection # NOTE: make this based on entrypoints ? 
-SERVICES = ['biller', 'central', 'rater'] +SERVICES = ['biller', 'central', 'collector', 'rater'] LOG = logging.getLogger(__name__) From d573d92617ce649153a7127900eefeee4789a8e2 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 19 Aug 2013 17:38:03 +0200 Subject: [PATCH 174/182] Update ignore Change-Id: Ie6bfaf1dae88a00d1c8175dab05413b80dca1b5f --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 960575b..2a59534 100644 --- a/.gitignore +++ b/.gitignore @@ -54,3 +54,5 @@ billingstack-screenrc status logs .ropeproject +*.sublime-project +*.sublime-workspace From d058b242dce6b34186412cfc16206f9d204aa222 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Mon, 19 Aug 2013 21:12:32 +0200 Subject: [PATCH 175/182] Update common stuffs Change-Id: Id943dafb19638488f025362ece7ababc5d79219e --- billingstack/central/__init__.py | 1 + .../openstack/common/crypto/__init__.py | 0 billingstack/openstack/common/crypto/utils.py | 179 ++++++ billingstack/openstack/common/db/exception.py | 6 + billingstack/openstack/common/excutils.py | 7 +- billingstack/openstack/common/gettextutils.py | 136 +++-- billingstack/openstack/common/local.py | 13 +- .../openstack/common/notifier/log_notifier.py | 2 +- .../openstack/common/notifier/rpc_notifier.py | 2 +- .../common/notifier/rpc_notifier2.py | 2 +- billingstack/openstack/common/processutils.py | 11 +- billingstack/openstack/common/rpc/__init__.py | 3 +- billingstack/openstack/common/rpc/amqp.py | 9 +- .../openstack/common/rpc/impl_kombu.py | 8 +- .../openstack/common/rpc/impl_qpid.py | 2 +- billingstack/openstack/common/rpc/impl_zmq.py | 1 + .../openstack/common/rpc/matchmaker.py | 12 +- .../openstack/common/rpc/matchmaker_ring.py | 4 +- .../openstack/common/rpc/securemessage.py | 521 ++++++++++++++++++ .../openstack/common/rpc/zmq_receiver.py | 1 - billingstack/openstack/common/timeutils.py | 4 +- setup.py | 5 +- tools/pip-requires | 18 +- tools/test-requires | 4 +- 24 files changed, 859 
insertions(+), 92 deletions(-) create mode 100644 billingstack/openstack/common/crypto/__init__.py create mode 100644 billingstack/openstack/common/crypto/utils.py create mode 100644 billingstack/openstack/common/rpc/securemessage.py mode change 100755 => 100644 billingstack/openstack/common/rpc/zmq_receiver.py diff --git a/billingstack/central/__init__.py b/billingstack/central/__init__.py index e7b0032..b84add9 100644 --- a/billingstack/central/__init__.py +++ b/billingstack/central/__init__.py @@ -19,6 +19,7 @@ name='service:central', title="Configuration for Central Service" )) + cfg.CONF.register_opts([ cfg.IntOpt('workers', default=None, help='Number of worker processes to spawn'), diff --git a/billingstack/openstack/common/crypto/__init__.py b/billingstack/openstack/common/crypto/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/billingstack/openstack/common/crypto/utils.py b/billingstack/openstack/common/crypto/utils.py new file mode 100644 index 0000000..08e2f4c --- /dev/null +++ b/billingstack/openstack/common/crypto/utils.py @@ -0,0 +1,179 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2013 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import base64 + +from Crypto.Hash import HMAC +from Crypto import Random + +from billingstack.openstack.common.gettextutils import _ # noqa +from billingstack.openstack.common import importutils + + +class CryptoutilsException(Exception): + """Generic Exception for Crypto utilities.""" + + message = _("An unknown error occurred in crypto utils.") + + +class CipherBlockLengthTooBig(CryptoutilsException): + """The block size is too big.""" + + def __init__(self, requested, permitted): + msg = _("Block size of %(given)d is too big, max = %(maximum)d") + message = msg % {'given': requested, 'maximum': permitted} + super(CryptoutilsException, self).__init__(message) + + +class HKDFOutputLengthTooLong(CryptoutilsException): + """The amount of Key Material asked is too much.""" + + def __init__(self, requested, permitted): + msg = _("Length of %(given)d is too long, max = %(maximum)d") + message = msg % {'given': requested, 'maximum': permitted} + super(CryptoutilsException, self).__init__(message) + + +class HKDF(object): + """An HMAC-based Key Derivation Function implementation (RFC5869) + + This class creates an object that allows to use HKDF to derive keys. + """ + + def __init__(self, hashtype='SHA256'): + self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype) + self.max_okm_length = 255 * self.hashfn.digest_size + + def extract(self, ikm, salt=None): + """An extract function that can be used to derive a robust key given + weak Input Key Material (IKM) which could be a password. + Returns a pseudorandom key (of HashLen octets) + + :param ikm: input keying material (ex a password) + :param salt: optional salt value (a non-secret random value) + """ + if salt is None: + salt = '\x00' * self.hashfn.digest_size + + return HMAC.new(salt, ikm, self.hashfn).digest() + + def expand(self, prk, info, length): + """An expand function that will return arbitrary length output that can + be used as keys. + Returns a buffer usable as key material. 
+ + :param prk: a pseudorandom key of at least HashLen octets + :param info: optional string (can be a zero-length string) + :param length: length of output keying material (<= 255 * HashLen) + """ + if length > self.max_okm_length: + raise HKDFOutputLengthTooLong(length, self.max_okm_length) + + N = (length + self.hashfn.digest_size - 1) / self.hashfn.digest_size + + okm = "" + tmp = "" + for block in range(1, N + 1): + tmp = HMAC.new(prk, tmp + info + chr(block), self.hashfn).digest() + okm += tmp + + return okm[:length] + + +MAX_CB_SIZE = 256 + + +class SymmetricCrypto(object): + """Symmetric Key Crypto object. + + This class creates a Symmetric Key Crypto object that can be used + to encrypt, decrypt, or sign arbitrary data. + + :param enctype: Encryption Cipher name (default: AES) + :param hashtype: Hash/HMAC type name (default: SHA256) + """ + + def __init__(self, enctype='AES', hashtype='SHA256'): + self.cipher = importutils.import_module('Crypto.Cipher.' + enctype) + self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype) + + def new_key(self, size): + return Random.new().read(size) + + def encrypt(self, key, msg, b64encode=True): + """Encrypt the provided msg and returns the cyphertext optionally + base64 encoded. + + Uses AES-128-CBC with a Random IV by default. + + The plaintext is padded to reach blocksize length. + The last byte of the block is the length of the padding. + The length of the padding does not include the length byte itself. + + :param key: The Encryption key. + :param msg: the plain text. + + :returns encblock: a block of encrypted data. + """ + iv = Random.new().read(self.cipher.block_size) + cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv) + + # CBC mode requires a fixed block size. Append padding and length of + # padding. 
+ if self.cipher.block_size > MAX_CB_SIZE: + raise CipherBlockLengthTooBig(self.cipher.block_size, MAX_CB_SIZE) + r = len(msg) % self.cipher.block_size + padlen = self.cipher.block_size - r - 1 + msg += '\x00' * padlen + msg += chr(padlen) + + enc = iv + cipher.encrypt(msg) + if b64encode: + enc = base64.b64encode(enc) + return enc + + def decrypt(self, key, msg, b64decode=True): + """Decrypts the provided ciphertext, optionally base 64 encoded, and + returns the plaintext message, after padding is removed. + + Uses AES-128-CBC with an IV by default. + + :param key: The Encryption key. + :param msg: the ciphetext, the first block is the IV + """ + if b64decode: + msg = base64.b64decode(msg) + iv = msg[:self.cipher.block_size] + cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv) + + padded = cipher.decrypt(msg[self.cipher.block_size:]) + l = ord(padded[-1]) + 1 + plain = padded[:-l] + return plain + + def sign(self, key, msg, b64encode=True): + """Signs a message string and returns a base64 encoded signature. + + Uses HMAC-SHA-256 by default. + + :param key: The Signing key. + :param msg: the message to sign. 
+ """ + h = HMAC.new(key, msg, self.hashfn) + out = h.digest() + if b64encode: + out = base64.b64encode(out) + return out diff --git a/billingstack/openstack/common/db/exception.py b/billingstack/openstack/common/db/exception.py index 0a231cf..01a847a 100644 --- a/billingstack/openstack/common/db/exception.py +++ b/billingstack/openstack/common/db/exception.py @@ -43,3 +43,9 @@ def __init__(self, inner_exception=None): class DBInvalidUnicodeParameter(Exception): message = _("Invalid Parameter: " "Unicode is not supported by the current database.") + + +class DbMigrationError(DBError): + """Wraps migration specific exception.""" + def __init__(self, message=None): + super(DbMigrationError, self).__init__(str(message)) diff --git a/billingstack/openstack/common/excutils.py b/billingstack/openstack/common/excutils.py index 31c3d33..a2ac554 100644 --- a/billingstack/openstack/common/excutils.py +++ b/billingstack/openstack/common/excutils.py @@ -77,7 +77,8 @@ def inner_func(*args, **kwargs): try: return infunc(*args, **kwargs) except Exception as exc: - if exc.message == last_exc_message: + this_exc_message = unicode(exc) + if this_exc_message == last_exc_message: exc_count += 1 else: exc_count = 1 @@ -85,12 +86,12 @@ def inner_func(*args, **kwargs): # the exception message changes cur_time = int(time.time()) if (cur_time - last_log_time > 60 or - exc.message != last_exc_message): + this_exc_message != last_exc_message): logging.exception( _('Unexpected exception occurred %d time(s)... ' 'retrying.') % exc_count) last_log_time = cur_time - last_exc_message = exc.message + last_exc_message = this_exc_message exc_count = 0 # This should be a very rare event. In case it isn't, do # a sleep. 
diff --git a/billingstack/openstack/common/gettextutils.py b/billingstack/openstack/common/gettextutils.py index 185aa05..cc90613 100644 --- a/billingstack/openstack/common/gettextutils.py +++ b/billingstack/openstack/common/gettextutils.py @@ -1,8 +1,8 @@ # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 Red Hat, Inc. -# All Rights Reserved. # Copyright 2013 IBM Corp. +# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -31,17 +31,36 @@ import re import UserString +from babel import localedata import six _localedir = os.environ.get('billingstack'.upper() + '_LOCALEDIR') _t = gettext.translation('billingstack', localedir=_localedir, fallback=True) +_AVAILABLE_LANGUAGES = [] +USE_LAZY = False + + +def enable_lazy(): + """Convenience function for configuring _() to use lazy gettext + + Call this at the start of execution to enable the gettextutils._ + function to use lazy gettext functionality. This is useful if + your project is importing _ directly instead of using the + gettextutils.install() way of importing the _ function. + """ + global USE_LAZY + USE_LAZY = True + def _(msg): - return _t.ugettext(msg) + if USE_LAZY: + return Message(msg, 'billingstack') + else: + return _t.ugettext(msg) -def install(domain): +def install(domain, lazy=False): """Install a _() function using the given translation domain. Given a translation domain, install a _() function using gettext's @@ -51,41 +70,45 @@ def install(domain): overriding the default localedir (e.g. /usr/share/locale) using a translation-domain-specific environment variable (e.g. NOVA_LOCALEDIR). - """ - gettext.install(domain, - localedir=os.environ.get(domain.upper() + '_LOCALEDIR'), - unicode=True) - - -""" -Lazy gettext functionality. - -The following is an attempt to introduce a deferred way -to do translations on messages in OpenStack. 
We attempt to -override the standard _() function and % (format string) operation -to build Message objects that can later be translated when we have -more information. Also included is an example LogHandler that -translates Messages to an associated locale, effectively allowing -many logs, each with their own locale. -""" - - -def get_lazy_gettext(domain): - """Assemble and return a lazy gettext function for a given domain. - Factory method for a project/module to get a lazy gettext function - for its own translation domain (i.e. nova, glance, cinder, etc.) + :param domain: the translation domain + :param lazy: indicates whether or not to install the lazy _() function. + The lazy _() introduces a way to do deferred translation + of messages by installing a _ that builds Message objects, + instead of strings, which can then be lazily translated into + any available locale. """ - - def _lazy_gettext(msg): - """Create and return a Message object. - - Message encapsulates a string so that we can translate it later when - needed. - """ - return Message(msg, domain) - - return _lazy_gettext + if lazy: + # NOTE(mrodden): Lazy gettext functionality. + # + # The following introduces a deferred way to do translations on + # messages in OpenStack. We override the standard _() function + # and % (format string) operation to build Message objects that can + # later be translated when we have more information. + # + # Also included below is an example LocaleHandler that translates + # Messages to an associated locale, effectively allowing many logs, + # each with their own locale. + + def _lazy_gettext(msg): + """Create and return a Message object. + + Lazy gettext function for a given domain, it is a factory method + for a project/module to get a lazy gettext function for its own + translation domain (i.e. nova, glance, cinder, etc.) + + Message encapsulates a string so that we can translate + it later when needed. 
+ """ + return Message(msg, domain) + + import __builtin__ + __builtin__.__dict__['_'] = _lazy_gettext + else: + localedir = '%s_LOCALEDIR' % domain.upper() + gettext.install(domain, + localedir=os.environ.get(localedir), + unicode=True) class Message(UserString.UserString, object): @@ -130,7 +153,7 @@ def _save_dictionary_parameter(self, dict_param): # look for %(blah) fields in string; # ignore %% and deal with the # case where % is first character on the line - keys = re.findall('(?:[^%]|^)%\((\w*)\)[a-z]', full_msg) + keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg) # if we don't find any %(blah) blocks but have a %s if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg): @@ -232,6 +255,45 @@ def __getattribute__(self, name): return UserString.UserString.__getattribute__(self, name) +def get_available_languages(domain): + """Lists the available languages for the given translation domain. + + :param domain: the domain to get languages for + """ + if _AVAILABLE_LANGUAGES: + return _AVAILABLE_LANGUAGES + + localedir = '%s_LOCALEDIR' % domain.upper() + find = lambda x: gettext.find(domain, + localedir=os.environ.get(localedir), + languages=[x]) + + # NOTE(mrodden): en_US should always be available (and first in case + # order matters) since our in-line message strings are en_US + _AVAILABLE_LANGUAGES.append('en_US') + # NOTE(luisg): Babel <1.0 used a function called list(), which was + # renamed to locale_identifiers() in >=1.0, the requirements master list + # requires >=0.9.6, uncapped, so defensively work with both. 
We can remove + # this check when the master list updates to >=1.0, and all projects udpate + list_identifiers = (getattr(localedata, 'list', None) or + getattr(localedata, 'locale_identifiers')) + locale_identifiers = list_identifiers() + for i in locale_identifiers: + if find(i) is not None: + _AVAILABLE_LANGUAGES.append(i) + return _AVAILABLE_LANGUAGES + + +def get_localized_message(message, user_locale): + """Gets a localized version of the given message in the given locale.""" + if (isinstance(message, Message)): + if user_locale: + message.locale = user_locale + return unicode(message) + else: + return message + + class LocaleHandler(logging.Handler): """Handler that can have a locale associated to translate Messages. diff --git a/billingstack/openstack/common/local.py b/billingstack/openstack/common/local.py index f1bfc82..e82f17d 100644 --- a/billingstack/openstack/common/local.py +++ b/billingstack/openstack/common/local.py @@ -15,16 +15,15 @@ # License for the specific language governing permissions and limitations # under the License. -"""Greenthread local storage of variables using weak references""" +"""Local storage of variables using weak references""" +import threading import weakref -from eventlet import corolocal - -class WeakLocal(corolocal.local): +class WeakLocal(threading.local): def __getattribute__(self, attr): - rval = corolocal.local.__getattribute__(self, attr) + rval = super(WeakLocal, self).__getattribute__(attr) if rval: # NOTE(mikal): this bit is confusing. What is stored is a weak # reference, not the value itself. 
We therefore need to lookup @@ -34,7 +33,7 @@ def __getattribute__(self, attr): def __setattr__(self, attr, value): value = weakref.ref(value) - return corolocal.local.__setattr__(self, attr, value) + return super(WeakLocal, self).__setattr__(attr, value) # NOTE(mikal): the name "store" should be deprecated in the future @@ -45,4 +44,4 @@ def __setattr__(self, attr, value): # "strong" store will hold a reference to the object so that it never falls out # of scope. weak_store = WeakLocal() -strong_store = corolocal.local +strong_store = threading.local() diff --git a/billingstack/openstack/common/notifier/log_notifier.py b/billingstack/openstack/common/notifier/log_notifier.py index e842dbf..4ce03e2 100644 --- a/billingstack/openstack/common/notifier/log_notifier.py +++ b/billingstack/openstack/common/notifier/log_notifier.py @@ -25,7 +25,7 @@ def notify(_context, message): """Notifies the recipient of the desired event given the model. - Log notifications using openstack's default logging system. + Log notifications using OpenStack's default logging system. 
""" priority = message.get('priority', diff --git a/billingstack/openstack/common/notifier/rpc_notifier.py b/billingstack/openstack/common/notifier/rpc_notifier.py index ac0e3ed..1b230a4 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier.py +++ b/billingstack/openstack/common/notifier/rpc_notifier.py @@ -24,7 +24,7 @@ notification_topic_opt = cfg.ListOpt( 'notification_topics', default=['notifications', ], - help='AMQP topic used for openstack notifications') + help='AMQP topic used for OpenStack notifications') CONF = cfg.CONF CONF.register_opt(notification_topic_opt) diff --git a/billingstack/openstack/common/notifier/rpc_notifier2.py b/billingstack/openstack/common/notifier/rpc_notifier2.py index 41f8d68..af10d48 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier2.py +++ b/billingstack/openstack/common/notifier/rpc_notifier2.py @@ -26,7 +26,7 @@ notification_topic_opt = cfg.ListOpt( 'topics', default=['notifications', ], - help='AMQP topic(s) used for openstack notifications') + help='AMQP topic(s) used for OpenStack notifications') opt_group = cfg.OptGroup(name='rpc_notifier2', title='Options for rpc_notifier2') diff --git a/billingstack/openstack/common/processutils.py b/billingstack/openstack/common/processutils.py index e0c298a..169c52c 100644 --- a/billingstack/openstack/common/processutils.py +++ b/billingstack/openstack/common/processutils.py @@ -19,6 +19,7 @@ System-level utilities and helper functions. """ +import logging as stdlib_logging import os import random import shlex @@ -102,6 +103,9 @@ def execute(*cmd, **kwargs): :param shell: whether or not there should be a shell used to execute this command. Defaults to false. :type shell: boolean + :param loglevel: log level for execute commands. + :type loglevel: int. 
(Should be stdlib_logging.DEBUG or + stdlib_logging.INFO) :returns: (stdout, stderr) from process execution :raises: :class:`UnknownArgumentError` on receiving unknown arguments @@ -116,6 +120,7 @@ def execute(*cmd, **kwargs): run_as_root = kwargs.pop('run_as_root', False) root_helper = kwargs.pop('root_helper', '') shell = kwargs.pop('shell', False) + loglevel = kwargs.pop('loglevel', stdlib_logging.DEBUG) if isinstance(check_exit_code, bool): ignore_exit_code = not check_exit_code @@ -139,7 +144,7 @@ def execute(*cmd, **kwargs): while attempts > 0: attempts -= 1 try: - LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd)) + LOG.log(loglevel, _('Running cmd (subprocess): %s'), ' '.join(cmd)) _PIPE = subprocess.PIPE # pylint: disable=E1101 if os.name == 'nt': @@ -164,7 +169,7 @@ def execute(*cmd, **kwargs): obj.stdin.close() # pylint: disable=E1101 _returncode = obj.returncode # pylint: disable=E1101 if _returncode: - LOG.debug(_('Result was %s') % _returncode) + LOG.log(loglevel, _('Result was %s') % _returncode) if not ignore_exit_code and _returncode not in check_exit_code: (stdout, stderr) = result raise ProcessExecutionError(exit_code=_returncode, @@ -176,7 +181,7 @@ def execute(*cmd, **kwargs): if not attempts: raise else: - LOG.debug(_('%r failed. Retrying.'), cmd) + LOG.log(loglevel, _('%r failed. Retrying.'), cmd) if delay_on_retry: greenthread.sleep(random.randint(20, 200) / 100.0) finally: diff --git a/billingstack/openstack/common/rpc/__init__.py b/billingstack/openstack/common/rpc/__init__.py index 9c40b70..423d845 100644 --- a/billingstack/openstack/common/rpc/__init__.py +++ b/billingstack/openstack/common/rpc/__init__.py @@ -56,8 +56,7 @@ help='Seconds to wait before a cast expires (TTL). 
' 'Only supported by impl_zmq.'), cfg.ListOpt('allowed_rpc_exception_modules', - default=['billingstack.openstack.common.exception', - 'nova.exception', + default=['nova.exception', 'cinder.exception', 'exceptions', ], diff --git a/billingstack/openstack/common/rpc/amqp.py b/billingstack/openstack/common/rpc/amqp.py index 3e2f850..683bef2 100644 --- a/billingstack/openstack/common/rpc/amqp.py +++ b/billingstack/openstack/common/rpc/amqp.py @@ -300,8 +300,13 @@ def pack_context(msg, context): for args at some point. """ - context_d = dict([('_context_%s' % key, value) - for (key, value) in context.to_dict().iteritems()]) + if isinstance(context, dict): + context_d = dict([('_context_%s' % key, value) + for (key, value) in context.iteritems()]) + else: + context_d = dict([('_context_%s' % key, value) + for (key, value) in context.to_dict().iteritems()]) + msg.update(context_d) diff --git a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py index 3afb966..8d8dc23 100644 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ b/billingstack/openstack/common/rpc/impl_kombu.py @@ -490,12 +490,8 @@ def _fetch_ssl_params(self): # future with this? ssl_params['cert_reqs'] = ssl.CERT_REQUIRED - if not ssl_params: - # Just have the default behavior - return True - else: - # Return the extended behavior - return ssl_params + # Return the extended behavior or just have the default behavior + return ssl_params or True def _connect(self, params): """Connect to rabbit. 
Re-establish any queues that may have diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py index b58e779..e75035d 100644 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ b/billingstack/openstack/common/rpc/impl_qpid.py @@ -320,7 +320,7 @@ class DirectPublisher(Publisher): def __init__(self, conf, session, msg_id): """Init a 'direct' publisher.""" super(DirectPublisher, self).__init__(session, msg_id, - {"type": "Direct"}) + {"type": "direct"}) class TopicPublisher(Publisher): diff --git a/billingstack/openstack/common/rpc/impl_zmq.py b/billingstack/openstack/common/rpc/impl_zmq.py index d6624ee..4f7e9eb 100644 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ b/billingstack/openstack/common/rpc/impl_zmq.py @@ -383,6 +383,7 @@ def register(self, proxy, in_addr, zmq_type_in, LOG.info(_("In reactor registered")) def consume_in_thread(self): + @excutils.forever_retry_uncaught_exceptions def _consume(sock): LOG.info(_("Consuming socket")) while True: diff --git a/billingstack/openstack/common/rpc/matchmaker.py b/billingstack/openstack/common/rpc/matchmaker.py index fcb0965..290b991 100644 --- a/billingstack/openstack/common/rpc/matchmaker.py +++ b/billingstack/openstack/common/rpc/matchmaker.py @@ -248,9 +248,7 @@ class DirectBinding(Binding): that it maps directly to a host, thus direct. """ def test(self, key): - if '.' in key: - return True - return False + return '.' in key class TopicBinding(Binding): @@ -262,17 +260,13 @@ class TopicBinding(Binding): matches that of a direct exchange. """ def test(self, key): - if '.' not in key: - return True - return False + return '.' not in key class FanoutBinding(Binding): """Match on fanout keys, where key starts with 'fanout.' 
string.""" def test(self, key): - if key.startswith('fanout~'): - return True - return False + return key.startswith('fanout~') class StubExchange(Exchange): diff --git a/billingstack/openstack/common/rpc/matchmaker_ring.py b/billingstack/openstack/common/rpc/matchmaker_ring.py index db3b1b4..0dca9d1 100644 --- a/billingstack/openstack/common/rpc/matchmaker_ring.py +++ b/billingstack/openstack/common/rpc/matchmaker_ring.py @@ -63,9 +63,7 @@ def __init__(self, ring=None): self.ring0[k] = itertools.cycle(self.ring[k]) def _ring_has(self, key): - if key in self.ring0: - return True - return False + return key in self.ring0 class RoundRobinRingExchange(RingExchange): diff --git a/billingstack/openstack/common/rpc/securemessage.py b/billingstack/openstack/common/rpc/securemessage.py new file mode 100644 index 0000000..ee46d58 --- /dev/null +++ b/billingstack/openstack/common/rpc/securemessage.py @@ -0,0 +1,521 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2013 Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import base64 +import collections +import os +import struct +import time + +import requests + +from oslo.config import cfg + +from billingstack.openstack.common.crypto import utils as cryptoutils +from billingstack.openstack.common import jsonutils +from billingstack.openstack.common import log as logging + +secure_message_opts = [ + cfg.BoolOpt('enabled', default=True, + help='Whether Secure Messaging (Signing) is enabled,' + ' defaults to enabled'), + cfg.BoolOpt('enforced', default=False, + help='Whether Secure Messaging (Signing) is enforced,' + ' defaults to not enforced'), + cfg.BoolOpt('encrypt', default=False, + help='Whether Secure Messaging (Encryption) is enabled,' + ' defaults to not enabled'), + cfg.StrOpt('secret_keys_file', + help='Path to the file containing the keys, takes precedence' + ' over secret_key'), + cfg.MultiStrOpt('secret_key', + help='A list of keys: (ex: name:),' + ' ignored if secret_keys_file is set'), + cfg.StrOpt('kds_endpoint', + help='KDS endpoint (ex: http://kds.example.com:35357/v3)'), +] +secure_message_group = cfg.OptGroup('secure_messages', + title='Secure Messaging options') + +LOG = logging.getLogger(__name__) + + +class SecureMessageException(Exception): + """Generic Exception for Secure Messages.""" + + msg = "An unknown Secure Message related exception occurred." + + def __init__(self, msg=None): + if msg is None: + msg = self.msg + super(SecureMessageException, self).__init__(msg) + + +class SharedKeyNotFound(SecureMessageException): + """No shared key was found and no other external authentication mechanism + is available. + """ + + msg = "Shared Key for [%s] Not Found. 
(%s)" + + def __init__(self, name, errmsg): + super(SharedKeyNotFound, self).__init__(self.msg % (name, errmsg)) + + +class InvalidMetadata(SecureMessageException): + """The metadata is invalid.""" + + msg = "Invalid metadata: %s" + + def __init__(self, err): + super(InvalidMetadata, self).__init__(self.msg % err) + + +class InvalidSignature(SecureMessageException): + """Signature validation failed.""" + + msg = "Failed to validate signature (source=%s, destination=%s)" + + def __init__(self, src, dst): + super(InvalidSignature, self).__init__(self.msg % (src, dst)) + + +class UnknownDestinationName(SecureMessageException): + """The Destination name is unknown to us.""" + + msg = "Invalid destination name (%s)" + + def __init__(self, name): + super(UnknownDestinationName, self).__init__(self.msg % name) + + +class InvalidEncryptedTicket(SecureMessageException): + """The Encrypted Ticket could not be successfully handled.""" + + msg = "Invalid Ticket (source=%s, destination=%s)" + + def __init__(self, src, dst): + super(InvalidEncryptedTicket, self).__init__(self.msg % (src, dst)) + + +class InvalidExpiredTicket(SecureMessageException): + """The ticket received is already expired.""" + + msg = "Expired ticket (source=%s, destination=%s)" + + def __init__(self, src, dst): + super(InvalidExpiredTicket, self).__init__(self.msg % (src, dst)) + + +class CommunicationError(SecureMessageException): + """The Communication with the KDS failed.""" + + msg = "Communication Error (target=%s): %s" + + def __init__(self, target, errmsg): + super(CommunicationError, self).__init__(self.msg % (target, errmsg)) + + +class InvalidArgument(SecureMessageException): + """Bad initialization argument.""" + + msg = "Invalid argument: %s" + + def __init__(self, errmsg): + super(InvalidArgument, self).__init__(self.msg % errmsg) + + +Ticket = collections.namedtuple('Ticket', ['skey', 'ekey', 'esek']) + + +class KeyStore(object): + """A storage class for Signing and Encryption Keys. 
+ + This class creates an object that holds Generic Keys like Signing + Keys, Encryption Keys, Encrypted SEK Tickets ... + """ + + def __init__(self): + self._kvps = dict() + + def _get_key_name(self, source, target, ktype): + return (source, target, ktype) + + def _put(self, src, dst, ktype, expiration, data): + name = self._get_key_name(src, dst, ktype) + self._kvps[name] = (expiration, data) + + def _get(self, src, dst, ktype): + name = self._get_key_name(src, dst, ktype) + if name in self._kvps: + expiration, data = self._kvps[name] + if expiration > time.time(): + return data + else: + del self._kvps[name] + + return None + + def clear(self): + """Wipes the store clear of all data.""" + self._kvps.clear() + + def put_ticket(self, source, target, skey, ekey, esek, expiration): + """Puts a sek pair in the cache. + + :param source: Client name + :param target: Target name + :param skey: The Signing Key + :param ekey: The Encryption Key + :param esek: The token encrypted with the target key + :param expiration: Expiration time in seconds since Epoch + """ + keys = Ticket(skey, ekey, esek) + self._put(source, target, 'ticket', expiration, keys) + + def get_ticket(self, source, target): + """Returns a Ticket (skey, ekey, esek) namedtuple for the + source/target pair. 
+ """ + return self._get(source, target, 'ticket') + + +_KEY_STORE = KeyStore() + + +class _KDSClient(object): + + USER_AGENT = 'oslo-incubator/rpc' + + def __init__(self, endpoint=None, timeout=None): + """A KDS Client class.""" + + self._endpoint = endpoint + if timeout is not None: + self.timeout = float(timeout) + else: + self.timeout = None + + def _do_get(self, url, request): + req_kwargs = dict() + req_kwargs['headers'] = dict() + req_kwargs['headers']['User-Agent'] = self.USER_AGENT + req_kwargs['headers']['Content-Type'] = 'application/json' + req_kwargs['data'] = jsonutils.dumps({'request': request}) + if self.timeout is not None: + req_kwargs['timeout'] = self.timeout + + try: + resp = requests.get(url, **req_kwargs) + except requests.ConnectionError as e: + err = "Unable to establish connection. %s" % e + raise CommunicationError(url, err) + + return resp + + def _get_reply(self, url, resp): + if resp.text: + try: + body = jsonutils.loads(resp.text) + reply = body['reply'] + except (KeyError, TypeError, ValueError): + msg = "Failed to decode reply: %s" % resp.text + raise CommunicationError(url, msg) + else: + msg = "No reply data was returned." + raise CommunicationError(url, msg) + + return reply + + def _get_ticket(self, request, url=None, redirects=10): + """Send an HTTP request. + + Wraps around 'requests' to handle redirects and common errors. + """ + if url is None: + if not self._endpoint: + raise CommunicationError(url, 'Endpoint not configured') + url = self._endpoint + '/kds/ticket' + + while redirects: + resp = self._do_get(url, request) + if resp.status_code in (301, 302, 305): + # Redirected. Reissue the request to the new location. 
+ url = resp.headers['location'] + redirects -= 1 + continue + elif resp.status_code != 200: + msg = "Request returned failure status: %s (%s)" + err = msg % (resp.status_code, resp.text) + raise CommunicationError(url, err) + + return self._get_reply(url, resp) + + raise CommunicationError(url, "Too many redirections, giving up!") + + def get_ticket(self, source, target, crypto, key): + + # prepare metadata + md = {'requestor': source, + 'target': target, + 'timestamp': time.time(), + 'nonce': struct.unpack('Q', os.urandom(8))[0]} + metadata = base64.b64encode(jsonutils.dumps(md)) + + # sign metadata + signature = crypto.sign(key, metadata) + + # HTTP request + reply = self._get_ticket({'metadata': metadata, + 'signature': signature}) + + # verify reply + signature = crypto.sign(key, (reply['metadata'] + reply['ticket'])) + if signature != reply['signature']: + raise InvalidEncryptedTicket(md['source'], md['destination']) + md = jsonutils.loads(base64.b64decode(reply['metadata'])) + if ((md['source'] != source or + md['destination'] != target or + md['expiration'] < time.time())): + raise InvalidEncryptedTicket(md['source'], md['destination']) + + # return ticket data + tkt = jsonutils.loads(crypto.decrypt(key, reply['ticket'])) + + return tkt, md['expiration'] + + +# we need to keep a global nonce, as this value should never repeat no +matter how many SecureMessage objects we create +_NONCE = None + + +def _get_nonce(): + """We keep a single counter per instance, as it is so huge we can't + possibly cycle through within 1/100 of a second anyway. + """ + + global _NONCE + # Lazy initialize, for now get a random value, multiply by 2^32 and + # use it as the nonce base. The counter itself will rotate after + # 2^32 increments. 
+ if _NONCE is None: + _NONCE = [struct.unpack('I', os.urandom(4))[0], 0] + + # Increment counter and wrap at 2^32 + _NONCE[1] += 1 + if _NONCE[1] > 0xffffffff: + _NONCE[1] = 0 + + # Return base + counter + return long((_NONCE[0] * 0xffffffff)) + _NONCE[1] + + +class SecureMessage(object): + """A Secure Message object. + + This class creates a signing/encryption facility for RPC messages. + It encapsulates all the necessary crypto primitives to insulate + regular code from the intricacies of message authentication, validation + and optionally encryption. + + :param topic: The topic name of the queue + :param host: The server name, together with the topic it forms a unique + name that is used to source signing keys, and verify + incoming messages. + :param conf: a ConfigOpts object + :param key: (optional) explicitly pass in endpoint private key. + If not provided it will be sourced from the service config + :param key_store: (optional) Storage class for local caching + :param encrypt: (defaults to False) Whether to encrypt messages + :param enctype: (defaults to AES) Cipher to use + :param hashtype: (defaults to SHA256) Hash function to use for signatures + """ + + def __init__(self, topic, host, conf, key=None, key_store=None, + encrypt=None, enctype='AES', hashtype='SHA256'): + + conf.register_group(secure_message_group) + conf.register_opts(secure_message_opts, group='secure_messages') + + self._name = '%s.%s' % (topic, host) + self._key = key + self._conf = conf.secure_messages + self._encrypt = self._conf.encrypt if (encrypt is None) else encrypt + self._crypto = cryptoutils.SymmetricCrypto(enctype, hashtype) + self._hkdf = cryptoutils.HKDF(hashtype) + self._kds = _KDSClient(self._conf.kds_endpoint) + + if self._key is None: + self._key = self._init_key(topic, self._name) + if self._key is None: + err = "Secret Key (or key file) is missing or malformed" + raise SharedKeyNotFound(self._name, err) + + self._key_store = key_store or _KEY_STORE + + def 
_init_key(self, topic, name): + keys = None + if self._conf.secret_keys_file: + with open(self._conf.secret_keys_file, 'r') as f: + keys = f.readlines() + elif self._conf.secret_key: + keys = self._conf.secret_key + + if keys is None: + return None + + for k in keys: + if k[0] == '#': + continue + if ':' not in k: + break + svc, key = k.split(':', 1) + if svc == topic or svc == name: + return base64.b64decode(key) + + return None + + def _split_key(self, key, size): + sig_key = key[:size] + enc_key = key[size:] + return sig_key, enc_key + + def _decode_esek(self, key, source, target, timestamp, esek): + """This function decrypts the esek buffer passed in and returns a + KeyStore to be used to check and decrypt the received message. + + :param key: The key to use to decrypt the ticket (esek) + :param source: The name of the source service + :param target: The name of the target service + :param timestamp: The incoming message timestamp + :param esek: a base64 encoded encrypted block containing a JSON string + """ + rkey = None + + try: + s = self._crypto.decrypt(key, esek) + j = jsonutils.loads(s) + + rkey = base64.b64decode(j['key']) + expiration = j['timestamp'] + j['ttl'] + if j['timestamp'] > timestamp or timestamp > expiration: + raise InvalidExpiredTicket(source, target) + + except Exception: + raise InvalidEncryptedTicket(source, target) + + info = '%s,%s,%s' % (source, target, str(j['timestamp'])) + + sek = self._hkdf.expand(rkey, info, len(key) * 2) + + return self._split_key(sek, len(key)) + + def _get_ticket(self, target): + """This function will check if we already have a SEK for the specified + target in the cache, or will go and try to fetch a new SEK from the key + server. 
+ + :param target: The name of the target service + """ + ticket = self._key_store.get_ticket(self._name, target) + + if ticket is not None: + return ticket + + tkt, expiration = self._kds.get_ticket(self._name, target, + self._crypto, self._key) + + self._key_store.put_ticket(self._name, target, + base64.b64decode(tkt['skey']), + base64.b64decode(tkt['ekey']), + tkt['esek'], expiration) + return self._key_store.get_ticket(self._name, target) + + def encode(self, version, target, json_msg): + """This is the main encoding function. + + It takes a target and a message and returns a tuple consisting of a + JSON serialized metadata object, a JSON serialized (and optionally + encrypted) message, and a signature. + + :param version: the current envelope version + :param target: The name of the target service (usually with hostname) + :param json_msg: a serialized json message object + """ + ticket = self._get_ticket(target) + + metadata = jsonutils.dumps({'source': self._name, + 'destination': target, + 'timestamp': time.time(), + 'nonce': _get_nonce(), + 'esek': ticket.esek, + 'encryption': self._encrypt}) + + message = json_msg + if self._encrypt: + message = self._crypto.encrypt(ticket.ekey, message) + + signature = self._crypto.sign(ticket.skey, + version + metadata + message) + + return (metadata, message, signature) + + def decode(self, version, metadata, message, signature): + """This is the main decoding function. + + It takes a version, metadata, message and signature strings and + returns a tuple with a (decrypted) message and metadata or raises + an exception in case of error. 
+ + :param version: the current envelope version + :param metadata: a JSON serialized object with metadata for validation + :param message: a JSON serialized (base64 encoded encrypted) message + :param signature: a base64 encoded signature + """ + md = jsonutils.loads(metadata) + + check_args = ('source', 'destination', 'timestamp', + 'nonce', 'esek', 'encryption') + for arg in check_args: + if arg not in md: + raise InvalidMetadata('Missing metadata "%s"' % arg) + + if md['destination'] != self._name: + # TODO(simo) handle group keys by checking target + raise UnknownDestinationName(md['destination']) + + try: + skey, ekey = self._decode_esek(self._key, + md['source'], md['destination'], + md['timestamp'], md['esek']) + except InvalidExpiredTicket: + raise + except Exception: + raise InvalidMetadata('Failed to decode ESEK for %s/%s' % ( + md['source'], md['destination'])) + + sig = self._crypto.sign(skey, version + metadata + message) + + if sig != signature: + raise InvalidSignature(md['source'], md['destination']) + + if md['encryption'] is True: + msg = self._crypto.decrypt(ekey, message) + else: + msg = message + + return (md, msg) diff --git a/billingstack/openstack/common/rpc/zmq_receiver.py b/billingstack/openstack/common/rpc/zmq_receiver.py old mode 100755 new mode 100644 index 17f9d06..6fd8398 --- a/billingstack/openstack/common/rpc/zmq_receiver.py +++ b/billingstack/openstack/common/rpc/zmq_receiver.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 OpenStack Foundation diff --git a/billingstack/openstack/common/timeutils.py b/billingstack/openstack/common/timeutils.py index bd60489..aa9f708 100644 --- a/billingstack/openstack/common/timeutils.py +++ b/billingstack/openstack/common/timeutils.py @@ -49,9 +49,9 @@ def parse_isotime(timestr): try: return iso8601.parse_date(timestr) except iso8601.ParseError as e: - raise ValueError(e.message) + raise ValueError(unicode(e)) except TypeError as e: - raise 
ValueError(e.message) + raise ValueError(unicode(e)) def strtime(at=None, fmt=PERFECT_TIME_FORMAT): diff --git a/setup.py b/setup.py index 1e9882d..2a0786a 100644 --- a/setup.py +++ b/setup.py @@ -14,8 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT import setuptools setuptools.setup( - setup_requires=['d2to1>=0.2.10,<0.3', 'pbr>=0.5,<0.6'], - d2to1=True) + setup_requires=['pbr>=0.5.21,<1.0'], + pbr=True) diff --git a/tools/pip-requires b/tools/pip-requires index 6fe90f7..4b763ae 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -1,18 +1,18 @@ -d2to1>=0.2.10,<0.3 -pbr>=0.5.16,<0.6 +Babel>=0.9.6 +pbr>=0.5.21,<1.0 # This file is managed by openstack-depends argparse cliff>=1.4 -eventlet>=0.12.0 +eventlet>=0.13.0 extras -pecan +pecan>=0.2.0 iso8601>=0.1.4 netaddr oslo.config>=1.1.0 -paste -pastedeploy>=1.5.0 +Paste +PasteDeploy>=1.5.0 pycountry -routes>=1.12.3 -stevedore>=0.9 -webob>=1.2.3,<1.3 +Routes>=1.12.3 +stevedore>=0.10 +WebOb>=1.2.3,<1.3 https://bitbucket.org/cdevienne/wsme/get/tip.zip#egg=WSME diff --git a/tools/test-requires b/tools/test-requires index b667164..05e23e8 100644 --- a/tools/test-requires +++ b/tools/test-requires @@ -3,7 +3,7 @@ coverage>=3.6 discover docutils==0.9.1 flake8==2.0 -mock>=0.8.0 +mock>=1.0 mox>=0.5.3 nose nosehtmloutput>=0.0.3 @@ -11,5 +11,5 @@ openstack.nose_plugin>=0.7 python-subunit sphinx>=1.1.2 sphinxcontrib-httpdomain -testrepository>=0.0.13 +testrepository>=0.0.17 unittest2 From 2b757687e38902c50434402bf0b44b7e395c1ce8 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Thu, 22 Aug 2013 09:07:15 +0200 Subject: [PATCH 176/182] Fix WSME requirement to point to stackforge Change-Id: If14b9077666a12054194ca246ebdb6bed106ee96 --- tools/pip-requires | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/pip-requires b/tools/pip-requires index 4b763ae..4b03ff6 100644 --- 
a/tools/pip-requires +++ b/tools/pip-requires @@ -15,4 +15,4 @@ pycountry Routes>=1.12.3 stevedore>=0.10 WebOb>=1.2.3,<1.3 -https://bitbucket.org/cdevienne/wsme/get/tip.zip#egg=WSME +https://github.com/stackforge/wsme/archive/master.zip#egg=WSME From 117c68ee2dd8aca81d18605f894df2eb596ed9d1 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 21 Aug 2013 00:07:30 +0200 Subject: [PATCH 177/182] Switch to taskflow for collector Change-Id: I9a7abed07e99f6061d88782f6ea8bd1c3a0c9a93 --- billingstack/collector/flows/__init__.py | 17 + .../collector/flows/gateway_configuration.py | 115 ++++ .../collector/flows/payment_method.py | 120 ++++ billingstack/collector/service.py | 12 +- billingstack/collector/states.py | 20 + .../collector/storage/impl_sqlalchemy.py | 5 + billingstack/payment_gateway/base.py | 15 +- billingstack/payment_gateway/dummy.py | 14 + billingstack/samples_data/pg_provider.json | 6 +- billingstack/taskflow/__init__.py | 1 + billingstack/taskflow/decorators.py | 97 +++ billingstack/taskflow/exceptions.py | 77 +++ billingstack/taskflow/flow.py | 216 ++++++ billingstack/taskflow/functor_task.py | 95 +++ billingstack/taskflow/graph_utils.py | 80 +++ billingstack/taskflow/patterns/__init__.py | 1 + billingstack/taskflow/patterns/linear_flow.py | 286 ++++++++ .../taskflow/patterns/threaded_flow.py | 636 ++++++++++++++++++ billingstack/taskflow/states.py | 44 ++ billingstack/taskflow/task.py | 77 +++ billingstack/taskflow/utils.py | 532 +++++++++++++++ billingstack/tasks.py | 85 +++ taskflow.conf | 7 + tools/pip-requires | 3 + 24 files changed, 2555 insertions(+), 6 deletions(-) create mode 100644 billingstack/collector/flows/__init__.py create mode 100644 billingstack/collector/flows/gateway_configuration.py create mode 100644 billingstack/collector/flows/payment_method.py create mode 100644 billingstack/collector/states.py create mode 100644 billingstack/taskflow/__init__.py create mode 100644 billingstack/taskflow/decorators.py create mode 100644 
billingstack/taskflow/exceptions.py create mode 100644 billingstack/taskflow/flow.py create mode 100644 billingstack/taskflow/functor_task.py create mode 100644 billingstack/taskflow/graph_utils.py create mode 100644 billingstack/taskflow/patterns/__init__.py create mode 100644 billingstack/taskflow/patterns/linear_flow.py create mode 100644 billingstack/taskflow/patterns/threaded_flow.py create mode 100644 billingstack/taskflow/states.py create mode 100644 billingstack/taskflow/task.py create mode 100644 billingstack/taskflow/utils.py create mode 100644 billingstack/tasks.py create mode 100644 taskflow.conf diff --git a/billingstack/collector/flows/__init__.py b/billingstack/collector/flows/__init__.py new file mode 100644 index 0000000..b2870ed --- /dev/null +++ b/billingstack/collector/flows/__init__.py @@ -0,0 +1,17 @@ +# -*- encoding: utf-8 -*- +# +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/billingstack/collector/flows/gateway_configuration.py b/billingstack/collector/flows/gateway_configuration.py new file mode 100644 index 0000000..0ea7d2d --- /dev/null +++ b/billingstack/collector/flows/gateway_configuration.py @@ -0,0 +1,115 @@ +# -*- encoding: utf-8 -*- +# +# Copyright 2013 Hewlett-Packard Development Company, L.P. 
+# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from billingstack import exceptions +from billingstack import tasks +from billingstack.openstack.common import log +from billingstack.payment_gateway import get_provider +from billingstack.taskflow.patterns import linear_flow, threaded_flow + + +ACTION = 'gateway_configuration:create' + +LOG = log.getLogger(__name__) + + +class EntryCreateTask(tasks.RootTask): + def __init__(self, storage, **kw): + super(EntryCreateTask, self).__init__(**kw) + self.requires.update(['gateway_config']) + self.provides.update(['gateway_config']) + self.storage = storage + + def __call__(self, context, gateway_config): + values = self.storage.create_pg_config(context, gateway_config) + return {'gateway_config': values} + + +class ThreadStartTask(tasks.RootTask): + """ + This is the end of the current flow, we'll fire off a new threaded flow + that does stuff towards the actual Gateway which may include blocking code. 
+ """ + def __init__(self, storage, **kw): + super(ThreadStartTask, self).__init__(**kw) + self.requires.update(['gateway_config']) + self.storage = storage + + def __call__(self, ctxt, gateway_config): + flow = threaded_flow.Flow(ACTION + ':backend') + flow.add(tasks.ValuesInjectTask({'gateway_config': gateway_config})) + flow.add(PrerequirementsTask(self.storage)) + flow.add(BackendVerifyTask(self.storage)) + flow.run(ctxt) + + +class PrerequirementsTask(tasks.RootTask): + """ + Fetch provider information for use in the next task. + """ + def __init__(self, storage, **kw): + super(PrerequirementsTask, self).__init__(**kw) + self.requires.update(['gateway_config']) + self.provides.update([ + 'gateway_config', + 'gateway_provider' + ]) + self.storage = storage + + def __call__(self, ctxt, gateway_config): + gateway_provider = self.storage.get_pg_provider( + gateway_config['providedr_id']) + return { + 'gateway_config': gateway_config, + 'gateway_provider': gateway_provider + } + + +class BackendVerifyTask(tasks.RootTask): + """ + This is the verification task that runs in a threaded flow. + + 1. Load the Provider Plugin via entrypoints + 2. Instantiate the Plugin with the Config + 3. Execute verify_config call + 4. 
Update storage accordingly
+    """
+    def __init__(self, storage, **kw):
+        super(BackendVerifyTask, self).__init__(**kw)
+        self.requires.update(['gateway_config', 'gateway_provider'])
+        self.storage = storage
+
+    def __call__(self, ctxt, gateway_config, gateway_provider):
+        gateway_provider_cls = get_provider(gateway_provider['name'])
+        gateway_provider_obj = gateway_provider_cls(gateway_config)
+        try:
+            gateway_provider_obj.verify_config()
+        except exceptions.ConfigurationError:
+            raise
+
+
+def create_flow(storage, values):
+    flow = linear_flow.Flow(ACTION)
+
+    flow.add(tasks.ValuesInjectTask(
+        {'gateway_config': values},
+        prefix=ACTION + ':initial'))
+
+    entry_task = EntryCreateTask(storage, prefix=ACTION)
+    entry_task_id = flow.add(entry_task)
+
+    return entry_task_id, tasks._attach_debug_listeners(flow)
diff --git a/billingstack/collector/flows/payment_method.py b/billingstack/collector/flows/payment_method.py
new file mode 100644
index 0000000..6f80269
--- /dev/null
+++ b/billingstack/collector/flows/payment_method.py
@@ -0,0 +1,120 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+#
+# Author: Endre Karlson
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from billingstack import tasks +from billingstack.openstack.common import log +from billingstack.payment_gateway import get_provider +from billingstack.taskflow.patterns import linear_flow, threaded_flow + + +ACTION = 'payment_method:create' + +LOG = log.getLogger(__name__) + + +class EntryCreateTask(tasks.RootTask): + """ + Create the initial entry in the database + """ + def __init__(self, storage, **kw): + super(EntryCreateTask, self).__init__(**kw) + self.requires.update(['payment_method']) + self.provides.update(['payment_method']) + self.storage = storage + + def __call__(self, ctxt, payment_method): + values = self.storage.create_payment_method(ctxt, payment_method) + return {'payment_method': values} + + +class ThreadStartTask(tasks.RootTask): + """ + This is the end of the current flow, we'll fire off a new threaded flow + that does stuff towards the actual Gateway which may include blocking code. + + This fires off a new flow that is threaded / greenthreads? + """ + def __init__(self, storage, **kw): + super(ThreadStartTask, self).__init__(**kw) + self.requires.update(['payment_method']) + self.storage = storage + + def __call__(self, ctxt, payment_method): + flow = threaded_flow.Flow(ACTION + ':backend') + flow.add(tasks.ValuesInjectTask({'payment_method': payment_method})) + flow.add(PrerequirementsTask(self.storage)) + flow.add(BackendCreateTask(self.storage)) + flow.run(ctxt) + + +class PrerequirementsTask(tasks.RootTask): + """ + Task to get the config and the provider from the catalog / database. 
+ """ + def __init__(self, storage, **kw): + super(PrerequirementsTask, self).__init__(**kw) + self.requires.update(['payment_method']) + self.provides.update([ + 'payment_method', + 'gateway_config', + 'gateway_provider']) + self.storage = storage + + def __call__(self, ctxt, **kw): + kw['gateway_config'] = self.storage.get_pg_config( + ctxt, kw['payment_method']['provider_config_id']) + + kw['gateway_provider'] = self.storage.get_pg_provider( + ctxt, kw['gateway_config']['provider_id']) + + return kw + + +class BackendCreateTask(tasks.RootTask): + def __init__(self, storage, **kw): + super(BackendCreateTask, self).__init__(**kw) + self.requires.update([ + 'payment_method', + 'gateway_config', + 'gateway_provider']) + self.storage = storage + + def __call__(self, ctxt, payment_method, gateway_config, gateway_provider): + gateway_provider_cls = get_provider(gateway_provider['name']) + gateway_provider_obj = gateway_provider_cls(gateway_config) + + gateway_provider_obj.create_payment_method( + payment_method['customer_id'], + payment_method) + + +def create_flow(storage, payment_method): + """ + The flow for the service to start + """ + flow = linear_flow.Flow(ACTION + ':initial') + + flow.add(tasks.ValuesInjectTask( + {'payment_method': payment_method}, + prefix=ACTION)) + + entry_task = EntryCreateTask(storage, prefix=ACTION) + entry_task_id = flow.add(entry_task) + + flow.add(ThreadStartTask(storage, prefix=ACTION)) + + return entry_task_id, tasks._attach_debug_listeners(flow) diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py index d88aed3..6314de1 100644 --- a/billingstack/collector/service.py +++ b/billingstack/collector/service.py @@ -26,6 +26,8 @@ from billingstack.storage.utils import get_connection from billingstack.central.rpcapi import CentralAPI from billingstack import service as bs_service +from billingstack.collector.flows import ( + gateway_configuration, payment_method) cfg.CONF.import_opt('host', 
'billingstack.netconf') @@ -62,7 +64,10 @@ def list_pg_providers(self, ctxt, **kw): # PGC def create_pg_config(self, ctxt, values): - return self.storage_conn.create_pg_config(ctxt, values) + id_, flow = gateway_configuration.create_flow( + self.storage_conn, values) + flow.run(ctxt) + return flow.results[id_]['gateway_config'] def list_pg_configs(self, ctxt, **kw): return self.storage_conn.list_pg_configs(ctxt, **kw) @@ -78,7 +83,10 @@ def delete_pg_config(self, ctxt, id_): # PM def create_payment_method(self, ctxt, values): - return self.storage_conn.create_payment_method(ctxt, values) + id_, flow = payment_method.create_flow( + self.storage_conn, values) + flow.run(ctxt) + return flow.results[id_]['payment_method'] def list_payment_methods(self, ctxt, **kw): return self.storage_conn.list_payment_methods(ctxt, **kw) diff --git a/billingstack/collector/states.py b/billingstack/collector/states.py new file mode 100644 index 0000000..e9aa9fc --- /dev/null +++ b/billingstack/collector/states.py @@ -0,0 +1,20 @@ +# -*- encoding: utf-8 -*- +# +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+PENDING = u'PENDING' +CREATED = u'CREATED' +INVALID = u'INVALID' diff --git a/billingstack/collector/storage/impl_sqlalchemy.py b/billingstack/collector/storage/impl_sqlalchemy.py index 8db8695..6d06ee7 100644 --- a/billingstack/collector/storage/impl_sqlalchemy.py +++ b/billingstack/collector/storage/impl_sqlalchemy.py @@ -21,6 +21,7 @@ from sqlalchemy.orm import exc, relationship from sqlalchemy.ext.declarative import declarative_base +from billingstack.collector import states from billingstack.collector.storage import Connection, StorageEngine from billingstack.openstack.common import log as logging from billingstack.sqlalchemy.types import JSON, UUID @@ -116,6 +117,8 @@ class PGConfig(BASE, model_base.BaseMixin): onupdate='CASCADE'), nullable=False) + state = Column(Unicode(20), default=states.PENDING) + class PaymentMethod(BASE, model_base.BaseMixin): name = Column(Unicode(255), nullable=False) @@ -132,6 +135,8 @@ class PaymentMethod(BASE, model_base.BaseMixin): provider_config_id = Column(UUID, ForeignKey('pg_config.id', onupdate='CASCADE'), nullable=False) + state = Column(Unicode(20), default=states.PENDING) + class SQLAlchemyEngine(StorageEngine): __plugin_name__ = 'sqlalchemy' diff --git a/billingstack/payment_gateway/base.py b/billingstack/payment_gateway/base.py index 3a3f110..31e4d1b 100644 --- a/billingstack/payment_gateway/base.py +++ b/billingstack/payment_gateway/base.py @@ -17,6 +17,9 @@ class Provider(Plugin): + """ + Base API for Gateway Plugins. + """ __plugin_ns__ = 'billingstack.payment_gateway' __plugin_type__ = 'payment_gateway' @@ -44,6 +47,9 @@ def properties(cls): @classmethod def values(cls): + """ + The values for this provider, used when registering in the catalog. + """ return dict( name=cls.get_plugin_name(), title=cls.__title__, @@ -56,7 +62,14 @@ def get_client(self): """ raise NotImplementedError - @classmethod + def verify_config(self): + """ + Verify a configuration. + + Raise ConfigurationError if invalid config. 
+ """ + raise NotImplementedError + def create_account(self, values): """ Create a new Account diff --git a/billingstack/payment_gateway/dummy.py b/billingstack/payment_gateway/dummy.py index 7cd373e..2896e44 100644 --- a/billingstack/payment_gateway/dummy.py +++ b/billingstack/payment_gateway/dummy.py @@ -16,6 +16,11 @@ from billingstack.payment_gateway.base import Provider +class DummyClient(object): + def __init__(self): + pass + + class DummyProvider(Provider): """ A Stupid Provider that does nothing @@ -32,3 +37,12 @@ def methods(cls): @classmethod def properties(cls): return {"enabled": 0} + + def get_client(self): + return DummyClient() + + def create_payment_method(self, account_id, values): + return True + + def verify_config(self): + return True diff --git a/billingstack/samples_data/pg_provider.json b/billingstack/samples_data/pg_provider.json index 7e65230..0c2db64 100644 --- a/billingstack/samples_data/pg_provider.json +++ b/billingstack/samples_data/pg_provider.json @@ -1,7 +1,7 @@ [ { - "name" : "braintree", - "title" : "BrainTree Payment Gateway", - "description" : "www.braintree.com integration provider" + "name" : "dummy", + "title" : "Dummy Provider", + "description" : "Dummy integration provider" } ] diff --git a/billingstack/taskflow/__init__.py b/billingstack/taskflow/__init__.py new file mode 100644 index 0000000..1f19be5 --- /dev/null +++ b/billingstack/taskflow/__init__.py @@ -0,0 +1 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 diff --git a/billingstack/taskflow/decorators.py b/billingstack/taskflow/decorators.py new file mode 100644 index 0000000..c5320df --- /dev/null +++ b/billingstack/taskflow/decorators.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import functools + +from billingstack.taskflow import functor_task +from billingstack.taskflow import utils + + +def wraps(fn): + """This will not be needed in python 3.2 or greater which already has this + built-in to its functools.wraps method. + """ + + def wrapper(f): + f = functools.wraps(fn)(f) + f.__wrapped__ = getattr(fn, '__wrapped__', fn) + return f + + return wrapper + + +def locked(*args, **kwargs): + + def decorator(f): + attr_name = kwargs.get('lock', '_lock') + + @wraps(f) + def wrapper(*args, **kwargs): + lock = getattr(args[0], attr_name) + with lock: + return f(*args, **kwargs) + + return wrapper + + # This is needed to handle when the decorator has args or the decorator + # doesn't have args, python is rather weird here... 
+ if kwargs or not args: + return decorator + else: + if len(args) == 1: + return decorator(args[0]) + else: + return decorator + + +def _original_function(fun): + """Get original function from static or class method""" + if isinstance(fun, staticmethod): + return fun.__get__(object()) + elif isinstance(fun, classmethod): + return fun.__get__(object()).im_func + return fun + + +def task(*args, **kwargs): + """Decorates a given function so that it can be used as a task""" + + def decorator(f): + def task_factory(execute_with, **factory_kwargs): + merged = kwargs.copy() + merged.update(factory_kwargs) + # NOTE(imelnikov): we can't capture f here because for + # bound methods and bound class methods the object it + # is bound to is yet unknown at the moment + return functor_task.FunctorTask(execute_with, **merged) + w_f = _original_function(f) + setattr(w_f, utils.TASK_FACTORY_ATTRIBUTE, task_factory) + return f + + # This is needed to handle when the decorator has args or the decorator + # doesn't have args, python is rather weird here... + if kwargs: + if args: + raise TypeError('task decorator takes 0 positional arguments,' + '%s given' % len(args)) + return decorator + else: + if len(args) == 1: + return decorator(args[0]) + else: + return decorator diff --git a/billingstack/taskflow/exceptions.py b/billingstack/taskflow/exceptions.py new file mode 100644 index 0000000..7f572ff --- /dev/null +++ b/billingstack/taskflow/exceptions.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +class TaskFlowException(Exception): + """Base class for exceptions emitted from this library.""" + pass + + +class Duplicate(TaskFlowException): + """Raised when a duplicate entry is found.""" + pass + + +class StorageError(TaskFlowException): + """Raised when logbook can not be read/saved/deleted.""" + + def __init__(self, message, cause=None): + super(StorageError, self).__init__(message) + self.cause = cause + + +class NotFound(TaskFlowException): + """Raised when some entry in some object doesn't exist.""" + pass + + +class AlreadyExists(TaskFlowException): + """Raised when some entry in some object already exists.""" + pass + + +class ClosedException(TaskFlowException): + """Raised when an access on a closed object occurs.""" + pass + + +class InvalidStateException(TaskFlowException): + """Raised when a task/job/workflow is in an invalid state when an + operation is attempting to apply to said task/job/workflow. 
+ """ + pass + + +class UnclaimableJobException(TaskFlowException): + """Raised when a job can not be claimed.""" + pass + + +class JobNotFound(TaskFlowException): + """Raised when a job entry can not be found.""" + pass + + +class MissingDependencies(InvalidStateException): + """Raised when a entity has dependencies that can not be satisified.""" + message = ("%(who)s requires %(requirements)s but no other entity produces" + " said requirements") + + def __init__(self, who, requirements): + message = self.message % {'who': who, 'requirements': requirements} + super(MissingDependencies, self).__init__(message) diff --git a/billingstack/taskflow/flow.py b/billingstack/taskflow/flow.py new file mode 100644 index 0000000..e295574 --- /dev/null +++ b/billingstack/taskflow/flow.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import abc +import threading + +from billingstack.openstack.common import uuidutils + +from billingstack.taskflow import exceptions as exc +from billingstack.taskflow import states +from billingstack.taskflow import utils + + +class Flow(object): + """The base abstract class of all flow implementations. + + It provides a set of parents to flows that have a concept of parent flows + as well as a state and state utility functions to the deriving classes. 
It + also provides a name and an identifier (uuid or other) to the flow so that + it can be uniquely identifed among many flows. + + Flows are expected to provide (if desired) the following methods: + - add + - add_many + - interrupt + - reset + - rollback + - run + - soft_reset + """ + + __metaclass__ = abc.ABCMeta + + # Common states that certain actions can be performed in. If the flow + # is not in these sets of states then it is likely that the flow operation + # can not succeed. + RESETTABLE_STATES = set([ + states.INTERRUPTED, + states.SUCCESS, + states.PENDING, + states.FAILURE, + ]) + SOFT_RESETTABLE_STATES = set([ + states.INTERRUPTED, + ]) + UNINTERRUPTIBLE_STATES = set([ + states.FAILURE, + states.SUCCESS, + states.PENDING, + ]) + RUNNABLE_STATES = set([ + states.PENDING, + ]) + + def __init__(self, name, parents=None, uuid=None): + self._name = str(name) + # The state of this flow. + self._state = states.PENDING + # If this flow has a parent flow/s which need to be reverted if + # this flow fails then please include them here to allow this child + # to call the parents... + if parents: + self.parents = tuple(parents) + else: + self.parents = tuple([]) + # Any objects that want to listen when a wf/task starts/stops/completes + # or errors should be registered here. This can be used to monitor + # progress and record tasks finishing (so that it becomes possible to + # store the result of a task in some persistent or semi-persistent + # storage backend). + self.notifier = utils.TransitionNotifier() + self.task_notifier = utils.TransitionNotifier() + # Assign this flow a unique identifer. + if uuid: + self._id = str(uuid) + else: + self._id = uuidutils.generate_uuid() + # Ensure we can not change the state at the same time in 2 different + # threads. 
+ self._state_lock = threading.RLock() + + @property + def name(self): + """A non-unique name for this flow (human readable)""" + return self._name + + @property + def uuid(self): + return self._id + + @property + def state(self): + """Provides a read-only view of the flow state.""" + return self._state + + def _change_state(self, context, new_state, check_func=None, notify=True): + old_state = None + changed = False + with self._state_lock: + if self.state != new_state: + if (not check_func or + (check_func and check_func(self.state))): + changed = True + old_state = self.state + self._state = new_state + # Don't notify while holding the lock so that the reciever of said + # notifications can actually perform operations on the given flow + # without getting into deadlock. + if notify and changed: + self.notifier.notify(self.state, details={ + 'context': context, + 'flow': self, + 'old_state': old_state, + }) + return changed + + def __str__(self): + lines = ["Flow: %s" % (self.name)] + lines.append("%s" % (self.uuid)) + lines.append("%s" % (len(self.parents))) + lines.append("%s" % (self.state)) + return "; ".join(lines) + + @abc.abstractmethod + def add(self, task): + """Adds a given task to this flow. + + Returns the uuid that is associated with the task for later operations + before and after it is ran. + """ + raise NotImplementedError() + + def add_many(self, tasks): + """Adds many tasks to this flow. + + Returns a list of uuids (one for each task added). + """ + uuids = [] + for t in tasks: + uuids.append(self.add(t)) + return uuids + + def interrupt(self): + """Attempts to interrupt the current flow and any tasks that are + currently not running in the flow. + + Returns how many tasks were interrupted (if any). 
+ """ + def check(): + if self.state in self.UNINTERRUPTIBLE_STATES: + raise exc.InvalidStateException(("Can not interrupt when" + " in state %s") % self.state) + + check() + with self._state_lock: + check() + self._change_state(None, states.INTERRUPTED) + return 0 + + def reset(self): + """Fully resets the internal state of this flow, allowing for the flow + to be ran again. + + Note: Listeners are also reset. + """ + def check(): + if self.state not in self.RESETTABLE_STATES: + raise exc.InvalidStateException(("Can not reset when" + " in state %s") % self.state) + + check() + with self._state_lock: + check() + self.notifier.reset() + self.task_notifier.reset() + self._change_state(None, states.PENDING) + + def soft_reset(self): + """Partially resets the internal state of this flow, allowing for the + flow to be ran again from an interrupted state. + """ + def check(): + if self.state not in self.SOFT_RESETTABLE_STATES: + raise exc.InvalidStateException(("Can not soft reset when" + " in state %s") % self.state) + + check() + with self._state_lock: + check() + self._change_state(None, states.PENDING) + + @abc.abstractmethod + def run(self, context, *args, **kwargs): + """Executes the workflow.""" + raise NotImplementedError() + + def rollback(self, context, cause): + """Performs rollback of this workflow and any attached parent workflows + if present. + """ + pass diff --git a/billingstack/taskflow/functor_task.py b/billingstack/taskflow/functor_task.py new file mode 100644 index 0000000..2f834e6 --- /dev/null +++ b/billingstack/taskflow/functor_task.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved. +# Copyright (C) 2013 AT&T Labs Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import inspect + +from billingstack.taskflow import task as base + +# These arguments are ones that we will skip when parsing for requirements +# for a function to operate (when used as a task). +AUTO_ARGS = ('self', 'context', 'cls') + + +def _filter_arg(arg): + if arg in AUTO_ARGS: + return False + # In certain decorator cases it seems like we get the function to be + # decorated as an argument, we don't want to take that as a real argument. + if not isinstance(arg, basestring): + return False + return True + + +class FunctorTask(base.Task): + """Adaptor to make task from a callable + + Take any callable and make a task from it. 
+ """ + @staticmethod + def _get_callable_name(execute_with): + """Generate a name from callable""" + im_class = getattr(execute_with, 'im_class', None) + if im_class is not None: + parts = (im_class.__module__, im_class.__name__, + execute_with.__name__) + else: + parts = (execute_with.__module__, execute_with.__name__) + return '.'.join(parts) + + def __init__(self, execute_with, **kwargs): + """Initialize FunctorTask instance with given callable and kwargs + + :param execute_with: the callable + :param kwargs: reserved keywords (all optional) are + name: name of the task, default None (auto generate) + task_id: id of the task, default None (auto generate) + revert_with: the callable to revert, default None + version: version of the task, default Task's version 1.0 + optionals: optionals of the task, default () + provides: provides of the task, default () + requires: requires of the task, default () + auto_extract: auto extract execute_with's args and put it into + requires, default True + """ + name = kwargs.pop('name', None) + task_id = kwargs.pop('task_id', None) + if name is None: + name = self._get_callable_name(execute_with) + super(FunctorTask, self).__init__(name, task_id) + self._execute_with = execute_with + self._revert_with = kwargs.pop('revert_with', None) + self.version = kwargs.pop('version', self.version) + self.optional.update(kwargs.pop('optional', ())) + self.provides.update(kwargs.pop('provides', ())) + self.requires.update(kwargs.pop('requires', ())) + if kwargs.pop('auto_extract', True): + f_args = inspect.getargspec(execute_with).args + self.requires.update([arg for arg in f_args if _filter_arg(arg)]) + if kwargs: + raise TypeError('__init__() got an unexpected keyword argument %r' + % kwargs.keys[0]) + + def __call__(self, *args, **kwargs): + return self._execute_with(*args, **kwargs) + + def revert(self, *args, **kwargs): + if self._revert_with: + return self._revert_with(*args, **kwargs) + else: + return None diff --git 
a/billingstack/taskflow/graph_utils.py b/billingstack/taskflow/graph_utils.py new file mode 100644 index 0000000..005924b --- /dev/null +++ b/billingstack/taskflow/graph_utils.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from billingstack.taskflow import exceptions as exc + + +LOG = logging.getLogger(__name__) + + +def connect(graph, infer_key='infer', auto_reason='auto', discard_func=None): + """Connects a graphs runners to other runners in the graph which provide + outputs for each runners requirements. + """ + + if len(graph) == 0: + return + if discard_func: + for (u, v, e_data) in graph.edges(data=True): + if discard_func(u, v, e_data): + graph.remove_edge(u, v) + for (r, r_data) in graph.nodes_iter(data=True): + requires = set(r.requires) + + # Find the ones that have already been attached manually. + manual_providers = {} + if requires: + incoming = [e[0] for e in graph.in_edges_iter([r])] + for r2 in incoming: + fulfills = requires & r2.provides + if fulfills: + LOG.debug("%s is a manual provider of %s for %s", + r2, fulfills, r) + for k in fulfills: + manual_providers[k] = r2 + requires.remove(k) + + # Anything leftover that we must find providers for?? 
+ auto_providers = {} + if requires and r_data.get(infer_key): + for r2 in graph.nodes_iter(): + if r is r2: + continue + fulfills = requires & r2.provides + if fulfills: + graph.add_edge(r2, r, reason=auto_reason) + LOG.debug("Connecting %s as a automatic provider for" + " %s for %s", r2, fulfills, r) + for k in fulfills: + auto_providers[k] = r2 + requires.remove(k) + if not requires: + break + + # Anything still leftover?? + if requires: + # Ensure its in string format, since join will puke on + # things that are not strings. + missing = ", ".join(sorted([str(s) for s in requires])) + raise exc.MissingDependencies(r, missing) + else: + r.providers = {} + r.providers.update(auto_providers) + r.providers.update(manual_providers) diff --git a/billingstack/taskflow/patterns/__init__.py b/billingstack/taskflow/patterns/__init__.py new file mode 100644 index 0000000..1f19be5 --- /dev/null +++ b/billingstack/taskflow/patterns/__init__.py @@ -0,0 +1 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 diff --git a/billingstack/taskflow/patterns/linear_flow.py b/billingstack/taskflow/patterns/linear_flow.py new file mode 100644 index 0000000..f25feed --- /dev/null +++ b/billingstack/taskflow/patterns/linear_flow.py @@ -0,0 +1,286 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import collections +import functools +import logging +import threading + +from billingstack.openstack.common import excutils + +from billingstack.taskflow import decorators +from billingstack.taskflow import exceptions as exc +from billingstack.taskflow import states +from billingstack.taskflow import utils + +from billingstack.taskflow import flow + +LOG = logging.getLogger(__name__) + + +class Flow(flow.Flow): + """"A linear chain of tasks that can be applied in order as one unit and + rolled back as one unit using the reverse order that the tasks have + been applied in. + + Note(harlowja): Each task in the chain must have requirements + which are satisfied by the previous task/s in the chain. + """ + + def __init__(self, name, parents=None, uuid=None): + super(Flow, self).__init__(name, parents, uuid) + # The tasks which have been applied will be collected here so that they + # can be reverted in the correct order on failure. + self._accumulator = utils.RollbackAccumulator() + # Tasks results are stored here. Lookup is by the uuid that was + # returned from the add function. + self.results = {} + # The previously left off iterator that can be used to resume from + # the last task (if interrupted and soft-reset). + self._leftoff_at = None + # All runners to run are collected here. + self._runners = [] + self._connected = False + self._lock = threading.RLock() + # The resumption strategy to use. + self.resumer = None + + @decorators.locked + def add(self, task): + """Adds a given task to this flow.""" + assert isinstance(task, collections.Callable) + r = utils.AOTRunner(task) + r.runs_before = list(reversed(self._runners)) + self._runners.append(r) + self._reset_internals() + return r.uuid + + def _reset_internals(self): + self._connected = False + self._leftoff_at = None + + def _associate_providers(self, runner): + # Ensure that some previous task provides this input. 
+ who_provides = {} + task_requires = runner.requires + for r in task_requires: + provider = None + for before_me in runner.runs_before: + if r in before_me.provides: + provider = before_me + break + if provider: + who_provides[r] = provider + # Ensure that the last task provides all the needed input for this + # task to run correctly. + missing_requires = task_requires - set(who_provides.keys()) + if missing_requires: + raise exc.MissingDependencies(runner, sorted(missing_requires)) + runner.providers.update(who_provides) + + def __str__(self): + lines = ["LinearFlow: %s" % (self.name)] + lines.append("%s" % (self.uuid)) + lines.append("%s" % (len(self._runners))) + lines.append("%s" % (len(self.parents))) + lines.append("%s" % (self.state)) + return "; ".join(lines) + + @decorators.locked + def remove(self, uuid): + index_removed = -1 + for (i, r) in enumerate(self._runners): + if r.uuid == uuid: + index_removed = i + break + if index_removed == -1: + raise ValueError("No runner found with uuid %s" % (uuid)) + else: + removed = self._runners.pop(index_removed) + self._reset_internals() + # Go and remove it from any runner after the removed runner since + # those runners may have had an attachment to it. 
+ for r in self._runners[index_removed:]: + try: + r.runs_before.remove(removed) + except (IndexError, ValueError): + pass + + def __len__(self): + return len(self._runners) + + def _connect(self): + if self._connected: + return self._runners + for r in self._runners: + r.providers = {} + for r in reversed(self._runners): + self._associate_providers(r) + self._connected = True + return self._runners + + def _ordering(self): + return iter(self._connect()) + + @decorators.locked + def run(self, context, *args, **kwargs): + + def abort_if(current_state, ok_states): + if current_state not in ok_states: + return False + return True + + def resume_it(): + if self._leftoff_at is not None: + return ([], self._leftoff_at) + if self.resumer: + (finished, leftover) = self.resumer(self, self._ordering()) + else: + finished = [] + leftover = self._ordering() + return (finished, leftover) + + start_check_functor = functools.partial(abort_if, + ok_states=self.RUNNABLE_STATES) + if not self._change_state(context, states.STARTED, + check_func=start_check_functor): + return + try: + those_finished, leftover = resume_it() + except Exception: + with excutils.save_and_reraise_exception(): + self._change_state(context, states.FAILURE) + + def run_it(runner, failed=False, result=None, simulate_run=False): + try: + # Add the task to be rolled back *immediately* so that even if + # the task fails while producing results it will be given a + # chance to rollback. + rb = utils.RollbackTask(context, runner.task, result=None) + self._accumulator.add(rb) + self.task_notifier.notify(states.STARTED, details={ + 'context': context, + 'flow': self, + 'runner': runner, + }) + if not simulate_run: + result = runner(context, *args, **kwargs) + else: + if failed: + # TODO(harlowja): make this configurable?? + # If we previously failed, we want to fail again at + # the same place. 
+ if not result: + # If no exception or exception message was provided + # or captured from the previous run then we need to + # form one for this task. + result = "%s failed running." % (runner.task) + if isinstance(result, basestring): + result = exc.InvalidStateException(result) + if not isinstance(result, Exception): + LOG.warn("Can not raise a non-exception" + " object: %s", result) + result = exc.InvalidStateException() + raise result + # Adjust the task result in the accumulator before + # notifying others that the task has finished to + # avoid the case where a listener might throw an + # exception. + rb.result = result + runner.result = result + self.results[runner.uuid] = result + self.task_notifier.notify(states.SUCCESS, details={ + 'context': context, + 'flow': self, + 'runner': runner, + }) + except Exception as e: + runner.result = e + cause = utils.FlowFailure(runner, self, e) + with excutils.save_and_reraise_exception(): + # Notify any listeners that the task has errored. + self.task_notifier.notify(states.FAILURE, details={ + 'context': context, + 'flow': self, + 'runner': runner, + }) + self.rollback(context, cause) + + run_check_functor = functools.partial(abort_if, + ok_states=[states.STARTED, + states.RESUMING]) + if len(those_finished): + if not self._change_state(context, states.RESUMING, + check_func=run_check_functor): + return + for (r, details) in those_finished: + # Fake running the task so that we trigger the same + # notifications and state changes (and rollback that + # would have happened in a normal flow). 
+ failed = states.FAILURE in details.get('states', []) + result = details.get('result') + run_it(r, failed=failed, result=result, simulate_run=True) + + self._leftoff_at = leftover + if not self._change_state(context, states.RUNNING, + check_func=run_check_functor): + return + + was_interrupted = False + for r in leftover: + r.reset() + run_it(r) + if self.state == states.INTERRUPTED: + was_interrupted = True + break + + if not was_interrupted: + # Only gets here if everything went successfully. + self._change_state(context, states.SUCCESS) + self._leftoff_at = None + + @decorators.locked + def reset(self): + super(Flow, self).reset() + self.results = {} + self.resumer = None + self._accumulator.reset() + self._reset_internals() + + @decorators.locked + def rollback(self, context, cause): + # Performs basic task by task rollback by going through the reverse + # order that tasks have finished and asking said task to undo whatever + # it has done. If this flow has any parent flows then they will + # also be called to rollback any tasks said parents contain. + # + # Note(harlowja): if a flow can more simply revert a whole set of + # tasks via a simpler command then it can override this method to + # accomplish that. + # + # For example, if each task was creating a file in a directory, then + # it's easier to just remove the directory than to ask each task to + # delete its file individually. + self._change_state(context, states.REVERTING) + try: + self._accumulator.rollback(cause) + finally: + self._change_state(context, states.FAILURE) + # Rollback any parents flows if they exist... 
+ for p in self.parents: + p.rollback(context, cause) diff --git a/billingstack/taskflow/patterns/threaded_flow.py b/billingstack/taskflow/patterns/threaded_flow.py new file mode 100644 index 0000000..02c2ceb --- /dev/null +++ b/billingstack/taskflow/patterns/threaded_flow.py @@ -0,0 +1,636 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from billingstack.taskflow import exceptions as exc +from billingstack.taskflow import flow +from billingstack.taskflow import graph_utils +from billingstack.taskflow import states +from billingstack.taskflow import utils + +import collections +import functools +import logging +import sys +import threading +import weakref + +from networkx.algorithms import cycles +from networkx.classes import digraph + +LOG = logging.getLogger(__name__) + + +class DependencyTimeout(exc.InvalidStateException): + """When running in parallel a task has the ability to timeout waiting for + its dependent tasks to finish, this will be raised when that occurs. + """ + pass + + +class Flow(flow.Flow): + """This flow pattern establishes tasks into a graph where each task is a + node in the graph and dependencies between tasks are edges in the graph. + When running (in parallel) each task will only be activated when its + dependencies have been satisified. 
When a graph is split into two or more + segments, both of those segments will be ran in parallel. + + For example lets take this small little *somewhat complicated* graph: + + X--Y--C--D + | | + A--B-- --G-- + | | |--Z(end) + E--F-- --H-- + + In this flow the following will be ran in parallel at start: + 1. X--Y + 2. A--B + 3. E--F + Note the C--D nodes will not be able to run until [Y,B,F] has completed. + After C--D completes the following will be ran in parallel: + 1. G + 2. H + Then finally Z will run (after [G,H] complete) and the flow will then have + finished executing. + """ + MUTABLE_STATES = set([states.PENDING, states.FAILURE, states.SUCCESS]) + REVERTABLE_STATES = set([states.FAILURE, states.INCOMPLETE]) + CANCELLABLE_STATES = set([states.PENDING, states.RUNNING]) + + def __init__(self, name): + super(Flow, self).__init__(name) + self._graph = digraph.DiGraph(name=name) + self._run_lock = threading.RLock() + self._cancel_lock = threading.RLock() + self._mutate_lock = threading.RLock() + # NOTE(harlowja) The locking order in this list actually matters since + # we need to make sure that users of this list do not get deadlocked + # by out of order lock access. + self._core_locks = [ + self._run_lock, + self._mutate_lock, + self._cancel_lock, + ] + self._run_locks = [ + self._run_lock, + self._mutate_lock, + ] + self._cancel_locks = [ + self._cancel_lock, + ] + self.results = {} + self.resumer = None + + def __str__(self): + lines = ["ParallelFlow: %s" % (self.name)] + lines.append("%s" % (self._graph.number_of_nodes())) + lines.append("%s" % (self.state)) + return "; ".join(lines) + + def soft_reset(self): + # The way this flow works does not allow (at the current moment) for + # you to suspend the threads and then resume them at a later time, + # instead it only supports interruption (which will cancel the threads) + # and then a full reset. 
+ raise NotImplementedError("Threaded flow does not currently support" + " soft resetting, please try using" + " reset() instead") + + def interrupt(self): + """Currently we can not pause threads and then resume them later, not + really thinking that we should likely ever do this. + """ + raise NotImplementedError("Threaded flow does not currently support" + " interruption, please try using" + " cancel() instead") + + def reset(self): + # All locks are used so that resets can not happen while running or + # cancelling or modifying. + with utils.MultiLock(self._core_locks): + super(Flow, self).reset() + self.results = {} + self.resumer = None + + def cancel(self): + + def check(): + if self.state not in self.CANCELLABLE_STATES: + raise exc.InvalidStateException("Can not attempt cancellation" + " when in state %s" % + self.state) + + check() + cancelled = 0 + was_empty = False + + # We don't lock the other locks so that the flow can be cancelled while + # running. Further state management logic is then used while running + # to verify that the flow should still be running when it has been + # cancelled. + with utils.MultiLock(self._cancel_locks): + check() + if len(self._graph) == 0: + was_empty = True + else: + for r in self._graph.nodes_iter(): + try: + if r.cancel(blocking=False): + cancelled += 1 + except exc.InvalidStateException: + pass + if cancelled or was_empty: + self._change_state(None, states.CANCELLED) + + return cancelled + + def _find_uuid(self, uuid): + # Finds the runner for the given uuid (or returns none) + for r in self._graph.nodes_iter(): + if r.uuid == uuid: + return r + return None + + def add(self, task, timeout=None, infer=True): + """Adds a task to the given flow using the given timeout which will be + used a the timeout to wait for dependencies (if any) to be + fulfilled. 
+ """ + def check(): + if self.state not in self.MUTABLE_STATES: + raise exc.InvalidStateException("Flow is currently in a" + " non-mutable %s state" % + (self.state)) + + # Ensure that we do a quick check to see if we can even perform this + # addition before we go about actually acquiring the lock to perform + # the actual addition. + check() + + # All locks must be acquired so that modifications can not be made + # while running, cancelling or performing a simultaneous mutation. + with utils.MultiLock(self._core_locks): + check() + runner = ThreadRunner(task, self, timeout) + self._graph.add_node(runner, infer=infer) + return runner.uuid + + def _connect(self): + """Infers and connects the edges of the given tasks by examining the + associated tasks provides and requires attributes and connecting tasks + that require items to tasks that produce said items. + """ + + # Disconnect all edges not manually created before we attempt to infer + # them so that we don't retain edges that are invalid. + def disconnect_non_user(u, v, e_data): + if e_data and e_data.get('reason') != 'manual': + return True + return False + + # Link providers to requirers. 
+ graph_utils.connect(self._graph, + discard_func=disconnect_non_user) + + # Connect the successors & predecessors and related siblings + for r in self._graph.nodes_iter(): + r._predecessors = [] + r._successors = [] + for (r2, _me) in self._graph.in_edges_iter([r]): + r._predecessors.append(r2) + for (_me, r2) in self._graph.out_edges_iter([r]): + r._successors.append(r2) + r.siblings = [] + for r2 in self._graph.nodes_iter(): + if r2 is r or r2 in r._predecessors or r2 in r._successors: + continue + r._siblings.append(r2) + + def add_many(self, tasks): + """Adds a list of tasks to the flow.""" + + def check(): + if self.state not in self.MUTABLE_STATES: + raise exc.InvalidStateException("Flow is currently in a" + " non-mutable state %s" + % (self.state)) + + # Ensure that we do a quick check to see if we can even perform this + # addition before we go about actually acquiring the lock. + check() + + # All locks must be acquired so that modifications can not be made + # while running, cancelling or performing a simultaneous mutation. 
+ with utils.MultiLock(self._core_locks): + check() + added = [] + for t in tasks: + added.append(self.add(t)) + return added + + def add_dependency(self, provider_uuid, consumer_uuid): + """Manually adds a dependency between a provider and a consumer.""" + + def check_and_fetch(): + if self.state not in self.MUTABLE_STATES: + raise exc.InvalidStateException("Flow is currently in a" + " non-mutable state %s" + % (self.state)) + provider = self._find_uuid(provider_uuid) + if not provider or not self._graph.has_node(provider): + raise exc.InvalidStateException("Can not add a dependency " + "from unknown uuid %s" % + (provider_uuid)) + consumer = self._find_uuid(consumer_uuid) + if not consumer or not self._graph.has_node(consumer): + raise exc.InvalidStateException("Can not add a dependency " + "to unknown uuid %s" + % (consumer_uuid)) + if provider is consumer: + raise exc.InvalidStateException("Can not add a dependency " + "to loop via uuid %s" + % (consumer_uuid)) + return (provider, consumer) + + check_and_fetch() + + # All locks must be acquired so that modifications can not be made + # while running, cancelling or performing a simultaneous mutation. 
+ with utils.MultiLock(self._core_locks): + (provider, consumer) = check_and_fetch() + self._graph.add_edge(provider, consumer, reason='manual') + LOG.debug("Connecting %s as a manual provider for %s", + provider, consumer) + + def run(self, context, *args, **kwargs): + """Executes the given flow using the given context and args/kwargs.""" + + def abort_if(current_state, ok_states): + if current_state in (states.CANCELLED,): + return False + if current_state not in ok_states: + return False + return True + + def check(): + if self.state not in self.RUNNABLE_STATES: + raise exc.InvalidStateException("Flow is currently unable " + "to be ran in state %s" + % (self.state)) + + def connect_and_verify(): + """Do basic sanity tests on the graph structure.""" + if len(self._graph) == 0: + return + self._connect() + degrees = [g[1] for g in self._graph.in_degree_iter()] + zero_degrees = [d for d in degrees if d == 0] + if not zero_degrees: + # If every task depends on something else to produce its input + # then we will be in a deadlock situation. + raise exc.InvalidStateException("No task has an in-degree" + " of zero") + self_loops = self._graph.nodes_with_selfloops() + if self_loops: + # A task that has a dependency on itself will never be able + # to run. + raise exc.InvalidStateException("%s tasks have been detected" + " with dependencies on" + " themselves" % + len(self_loops)) + simple_cycles = len(cycles.recursive_simple_cycles(self._graph)) + if simple_cycles: + # A task loop will never be able to run, unless it somehow + # breaks that loop. 
+ raise exc.InvalidStateException("%s tasks have been detected" + " with dependency loops" % + simple_cycles) + + def run_it(result_cb, args, kwargs): + check_runnable = functools.partial(abort_if, + ok_states=self.RUNNABLE_STATES) + if self._change_state(context, states.RUNNING, + check_func=check_runnable): + self.results = {} + if len(self._graph) == 0: + return + for r in self._graph.nodes_iter(): + r.reset() + r._result_cb = result_cb + executor = utils.ThreadGroupExecutor() + for r in self._graph.nodes_iter(): + executor.submit(r, *args, **kwargs) + executor.await_termination() + + def trigger_rollback(failures): + if not failures: + return + causes = [] + for r in failures: + causes.append(utils.FlowFailure(r, self, + r.exc, r.exc_info)) + try: + self.rollback(context, causes) + except exc.InvalidStateException: + pass + finally: + # TODO(harlowja): re-raise a combined exception when + # there are more than one failures?? + for f in failures: + if all(f.exc_info): + raise f.exc_info[0], f.exc_info[1], f.exc_info[2] + + def handle_results(): + # Isolate each runner state into groups so that we can easily tell + # which ones failed, cancelled, completed... 
+ groups = collections.defaultdict(list) + for r in self._graph.nodes_iter(): + groups[r.state].append(r) + for r in self._graph.nodes_iter(): + if r not in groups.get(states.FAILURE, []) and r.has_ran(): + self.results[r.uuid] = r.result + if groups[states.FAILURE]: + self._change_state(context, states.FAILURE) + trigger_rollback(groups[states.FAILURE]) + elif (groups[states.CANCELLED] or groups[states.PENDING] + or groups[states.TIMED_OUT] or groups[states.STARTED]): + self._change_state(context, states.INCOMPLETE) + else: + check_ran = functools.partial(abort_if, + ok_states=[states.RUNNING]) + self._change_state(context, states.SUCCESS, + check_func=check_ran) + + def get_resumer_cb(): + if not self.resumer: + return None + (ran, _others) = self.resumer(self, self._graph.nodes_iter()) + + def fetch_results(runner): + for (r, metadata) in ran: + if r is runner: + return (True, metadata.get('result')) + return (False, None) + + result_cb = fetch_results + return result_cb + + args = [context] + list(args) + check() + + # Only acquire the run lock (but use further state checking) and the + # mutation lock to stop simultaneous running and simultaneous mutating + # which are not allowed on a running flow. Allow simultaneous cancel + # by performing repeated state checking while running. + with utils.MultiLock(self._run_locks): + check() + connect_and_verify() + try: + run_it(get_resumer_cb(), args, kwargs) + finally: + handle_results() + + def rollback(self, context, cause): + """Rolls back all tasks that are *not* still pending or cancelled.""" + + def check(): + if self.state not in self.REVERTABLE_STATES: + raise exc.InvalidStateException("Flow is currently unable " + "to be rolled back in " + "state %s" % (self.state)) + + check() + + # All locks must be acquired so that modifications can not be made + # while another entity is running, rolling-back, cancelling or + # performing a mutation operation. 
+ with utils.MultiLock(self._core_locks): + check() + accum = utils.RollbackAccumulator() + for r in self._graph.nodes_iter(): + if r.has_ran(): + accum.add(utils.RollbackTask(context, r.task, r.result)) + try: + self._change_state(context, states.REVERTING) + accum.rollback(cause) + finally: + self._change_state(context, states.FAILURE) + + +class ThreadRunner(utils.Runner): + """A helper class that will use a countdown latch to avoid calling its + callable object until said countdown latch has emptied. After it has + been emptied the predecessor tasks will be examined for dependent results + and said results will then be provided to call the runners callable + object. + + TODO(harlowja): this could be a 'future' like object in the future since it + is starting to have the same purpose and usage (in a way). Likely switch + this over to the task details object or a subclass of it??? + """ + RESETTABLE_STATES = set([states.PENDING, states.SUCCESS, states.FAILURE, + states.CANCELLED]) + RUNNABLE_STATES = set([states.PENDING]) + CANCELABLE_STATES = set([states.PENDING]) + SUCCESS_STATES = set([states.SUCCESS]) + CANCEL_SUCCESSORS_WHEN = set([states.FAILURE, states.CANCELLED, + states.TIMED_OUT]) + NO_RAN_STATES = set([states.CANCELLED, states.PENDING, states.TIMED_OUT, + states.RUNNING]) + + def __init__(self, task, flow, timeout): + super(ThreadRunner, self).__init__(task) + # Use weak references to give the GC a break. + self._flow = weakref.proxy(flow) + self._notifier = flow.task_notifier + self._timeout = timeout + self._state = states.PENDING + self._run_lock = threading.RLock() + # Use the flows state lock so that state notifications are not sent + # simultaneously for a given flow. + self._state_lock = flow._state_lock + self._cancel_lock = threading.RLock() + self._latch = utils.CountDownLatch() + # Any related family. + self._predecessors = [] + self._successors = [] + self._siblings = [] + # Ensure we capture any exceptions that may have been triggered. 
+ self.exc = None + self.exc_info = (None, None, None) + # This callback will be called before the underlying task is actually + # returned and it should either return a tuple of (has_result, result) + self._result_cb = None + + @property + def state(self): + return self._state + + def has_ran(self): + if self.state in self.NO_RAN_STATES: + return False + return True + + def _change_state(self, context, new_state): + old_state = None + changed = False + with self._state_lock: + if self.state != new_state: + old_state = self.state + self._state = new_state + changed = True + # Don't notify while holding the lock so that the reciever of said + # notifications can actually perform operations on the given runner + # without getting into deadlock. + if changed and self._notifier: + self._notifier.notify(self.state, details={ + 'context': context, + 'flow': self._flow, + 'old_state': old_state, + 'runner': self, + }) + + def cancel(self, blocking=True): + + def check(): + if self.state not in self.CANCELABLE_STATES: + raise exc.InvalidStateException("Runner not in a cancelable" + " state: %s" % (self.state)) + + # Check before as a quick way out of attempting to acquire the more + # heavy-weight lock. Then acquire the lock (which should not be + # possible if we are currently running) and set the state (if still + # applicable). 
+ check() + acquired = False + cancelled = False + try: + acquired = self._cancel_lock.acquire(blocking=blocking) + if acquired: + check() + cancelled = True + self._change_state(None, states.CANCELLED) + finally: + if acquired: + self._cancel_lock.release() + return cancelled + + def reset(self): + + def check(): + if self.state not in self.RESETTABLE_STATES: + raise exc.InvalidStateException("Runner not in a resettable" + " state: %s" % (self.state)) + + def do_reset(): + self._latch.count = len(self._predecessors) + self.exc = None + self.exc_info = (None, None, None) + self.result = None + self._change_state(None, states.PENDING) + + # We need to acquire both locks here so that we can not be running + # or being cancelled at the same time we are resetting. + check() + with self._run_lock: + check() + with self._cancel_lock: + check() + do_reset() + + @property + def runs_before(self): + # NOTE(harlowja): this list may change, depending on which other + # runners have completed (or are currently actively running), so + # this is why this is a property instead of a semi-static defined list + # like in the AOT class. The list should only get bigger and not + # smaller so it should be fine to filter on runners that have completed + # successfully. 
+ finished_ok = [] + for r in self._siblings: + if r.has_ran() and r.state in self.SUCCESS_STATES: + finished_ok.append(r) + return finished_ok + + def __call__(self, context, *args, **kwargs): + + def is_runnable(): + if self.state not in self.RUNNABLE_STATES: + return False + return True + + def run(*args, **kwargs): + try: + self._change_state(context, states.RUNNING) + has_result = False + if self._result_cb: + has_result, self.result = self._result_cb(self) + if not has_result: + super(ThreadRunner, self).__call__(*args, **kwargs) + self._change_state(context, states.SUCCESS) + except Exception as e: + self._change_state(context, states.FAILURE) + self.exc = e + self.exc_info = sys.exc_info() + + def signal(): + if not self._successors: + return + if self.state in self.CANCEL_SUCCESSORS_WHEN: + for r in self._successors: + try: + r.cancel(blocking=False) + except exc.InvalidStateException: + pass + for r in self._successors: + try: + r._latch.countDown() + except Exception: + LOG.exception("Failed decrementing %s latch", r) + + # We check before to avoid attempting to acquire the lock when we are + # known to be in a non-runnable state. + if not is_runnable(): + return + args = [context] + list(args) + with self._run_lock: + # We check after we now own the run lock since a previous thread + # could have exited and released that lock and set the state to + # not runnable. + if not is_runnable(): + return + may_proceed = self._latch.await(self._timeout) + # We now acquire the cancel lock so that we can be assured that + # we have not been cancelled by another entity. + with self._cancel_lock: + try: + # If we have been cancelled after awaiting and timing out + # ensure that we alter the state to show timed out (but + # not if we have been cancelled, since our state should + # be cancelled instead). This is done after acquiring the + # cancel lock so that we will not try to overwrite another + # entity trying to set the runner to the cancel state. 
+ if not may_proceed and self.state != states.CANCELLED: + self._change_state(context, states.TIMED_OUT) + # We at this point should only have been able to time out + # or be cancelled, no other state transitions should have + # been possible. + if self.state not in (states.CANCELLED, states.TIMED_OUT): + run(*args, **kwargs) + finally: + signal() diff --git a/billingstack/taskflow/states.py b/billingstack/taskflow/states.py new file mode 100644 index 0000000..b3ff929 --- /dev/null +++ b/billingstack/taskflow/states.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# Job states. +CLAIMED = 'CLAIMED' +FAILURE = 'FAILURE' +PENDING = 'PENDING' +RUNNING = 'RUNNING' +SUCCESS = 'SUCCESS' +UNCLAIMED = 'UNCLAIMED' + +# Flow states. +FAILURE = FAILURE +INTERRUPTED = 'INTERRUPTED' +PENDING = 'PENDING' +RESUMING = 'RESUMING' +REVERTING = 'REVERTING' +RUNNING = RUNNING +STARTED = 'STARTED' +SUCCESS = SUCCESS +CANCELLED = 'CANCELLED' +INCOMPLETE = 'INCOMPLETE' + +# Task states. 
+FAILURE = FAILURE +STARTED = STARTED +SUCCESS = SUCCESS +TIMED_OUT = 'TIMED_OUT' +CANCELLED = CANCELLED diff --git a/billingstack/taskflow/task.py b/billingstack/taskflow/task.py new file mode 100644 index 0000000..4a88c54 --- /dev/null +++ b/billingstack/taskflow/task.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import abc + +from billingstack.openstack.common import uuidutils +from billingstack.taskflow import utils + + +class Task(object): + """An abstraction that defines a potential piece of work that can be + applied and can be reverted to undo the work as a single unit. + """ + __metaclass__ = abc.ABCMeta + + def __init__(self, name, task_id=None): + if task_id: + self._uuid = task_id + else: + self._uuid = uuidutils.generate_uuid() + self._name = name + # An *immutable* input 'resource' name set this task depends + # on existing before this task can be applied. + self.requires = set() + # An *immutable* input 'resource' name set this task would like to + # depends on existing before this task can be applied (but does not + # strongly depend on existing). + self.optional = set() + # An *immutable* output 'resource' name set this task + # produces that other tasks may depend on this task providing. 
+ self.provides = set() + # This identifies the version of the task to be ran which + # can be useful in resuming older versions of tasks. Standard + # major, minor version semantics apply. + self.version = (1, 0) + + @property + def uuid(self): + return self._uuid + + @property + def name(self): + return self._name + + def __str__(self): + return "%s==%s" % (self.name, utils.get_task_version(self)) + + @abc.abstractmethod + def __call__(self, context, *args, **kwargs): + """Activate a given task which will perform some operation and return. + + This method can be used to apply some given context and given set + of args and kwargs to accomplish some goal. Note that the result + that is returned needs to be serializable so that it can be passed + back into this task if reverting is triggered. + """ + + def revert(self, context, result, cause): + """Revert this task using the given context, result that the apply + provided as well as any information which may have caused + said reversion. + """ diff --git a/billingstack/taskflow/utils.py b/billingstack/taskflow/utils.py new file mode 100644 index 0000000..686b2d9 --- /dev/null +++ b/billingstack/taskflow/utils.py @@ -0,0 +1,532 @@ +# -*- coding: utf-8 -*- + +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. +# Copyright (C) 2013 Rackspace Hosting All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import collections +import contextlib +import copy +import logging +import re +import sys +import threading +import threading2 +import time + +from billingstack.openstack.common import uuidutils + +TASK_FACTORY_ATTRIBUTE = '_TaskFlow_task_factory' +LOG = logging.getLogger(__name__) + + +def await(check_functor, timeout=None): + if timeout is not None: + end_time = time.time() + max(0, timeout) + else: + end_time = None + # Use the same/similar scheme that the python condition class uses. + delay = 0.0005 + while not check_functor(): + time.sleep(delay) + if end_time is not None: + remaining = end_time - time.time() + if remaining <= 0: + return False + delay = min(delay * 2, remaining, 0.05) + else: + delay = min(delay * 2, 0.05) + return True + + +def get_task_version(task): + """Gets a tasks *string* version, whether it is a task object/function.""" + task_version = getattr(task, 'version') + if isinstance(task_version, (list, tuple)): + task_version = '.'.join(str(item) for item in task_version) + if task_version is not None and not isinstance(task_version, basestring): + task_version = str(task_version) + return task_version + + +def is_version_compatible(version_1, version_2): + """Checks for major version compatibility of two *string" versions.""" + if version_1 == version_2: + # Equivalent exactly, so skip the rest. + return True + + def _convert_to_pieces(version): + try: + pieces = [] + for p in version.split("."): + p = p.strip() + if not len(p): + pieces.append(0) + continue + # Clean off things like 1alpha, or 2b and just select the + # digit that starts that entry instead. 
+ p_match = re.match(r"(\d+)([A-Za-z]*)(.*)", p) + if p_match: + p = p_match.group(1) + pieces.append(int(p)) + except (AttributeError, TypeError, ValueError): + pieces = [] + return pieces + + version_1_pieces = _convert_to_pieces(version_1) + version_2_pieces = _convert_to_pieces(version_2) + if len(version_1_pieces) == 0 or len(version_2_pieces) == 0: + return False + + # Ensure major version compatibility to start. + major1 = version_1_pieces[0] + major2 = version_2_pieces[0] + if major1 != major2: + return False + return True + + +class MultiLock(object): + """A class which can attempt to obtain many locks at once and release + said locks when exiting. + + Useful as a context manager around many locks (instead of having to nest + said individual context managers). + """ + + def __init__(self, locks): + assert len(locks) > 0, "Zero locks requested" + self._locks = locks + self._locked = [False] * len(locks) + + def __enter__(self): + + def is_locked(lock): + # NOTE(harlowja): the threading2 lock doesn't seem to have this + # attribute, so thats why we are checking it existing first. 
+ if hasattr(lock, 'locked'): + return lock.locked() + return False + + for i in xrange(0, len(self._locked)): + if self._locked[i] or is_locked(self._locks[i]): + raise threading.ThreadError("Lock %s not previously released" + % (i + 1)) + self._locked[i] = False + for (i, lock) in enumerate(self._locks): + self._locked[i] = lock.acquire() + + def __exit__(self, type, value, traceback): + for (i, locked) in enumerate(self._locked): + try: + if locked: + self._locks[i].release() + self._locked[i] = False + except threading.ThreadError: + LOG.exception("Unable to release lock %s", i + 1) + + +class CountDownLatch(object): + """Similar in concept to the java count down latch.""" + + def __init__(self, count=0): + self.count = count + self.lock = threading.Condition() + + def countDown(self): + with self.lock: + self.count -= 1 + if self.count <= 0: + self.lock.notifyAll() + + def await(self, timeout=None): + end_time = None + if timeout is not None: + timeout = max(0, timeout) + end_time = time.time() + timeout + time_up = False + with self.lock: + while True: + # Stop waiting on these 2 conditions. + if time_up or self.count <= 0: + break + # Was this a spurious wakeup or did we really end?? + self.lock.wait(timeout=timeout) + if end_time is not None: + if time.time() >= end_time: + time_up = True + else: + # Reduce the timeout so that we don't wait extra time + # over what we initially were requested to. + timeout = end_time - time.time() + return self.count <= 0 + + +class LastFedIter(object): + """An iterator which yields back the first item and then yields back + results from the provided iterator. + """ + + def __init__(self, first, rest_itr): + self.first = first + self.rest_itr = rest_itr + + def __iter__(self): + yield self.first + for i in self.rest_itr: + yield i + + +class ThreadGroupExecutor(object): + """A simple thread executor that spins up new threads (or greenthreads) for + each task to be completed (no pool limit is enforced). 
+ + TODO(harlowja): Likely if we use the more advanced executors that come with + the concurrent.futures library we can just get rid of this. + """ + + def __init__(self, daemonize=True): + self._threads = [] + self._group = threading2.ThreadGroup() + self._daemonize = daemonize + + def submit(self, fn, *args, **kwargs): + t = threading2.Thread(target=fn, group=self._group, + args=args, kwargs=kwargs) + t.daemon = self._daemonize + self._threads.append(t) + t.start() + + def await_termination(self, timeout=None): + if not self._threads: + return + return self._group.join(timeout) + + +class FlowFailure(object): + """When a task failure occurs the following object will be given to revert + and can be used to interrogate what caused the failure. + """ + + def __init__(self, runner, flow, exc, exc_info=None): + self.runner = runner + self.flow = flow + self.exc = exc + if not exc_info: + self.exc_info = sys.exc_info() + else: + self.exc_info = exc_info + + +class RollbackTask(object): + """A helper task that on being called will call the underlying callable + tasks revert method (if said method exists). + """ + + def __init__(self, context, task, result): + self.task = task + self.result = result + self.context = context + + def __str__(self): + return str(self.task) + + def __call__(self, cause): + if ((hasattr(self.task, "revert") and + isinstance(self.task.revert, collections.Callable))): + self.task.revert(self.context, self.result, cause) + + +class Runner(object): + """A helper class that wraps a task and can find the needed inputs for + the task to run, as well as providing a uuid and other useful functionality + for users of the task. + + TODO(harlowja): replace with the task details object or a subclass of + that??? 
+ """ + + def __init__(self, task, uuid=None): + assert isinstance(task, collections.Callable) + task_factory = getattr(task, TASK_FACTORY_ATTRIBUTE, None) + if task_factory: + self.task = task_factory(task) + else: + self.task = task + self.providers = {} + self.result = None + if not uuid: + self._id = uuidutils.generate_uuid() + else: + self._id = str(uuid) + + @property + def uuid(self): + return str(self._id) + + @property + def requires(self): + return self.task.requires + + @property + def provides(self): + return self.task.provides + + @property + def optional(self): + return self.task.optional + + @property + def runs_before(self): + return [] + + @property + def version(self): + return get_task_version(self.task) + + @property + def name(self): + return self.task.name + + def reset(self): + self.result = None + + def __str__(self): + lines = ["Runner: %s" % (self.name)] + lines.append("%s" % (self.uuid)) + lines.append("%s" % (self.version)) + return "; ".join(lines) + + def __call__(self, *args, **kwargs): + # Find all of our inputs first. + kwargs = dict(kwargs) + for (k, who_made) in self.providers.iteritems(): + if k in kwargs: + continue + try: + kwargs[k] = who_made.result[k] + except (TypeError, KeyError): + pass + optional_keys = self.optional + optional_keys = optional_keys - set(kwargs.keys()) + for k in optional_keys: + for who_ran in self.runs_before: + matched = False + if k in who_ran.provides: + try: + kwargs[k] = who_ran.result[k] + matched = True + except (TypeError, KeyError): + pass + if matched: + break + # Ensure all required keys are either existent or set to none. + for k in self.requires: + if k not in kwargs: + kwargs[k] = None + # And now finally run. + self.result = self.task(*args, **kwargs) + return self.result + + +class AOTRunner(Runner): + """A runner that knows who runs before this runner ahead of time from a + known list of previous runners. 
+ """ + + def __init__(self, task): + super(AOTRunner, self).__init__(task) + self._runs_before = [] + + @property + def runs_before(self): + return self._runs_before + + @runs_before.setter + def runs_before(self, runs_before): + self._runs_before = list(runs_before) + + +class TransitionNotifier(object): + """A utility helper class that can be used to subscribe to + notifications of events occuring as well as allow a entity to post said + notifications to subscribers. + """ + + RESERVED_KEYS = ('details',) + ANY = '*' + + def __init__(self): + self._listeners = collections.defaultdict(list) + + def reset(self): + self._listeners = collections.defaultdict(list) + + def notify(self, state, details): + listeners = list(self._listeners.get(self.ANY, [])) + for i in self._listeners[state]: + if i not in listeners: + listeners.append(i) + if not listeners: + return + for (callback, args, kwargs) in listeners: + if args is None: + args = [] + if kwargs is None: + kwargs = {} + kwargs['details'] = details + try: + callback(state, *args, **kwargs) + except Exception: + LOG.exception(("Failure calling callback %s to notify about" + " state transition %s"), callback, state) + + def register(self, state, callback, args=None, kwargs=None): + assert isinstance(callback, collections.Callable) + for i, (cb, args, kwargs) in enumerate(self._listeners.get(state, [])): + if cb is callback: + raise ValueError("Callback %s already registered" % (callback)) + if kwargs: + for k in self.RESERVED_KEYS: + if k in kwargs: + raise KeyError(("Reserved key '%s' not allowed in " + "kwargs") % k) + kwargs = copy.copy(kwargs) + if args: + args = copy.copy(args) + self._listeners[state].append((callback, args, kwargs)) + + def deregister(self, state, callback): + if state not in self._listeners: + return + for i, (cb, args, kwargs) in enumerate(self._listeners[state]): + if cb is callback: + self._listeners[state].pop(i) + break + + +class RollbackAccumulator(object): + """A utility class that 
can help in organizing 'undo' like code + so that said code be rolled back on failure (automatically or manually) + by activating rollback callables that were inserted during said codes + progression. + """ + + def __init__(self): + self._rollbacks = [] + + def add(self, *callables): + self._rollbacks.extend(callables) + + def reset(self): + self._rollbacks = [] + + def __len__(self): + return len(self._rollbacks) + + def __enter__(self): + return self + + def rollback(self, cause): + LOG.warn("Activating %s rollbacks due to %s.", len(self), cause) + for (i, f) in enumerate(reversed(self._rollbacks)): + LOG.debug("Calling rollback %s: %s", i + 1, f) + try: + f(cause) + except Exception: + LOG.exception(("Failed rolling back %s: %s due " + "to inner exception."), i + 1, f) + + def __exit__(self, type, value, tb): + if any((value, type, tb)): + self.rollback(value) + + +class ReaderWriterLock(object): + """A simple reader-writer lock. + + Several readers can hold the lock simultaneously, and only one writer. + Write locks have priority over reads to prevent write starvation. + + Public domain @ http://majid.info/blog/a-reader-writer-lock-for-python/ + """ + + def __init__(self): + self.rwlock = 0 + self.writers_waiting = 0 + self.monitor = threading.Lock() + self.readers_ok = threading.Condition(self.monitor) + self.writers_ok = threading.Condition(self.monitor) + + @contextlib.contextmanager + def acquire(self, read=True): + """Acquire a read or write lock in a context manager.""" + try: + if read: + self.acquire_read() + else: + self.acquire_write() + yield self + finally: + self.release() + + def acquire_read(self): + """Acquire a read lock. + + Several threads can hold this typeof lock. + It is exclusive with write locks. + """ + + self.monitor.acquire() + while self.rwlock < 0 or self.writers_waiting: + self.readers_ok.wait() + self.rwlock += 1 + self.monitor.release() + + def acquire_write(self): + """Acquire a write lock. 
+ + Only one thread can hold this lock, and only when no read locks + are also held. + """ + + self.monitor.acquire() + while self.rwlock != 0: + self.writers_waiting += 1 + self.writers_ok.wait() + self.writers_waiting -= 1 + self.rwlock = -1 + self.monitor.release() + + def release(self): + """Release a lock, whether read or write.""" + + self.monitor.acquire() + if self.rwlock < 0: + self.rwlock = 0 + else: + self.rwlock -= 1 + wake_writers = self.writers_waiting and self.rwlock == 0 + wake_readers = self.writers_waiting == 0 + self.monitor.release() + if wake_writers: + self.writers_ok.acquire() + self.writers_ok.notify() + self.writers_ok.release() + elif wake_readers: + self.readers_ok.acquire() + self.readers_ok.notifyAll() + self.readers_ok.release() diff --git a/billingstack/tasks.py b/billingstack/tasks.py new file mode 100644 index 0000000..f23c35c --- /dev/null +++ b/billingstack/tasks.py @@ -0,0 +1,85 @@ +# -*- encoding: utf-8 -*- +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from billingstack.openstack.common import log +from billingstack.openstack.common.gettextutils import _ +from billingstack.taskflow import task + + +LOG = log.getLogger(__name__) + + +def _make_task_name(cls, prefix="default", addons=None): + components = [cls.__module__, cls.__name__] + if addons: + for a in addons: + components.append(str(a)) + return "%s:%s" % (prefix, ".".join(components)) + + +def _attach_debug_listeners(flow): + """Sets up a nice set of debug listeners for the flow. + + These listeners will log when tasks/flows are transitioning from state to + state so that said states can be seen in the debug log output which is very + useful for figuring out where problems are occuring. + """ + + def flow_log_change(state, details): + LOG.debug(_("%(flow)s has moved into state %(state)s from state" + " %(old_state)s") % {'state': state, + 'old_state': details.get('old_state'), + 'flow': details['flow']}) + + def task_log_change(state, details): + LOG.debug(_("%(flow)s has moved %(runner)s into state %(state)s with" + " result: %(result)s") % {'state': state, + 'flow': details['flow'], + 'runner': details['runner'], + 'result': details.get('result')}) + + # Register * for all state changes (and not selective state changes to be + # called upon) since all the changes is more useful. + flow.notifier.register('*', flow_log_change) + flow.task_notifier.register('*', task_log_change) + return flow + + +class RootTask(task.Task): + def __init__(self, name=None, **kw): + name = name or _make_task_name(self.__class__, **kw) + super(RootTask, self).__init__(name) + + +class ValuesInjectTask(RootTask): + """ + This injects a dict into the flow. + + This injection is done so that the keys (and values) provided can be + dependended on by tasks further down the line. Since taskflow is dependency + based this can be considered the bootstrapping task that provides an + initial set of values for other tasks to get started with. 
If this did not + exist then tasks would fail locating there dependent tasks and the values + said dependent tasks produce. + + Reversion strategy: N/A + """ + def __init__(self, values, **kw): + super(ValuesInjectTask, self).__init__(**kw) + self.provides.update(values.keys()) + self._values = values + + def __call__(self, context): + return dict(self._values) diff --git a/taskflow.conf b/taskflow.conf new file mode 100644 index 0000000..d71164a --- /dev/null +++ b/taskflow.conf @@ -0,0 +1,7 @@ +[DEFAULT] + +# The list of primitives to copy from taskflow +primitives=flow.threaded_flow,flow.linear_flow,task + +# The base module to hold the copy of taskflow +base=billingstack diff --git a/tools/pip-requires b/tools/pip-requires index 4b03ff6..ab08c34 100644 --- a/tools/pip-requires +++ b/tools/pip-requires @@ -16,3 +16,6 @@ Routes>=1.12.3 stevedore>=0.10 WebOb>=1.2.3,<1.3 https://github.com/stackforge/wsme/archive/master.zip#egg=WSME +# Taskflow +threading2 +networkx From 9d3df7ff0cf2e8c307a2153fdc0d8cf4aac76332 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Tue, 27 Aug 2013 18:07:43 +0200 Subject: [PATCH 178/182] Add some errorhandling to tasks and states blueprint taskflow Change-Id: Ie4147eadb5fa436cd28663082d78fd001a7e3b81 --- billingstack/api/v2/models.py | 5 ++++- .../collector/flows/gateway_configuration.py | 7 +++++++ billingstack/collector/flows/payment_method.py | 14 +++++++++++--- billingstack/collector/states.py | 3 ++- 4 files changed, 24 insertions(+), 5 deletions(-) diff --git a/billingstack/api/v2/models.py b/billingstack/api/v2/models.py index 3831206..58cccda 100644 --- a/billingstack/api/v2/models.py +++ b/billingstack/api/v2/models.py @@ -159,7 +159,8 @@ class PGConfig(Base): merchant_id = text provider_id = text - is_default = bool + state = text + properties = DictType(key_type=text, value_type=property_type) @@ -172,6 +173,8 @@ class PaymentMethod(Base): customer_id = text provider_config_id = text + state = text + properties = 
DictType(key_type=text, value_type=property_type) diff --git a/billingstack/collector/flows/gateway_configuration.py b/billingstack/collector/flows/gateway_configuration.py index 0ea7d2d..521a0a1 100644 --- a/billingstack/collector/flows/gateway_configuration.py +++ b/billingstack/collector/flows/gateway_configuration.py @@ -17,6 +17,7 @@ # under the License. from billingstack import exceptions from billingstack import tasks +from billingstack.collector import states from billingstack.openstack.common import log from billingstack.payment_gateway import get_provider from billingstack.taskflow.patterns import linear_flow, threaded_flow @@ -35,6 +36,7 @@ def __init__(self, storage, **kw): self.storage = storage def __call__(self, context, gateway_config): + gateway_config['state'] = states.VERIFYING values = self.storage.create_pg_config(context, gateway_config) return {'gateway_config': values} @@ -96,10 +98,15 @@ def __init__(self, storage, **kw): def __call__(self, ctxt, gateway_config, gateway_provider): gateway_provider_cls = get_provider[gateway_provider['name']] gateway_provider_obj = gateway_provider_cls(gateway_config) + try: gateway_provider_obj.verify_config() except exceptions.ConfigurationError: + self.storage.update_pg_config( + ctxt, gateway_config['id'], {'state': states.INVALID}) raise + self.storage.update_pg_config( + ctxt, gateway_config['id'], {'state': states.ACTIVE}) def create_flow(storage, values): diff --git a/billingstack/collector/flows/payment_method.py b/billingstack/collector/flows/payment_method.py index 6f80269..b5ef272 100644 --- a/billingstack/collector/flows/payment_method.py +++ b/billingstack/collector/flows/payment_method.py @@ -15,7 +15,9 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
+from billingstack import exceptions
 from billingstack import tasks
+from billingstack.collector import states
 from billingstack.openstack.common import log
 from billingstack.payment_gateway import get_provider
 from billingstack.taskflow.patterns import linear_flow, threaded_flow
@@ -37,6 +39,7 @@ def __init__(self, storage, **kw):
         self.storage = storage
 
     def __call__(self, ctxt, payment_method):
+        payment_method['state'] = states.PENDING
         values = self.storage.create_payment_method(ctxt, payment_method)
         return {'payment_method': values}
 
@@ -97,9 +100,14 @@ def __call__(self, ctxt, payment_method, gateway_config, gateway_provider):
         gateway_provider_cls = get_provider(gateway_provider['name'])
         gateway_provider_obj = gateway_provider_cls(gateway_config)
 
-        gateway_provider_obj.create_payment_method(
-            payment_method['customer_id'],
-            payment_method)
+        try:
+            gateway_provider_obj.create_payment_method(
+                payment_method['customer_id'],
+                payment_method)
+        except exceptions.BadRequest:
+            self.storage.update_payment_method(
+                ctxt, payment_method['id'], {'state': states.INVALID})
+            raise
 
 
 def create_flow(storage, payment_method):
diff --git a/billingstack/collector/states.py b/billingstack/collector/states.py
index e9aa9fc..d883742 100644
--- a/billingstack/collector/states.py
+++ b/billingstack/collector/states.py
@@ -16,5 +16,6 @@
 # License for the specific language governing permissions and limitations
 # under the License.
PENDING = u'PENDING' -CREATED = u'CREATED' +VERIFYING = u'VERIFYING' +ACTIVE = u'ACTIVE' INVALID = u'INVALID' From e9d2aac34b59753790a185766eaa20a3f3912213 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Fri, 30 Aug 2013 11:32:44 +0200 Subject: [PATCH 179/182] Switches some tasks over to use taskflow blueprint taskflow-central Change-Id: I0b735a8944b44b5474b9df11a1051609da433016 --- billingstack/central/flows/__init__.py | 0 billingstack/central/flows/merchant.py | 49 ++++++++++++++++++++++++++ billingstack/central/service.py | 5 ++- 3 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 billingstack/central/flows/__init__.py create mode 100644 billingstack/central/flows/merchant.py diff --git a/billingstack/central/flows/__init__.py b/billingstack/central/flows/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/billingstack/central/flows/merchant.py b/billingstack/central/flows/merchant.py new file mode 100644 index 0000000..7a6cf36 --- /dev/null +++ b/billingstack/central/flows/merchant.py @@ -0,0 +1,49 @@ +# -*- encoding: utf-8 -*- +# +# Copyright 2013 Hewlett-Packard Development Company, L.P. +# +# Author: Endre Karlson +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+from billingstack import tasks +from billingstack.openstack.common import log +from billingstack.taskflow.patterns import linear_flow + +ACTION = 'merchant:create' + +LOG = log.getLogger(__name__) + + +class EntryCreateTask(tasks.RootTask): + def __init__(self, storage, **kw): + super(EntryCreateTask, self).__init__(**kw) + self.requires.update(['merchant']) + self.provides.update(['merchant']) + self.storage = storage + + def __call__(self, context, merchant): + values = self.storage.create_merchant(context, merchant) + return {'merchant': values} + + +def create_flow(storage, values): + flow = linear_flow.Flow(ACTION) + + flow.add(tasks.ValuesInjectTask( + {'merchant': values}, + prefix=ACTION + ':initial')) + + entry_task = EntryCreateTask(storage, prefix=ACTION) + entry_task_id = flow.add(entry_task) + + return entry_task_id, tasks._attach_debug_listeners(flow) diff --git a/billingstack/central/service.py b/billingstack/central/service.py index f2dd63d..b2ffceb 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -19,6 +19,7 @@ from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service from billingstack.openstack.common import service as os_service +from billingstack.central.flows import merchant from billingstack.storage.utils import get_connection from billingstack import service as bs_service @@ -102,7 +103,9 @@ def get_pg_provider(self, ctxt, pgp_id): # Merchant def create_merchant(self, ctxt, values): - return self.storage_conn.create_merchant(ctxt, values) + id_, flow = merchant.create_flow(self.storage_conn, values) + flow.run(ctxt) + return flow.results[id_]['merchant'] def list_merchants(self, ctxt, **kw): return self.storage_conn.list_merchants(ctxt, **kw) From cd8f0b1e8ec79d0e421bfdba2e4a8f6ca0101098 Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Wed, 16 Oct 2013 20:02:31 +0200 Subject: [PATCH 180/182] Bring code up to speed. 
* Use taskflow as a library * Move requires to root * Fix git path * Update oslo Change-Id: Iae8329a639e26881fbc3286479a429ae75149493 --- billingstack/central/flows/merchant.py | 24 +- billingstack/central/service.py | 10 +- .../collector/flows/gateway_configuration.py | 71 +- .../collector/flows/payment_method.py | 81 +-- billingstack/collector/service.py | 16 +- billingstack/openstack/common/context.py | 7 +- billingstack/openstack/common/excutils.py | 6 +- billingstack/openstack/common/fileutils.py | 43 +- billingstack/openstack/common/gettextutils.py | 108 ++- billingstack/openstack/common/jsonutils.py | 20 +- billingstack/openstack/common/lockutils.py | 63 +- billingstack/openstack/common/log.py | 101 ++- .../openstack/common/notifier/rpc_notifier.py | 3 +- .../common/notifier/rpc_notifier2.py | 3 +- billingstack/openstack/common/processutils.py | 17 +- billingstack/openstack/common/rpc/__init__.py | 4 +- billingstack/openstack/common/rpc/amqp.py | 43 +- billingstack/openstack/common/rpc/common.py | 11 +- .../openstack/common/rpc/impl_fake.py | 2 +- .../openstack/common/rpc/impl_kombu.py | 17 +- .../openstack/common/rpc/impl_qpid.py | 236 +++++-- billingstack/openstack/common/rpc/impl_zmq.py | 2 +- billingstack/openstack/common/rpc/proxy.py | 3 +- .../openstack/common/rpc/serializer.py | 4 +- billingstack/openstack/common/service.py | 77 ++- billingstack/openstack/common/test.py | 54 ++ billingstack/openstack/common/threadgroup.py | 4 + billingstack/openstack/common/timeutils.py | 17 +- billingstack/openstack/common/versionutils.py | 45 ++ billingstack/taskflow/__init__.py | 1 - billingstack/taskflow/decorators.py | 97 --- billingstack/taskflow/exceptions.py | 77 --- billingstack/taskflow/flow.py | 216 ------ billingstack/taskflow/functor_task.py | 95 --- billingstack/taskflow/graph_utils.py | 80 --- billingstack/taskflow/patterns/__init__.py | 1 - billingstack/taskflow/patterns/linear_flow.py | 286 -------- .../taskflow/patterns/threaded_flow.py | 636 
------------------ billingstack/taskflow/states.py | 44 -- billingstack/taskflow/task.py | 77 --- billingstack/taskflow/utils.py | 532 --------------- billingstack/tasks.py | 35 +- doc/source/install/manual.rst | 4 +- requirements.txt | 28 + setup.py | 2 +- taskflow.conf | 7 - tools/test-requires => test-requirements.txt | 0 tools/colorizer.py | 333 --------- tools/install_venv.py | 76 --- tools/install_venv_common.py | 224 ------ tools/patch_tox_venv.py | 39 -- tools/pip-options | 8 - tools/pip-requires | 21 - tools/setup-requires | 0 tools/with_venv.sh | 2 +- tox.ini | 17 +- 56 files changed, 778 insertions(+), 3252 deletions(-) create mode 100644 billingstack/openstack/common/test.py create mode 100644 billingstack/openstack/common/versionutils.py delete mode 100644 billingstack/taskflow/__init__.py delete mode 100644 billingstack/taskflow/decorators.py delete mode 100644 billingstack/taskflow/exceptions.py delete mode 100644 billingstack/taskflow/flow.py delete mode 100644 billingstack/taskflow/functor_task.py delete mode 100644 billingstack/taskflow/graph_utils.py delete mode 100644 billingstack/taskflow/patterns/__init__.py delete mode 100644 billingstack/taskflow/patterns/linear_flow.py delete mode 100644 billingstack/taskflow/patterns/threaded_flow.py delete mode 100644 billingstack/taskflow/states.py delete mode 100644 billingstack/taskflow/task.py delete mode 100644 billingstack/taskflow/utils.py create mode 100644 requirements.txt delete mode 100644 taskflow.conf rename tools/test-requires => test-requirements.txt (100%) delete mode 100755 tools/colorizer.py delete mode 100644 tools/install_venv.py delete mode 100644 tools/install_venv_common.py delete mode 100644 tools/patch_tox_venv.py delete mode 100644 tools/pip-options delete mode 100644 tools/pip-requires delete mode 100644 tools/setup-requires diff --git a/billingstack/central/flows/merchant.py b/billingstack/central/flows/merchant.py index 7a6cf36..29ab1e9 100644 --- 
a/billingstack/central/flows/merchant.py +++ b/billingstack/central/flows/merchant.py @@ -5,7 +5,7 @@ # Author: Endre Karlson # # Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain +# not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 @@ -15,9 +15,10 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +from taskflow.patterns import linear_flow + from billingstack import tasks from billingstack.openstack.common import log -from billingstack.taskflow.patterns import linear_flow ACTION = 'merchant:create' @@ -27,23 +28,16 @@ class EntryCreateTask(tasks.RootTask): def __init__(self, storage, **kw): super(EntryCreateTask, self).__init__(**kw) - self.requires.update(['merchant']) - self.provides.update(['merchant']) self.storage = storage - def __call__(self, context, merchant): - values = self.storage.create_merchant(context, merchant) - return {'merchant': values} + def execute(self, ctxt, values): + return self.storage.create_merchant(ctxt, values) -def create_flow(storage, values): +def create_flow(storage): flow = linear_flow.Flow(ACTION) - flow.add(tasks.ValuesInjectTask( - {'merchant': values}, - prefix=ACTION + ':initial')) - - entry_task = EntryCreateTask(storage, prefix=ACTION) - entry_task_id = flow.add(entry_task) + entry_task = EntryCreateTask(storage, provides='merchant', prefix=ACTION) + flow.add(entry_task) - return entry_task_id, tasks._attach_debug_listeners(flow) + return flow diff --git a/billingstack/central/service.py b/billingstack/central/service.py index b2ffceb..54a757c 100644 --- a/billingstack/central/service.py +++ b/billingstack/central/service.py @@ -16,6 +16,9 @@ import sys from oslo.config import cfg +from taskflow.engines import
run as run_flow + + from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service from billingstack.openstack.common import service as os_service @@ -103,9 +106,10 @@ def get_pg_provider(self, ctxt, pgp_id): # Merchant def create_merchant(self, ctxt, values): - id_, flow = merchant.create_flow(self.storage_conn, values) - flow.run(ctxt) - return flow.results[id_]['merchant'] + flow = merchant.create_flow(self.storage_conn) + result = run_flow(flow, engine_conf="parallel", + store={'values': values, 'ctxt': ctxt}) + return result['merchant'] def list_merchants(self, ctxt, **kw): return self.storage_conn.list_merchants(ctxt, **kw) diff --git a/billingstack/collector/flows/gateway_configuration.py b/billingstack/collector/flows/gateway_configuration.py index 521a0a1..0acebd5 100644 --- a/billingstack/collector/flows/gateway_configuration.py +++ b/billingstack/collector/flows/gateway_configuration.py @@ -15,12 +15,13 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
+from taskflow.patterns import linear_flow + from billingstack import exceptions from billingstack import tasks from billingstack.collector import states from billingstack.openstack.common import log from billingstack.payment_gateway import get_provider -from billingstack.taskflow.patterns import linear_flow, threaded_flow ACTION = 'gateway_configuration:create' @@ -31,32 +32,11 @@ class EntryCreateTask(tasks.RootTask): def __init__(self, storage, **kw): super(EntryCreateTask, self).__init__(**kw) - self.requires.update(['gateway_config']) - self.provides.update(['gateway_config']) self.storage = storage - def __call__(self, context, gateway_config): - gateway_config['state'] = states.VERIFYING - values = self.storage.create_pg_config(context, gateway_config) - return {'gateway_config': values} - - -class ThreadStartTask(tasks.RootTask): - """ - This is the end of the current flow, we'll fire off a new threaded flow - that does stuff towards the actual Gateway which may include blocking code. 
- """ - def __init__(self, storage, **kw): - super(ThreadStartTask, self).__init__(**kw) - self.requires.update(['gateway_config']) - self.storage = storage - - def __call__(self, ctxt, gateway_config): - flow = threaded_flow.Flow(ACTION + ':backend') - flow.add(tasks.ValuesInjectTask({'gateway_config': gateway_config})) - flow.add(PrerequirementsTask(self.storage)) - flow.add(BackendVerifyTask(self.storage)) - flow.run(ctxt) + def execute(self, ctxt, values): + values['state'] = states.VERIFYING + return self.storage.create_pg_config(ctxt, values) class PrerequirementsTask(tasks.RootTask): @@ -65,20 +45,11 @@ class PrerequirementsTask(tasks.RootTask): """ def __init__(self, storage, **kw): super(PrerequirementsTask, self).__init__(**kw) - self.requires.update(['gateway_config']) - self.provides.update([ - 'gateway_config', - 'gateway_provider' - ]) self.storage = storage - def __call__(self, ctxt, gateway_config): - gateway_provider = self.storage.get_pg_provider( - gateway_config['providedr_id']) - return { - 'gateway_config': gateway_config, - 'gateway_provider': gateway_provider - } + def execute(self, ctxt, gateway_config): + return self.storage.get_pg_provider( + ctxt, gateway_config['provider_id']) class BackendVerifyTask(tasks.RootTask): @@ -92,11 +63,10 @@ class BackendVerifyTask(tasks.RootTask): """ def __init__(self, storage, **kw): super(BackendVerifyTask, self).__init__(**kw) - self.requires.update(['gateway_config', 'gateway_provider']) self.storage = storage - def __call__(self, ctxt, gateway_config, gateway_provider): - gateway_provider_cls = get_provider[gateway_provider['name']] + def execute(self, ctxt, gateway_config, gateway_provider): + gateway_provider_cls = get_provider(gateway_provider['name']) gateway_provider_obj = gateway_provider_cls(gateway_config) try: @@ -109,14 +79,19 @@ def __call__(self, ctxt, gateway_config, gateway_provider): ctxt, gateway_config['id'], {'state': states.ACTIVE}) -def create_flow(storage, values): - flow = 
linear_flow.Flow(ACTION) +def create_flow(storage): + flow = linear_flow.Flow(ACTION + ':initial') + + entry_task = EntryCreateTask( + storage, provides='gateway_config', prefix=ACTION) + flow.add(entry_task) - flow.add(tasks.ValuesInjectTask( - {'gateway_config': values}, - prefix=ACTION + ':initial')) + backend_flow = linear_flow.Flow(ACTION + ':backend') + prereq_task = PrerequirementsTask( + storage, provides='gateway_provider', prefix=ACTION) + backend_flow.add(prereq_task) + backend_flow.add(BackendVerifyTask(storage, prefix=ACTION)) - entry_task = EntryCreateTask(storage, prefix=ACTION) - entry_task_id = flow.add(entry_task) + flow.add(backend_flow) - return entry_task_id, tasks._attach_debug_listeners(flow) + return flow diff --git a/billingstack/collector/flows/payment_method.py b/billingstack/collector/flows/payment_method.py index b5ef272..bf011b1 100644 --- a/billingstack/collector/flows/payment_method.py +++ b/billingstack/collector/flows/payment_method.py @@ -15,12 +15,13 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
+from taskflow.patterns import linear_flow + from billingstack import exceptions from billingstack import tasks from billingstack.collector import states from billingstack.openstack.common import log from billingstack.payment_gateway import get_provider -from billingstack.taskflow.patterns import linear_flow, threaded_flow ACTION = 'payment_method:create' @@ -34,34 +35,11 @@ class EntryCreateTask(tasks.RootTask): """ def __init__(self, storage, **kw): super(EntryCreateTask, self).__init__(**kw) - self.requires.update(['payment_method']) - self.provides.update(['payment_method']) - self.storage = storage - - def __call__(self, ctxt, payment_method): - payment_method['state'] = states.PENDING - values = self.storage.create_payment_method(ctxt, payment_method) - return {'payment_method': values} - - -class ThreadStartTask(tasks.RootTask): - """ - This is the end of the current flow, we'll fire off a new threaded flow - that does stuff towards the actual Gateway which may include blocking code. - - This fires off a new flow that is threaded / greenthreads? 
- """ - def __init__(self, storage, **kw): - super(ThreadStartTask, self).__init__(**kw) - self.requires.update(['payment_method']) self.storage = storage - def __call__(self, ctxt, payment_method): - flow = threaded_flow.Flow(ACTION + ':backend') - flow.add(tasks.ValuesInjectTask({'payment_method': payment_method})) - flow.add(PrerequirementsTask(self.storage)) - flow.add(BackendCreateTask(self.storage)) - flow.run(ctxt) + def execute(self, ctxt, values): + values['state'] = states.PENDING + return self.storage.create_payment_method(ctxt, values) class PrerequirementsTask(tasks.RootTask): @@ -70,33 +48,23 @@ class PrerequirementsTask(tasks.RootTask): """ def __init__(self, storage, **kw): super(PrerequirementsTask, self).__init__(**kw) - self.requires.update(['payment_method']) - self.provides.update([ - 'payment_method', - 'gateway_config', - 'gateway_provider']) self.storage = storage - def __call__(self, ctxt, **kw): - kw['gateway_config'] = self.storage.get_pg_config( - ctxt, kw['payment_method']['provider_config_id']) - - kw['gateway_provider'] = self.storage.get_pg_provider( - ctxt, kw['gateway_config']['provider_id']) - - return kw + def execute(self, ctxt, values): + data = {} + data['gateway_config'] = self.storage.get_pg_config( + ctxt, values['provider_config_id']) + data['gateway_provider'] = self.storage.get_pg_provider( + ctxt, data['gateway_config']['provider_id']) + return data class BackendCreateTask(tasks.RootTask): def __init__(self, storage, **kw): super(BackendCreateTask, self).__init__(**kw) - self.requires.update([ - 'payment_method', - 'gateway_config', - 'gateway_provider']) self.storage = storage - def __call__(self, ctxt, payment_method, gateway_config, gateway_provider): + def execute(self, ctxt, payment_method, gateway_config, gateway_provider): gateway_provider_cls = get_provider(gateway_provider['name']) gateway_provider_obj = gateway_provider_cls(gateway_config) @@ -110,19 +78,26 @@ def __call__(self, ctxt, payment_method, 
gateway_config, gateway_provider): raise -def create_flow(storage, payment_method): +def create_flow(storage): """ The flow for the service to start """ flow = linear_flow.Flow(ACTION + ':initial') - flow.add(tasks.ValuesInjectTask( - {'payment_method': payment_method}, - prefix=ACTION)) + entry_task = EntryCreateTask(storage, provides='payment_method', + prefix=ACTION) + flow.add(entry_task) - entry_task = EntryCreateTask(storage, prefix=ACTION) - entry_task_id = flow.add(entry_task) + backend_flow = linear_flow.Flow(ACTION + ':backend') + prereq_task = PrerequirementsTask( + storage, + provides=set([ + 'gateway_config', + 'gateway_provider']), + prefix=ACTION) + backend_flow.add(prereq_task) + backend_flow.add(BackendCreateTask(storage, prefix=ACTION)) - flow.add(ThreadStartTask(storage, prefix=ACTION)) + flow.add(backend_flow) - return entry_task_id, tasks._attach_debug_listeners(flow) + return flow diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py index 6314de1..f35d79c 100644 --- a/billingstack/collector/service.py +++ b/billingstack/collector/service.py @@ -20,6 +20,8 @@ import sys from oslo.config import cfg +from taskflow.engines import run as run_flow + from billingstack.openstack.common import log as logging from billingstack.openstack.common.rpc import service as rpc_service from billingstack.openstack.common import service as os_service @@ -64,10 +66,9 @@ def list_pg_providers(self, ctxt, **kw): # PGC def create_pg_config(self, ctxt, values): - id_, flow = gateway_configuration.create_flow( - self.storage_conn, values) - flow.run(ctxt) - return flow.results[id_]['gateway_config'] + flow = gateway_configuration.create_flow(self.storage_conn) + results = run_flow(flow, store={'values': values, 'ctxt': ctxt}) + return results['gateway_config'] def list_pg_configs(self, ctxt, **kw): return self.storage_conn.list_pg_configs(ctxt, **kw) @@ -83,10 +84,9 @@ def delete_pg_config(self, ctxt, id_): # PM def 
create_payment_method(self, ctxt, values): - id_, flow = payment_method.create_flow( - self.storage_conn, values) - flow.run(ctxt) - return flow.results[id_]['payment_method'] + flow = payment_method.create_flow(self.storage_conn) + results = run_flow(flow, store={'values': values, 'ctxt': ctxt}) + return results['payment_method'] def list_payment_methods(self, ctxt, **kw): return self.storage_conn.list_payment_methods(ctxt, **kw) diff --git a/billingstack/openstack/common/context.py b/billingstack/openstack/common/context.py index 401c9dd..d074b02 100644 --- a/billingstack/openstack/common/context.py +++ b/billingstack/openstack/common/context.py @@ -40,13 +40,15 @@ class RequestContext(object): """ def __init__(self, auth_token=None, user=None, tenant=None, is_admin=False, - read_only=False, show_deleted=False, request_id=None): + read_only=False, show_deleted=False, request_id=None, + instance_uuid=None): self.auth_token = auth_token self.user = user self.tenant = tenant self.is_admin = is_admin self.read_only = read_only self.show_deleted = show_deleted + self.instance_uuid = instance_uuid if not request_id: request_id = generate_request_id() self.request_id = request_id @@ -58,7 +60,8 @@ def to_dict(self): 'read_only': self.read_only, 'show_deleted': self.show_deleted, 'auth_token': self.auth_token, - 'request_id': self.request_id} + 'request_id': self.request_id, + 'instance_uuid': self.instance_uuid} def get_admin_context(show_deleted=False): diff --git a/billingstack/openstack/common/excutils.py b/billingstack/openstack/common/excutils.py index a2ac554..7c4db8a 100644 --- a/billingstack/openstack/common/excutils.py +++ b/billingstack/openstack/common/excutils.py @@ -24,6 +24,8 @@ import time import traceback +import six + from billingstack.openstack.common.gettextutils import _ # noqa @@ -65,7 +67,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.tb)) return False if self.reraise: - raise self.type_, self.value, self.tb + six.reraise(self.type_, 
self.value, self.tb) def forever_retry_uncaught_exceptions(infunc): @@ -77,7 +79,7 @@ def inner_func(*args, **kwargs): try: return infunc(*args, **kwargs) except Exception as exc: - this_exc_message = unicode(exc) + this_exc_message = six.u(str(exc)) if this_exc_message == last_exc_message: exc_count += 1 else: diff --git a/billingstack/openstack/common/fileutils.py b/billingstack/openstack/common/fileutils.py index 4e5ee68..d452c25 100644 --- a/billingstack/openstack/common/fileutils.py +++ b/billingstack/openstack/common/fileutils.py @@ -19,6 +19,7 @@ import contextlib import errno import os +import tempfile from billingstack.openstack.common import excutils from billingstack.openstack.common.gettextutils import _ # noqa @@ -69,33 +70,34 @@ def read_cached_file(filename, force_reload=False): return (reloaded, cache_info['data']) -def delete_if_exists(path): +def delete_if_exists(path, remove=os.unlink): """Delete a file, but ignore file not found error. :param path: File to delete + :param remove: Optional function to remove passed path """ try: - os.unlink(path) + remove(path) except OSError as e: - if e.errno == errno.ENOENT: - return - else: + if e.errno != errno.ENOENT: raise @contextlib.contextmanager -def remove_path_on_error(path): +def remove_path_on_error(path, remove=delete_if_exists): """Protect code that wants to operate on PATH atomically. Any exception will cause PATH to be removed. :param path: File to work with + :param remove: Optional function to remove passed path """ + try: yield except Exception: with excutils.save_and_reraise_exception(): - delete_if_exists(path) + remove(path) def file_open(*args, **kwargs): @@ -108,3 +110,30 @@ def file_open(*args, **kwargs): state at all (for unit tests) """ return file(*args, **kwargs) + + +def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): + """Create temporary file or use existing file. + + This util is needed for creating temporary file with + specified content, suffix and prefix. 
If path is not None, + it will be used for writing content. If the path doesn't + exist it'll be created. + + :param content: content for temporary file. + :param path: same as parameter 'dir' for mkstemp + :param suffix: same as parameter 'suffix' for mkstemp + :param prefix: same as parameter 'prefix' for mkstemp + + For example: it can be used in database tests for creating + configuration files. + """ + if path: + ensure_tree(path) + + (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix) + try: + os.write(fd, content) + finally: + os.close(fd) + return path diff --git a/billingstack/openstack/common/gettextutils.py b/billingstack/openstack/common/gettextutils.py index cc90613..7bd7183 100644 --- a/billingstack/openstack/common/gettextutils.py +++ b/billingstack/openstack/common/gettextutils.py @@ -26,10 +26,13 @@ import copy import gettext -import logging.handlers +import logging import os import re -import UserString +try: + import UserString as _userString +except ImportError: + import collections as _userString from babel import localedata import six @@ -37,7 +40,7 @@ _localedir = os.environ.get('billingstack'.upper() + '_LOCALEDIR') _t = gettext.translation('billingstack', localedir=_localedir, fallback=True) -_AVAILABLE_LANGUAGES = [] +_AVAILABLE_LANGUAGES = {} USE_LAZY = False @@ -57,6 +60,8 @@ def _(msg): if USE_LAZY: return Message(msg, 'billingstack') else: + if six.PY3: + return _t.gettext(msg) return _t.ugettext(msg) @@ -102,24 +107,28 @@ def _lazy_gettext(msg): """ return Message(msg, domain) - import __builtin__ - __builtin__.__dict__['_'] = _lazy_gettext + from six import moves + moves.builtins.__dict__['_'] = _lazy_gettext else: localedir = '%s_LOCALEDIR' % domain.upper() - gettext.install(domain, - localedir=os.environ.get(localedir), - unicode=True) + if six.PY3: + gettext.install(domain, + localedir=os.environ.get(localedir)) + else: + gettext.install(domain, + localedir=os.environ.get(localedir), + unicode=True) -class 
Message(UserString.UserString, object): +class Message(_userString.UserString, object): """Class used to encapsulate translatable messages.""" def __init__(self, msg, domain): # _msg is the gettext msgid and should never change self._msg = msg self._left_extra_msg = '' self._right_extra_msg = '' + self._locale = None self.params = None - self.locale = None self.domain = domain @property @@ -139,8 +148,13 @@ def data(self): localedir=localedir, fallback=True) + if six.PY3: + ugettext = lang.gettext + else: + ugettext = lang.ugettext + full_msg = (self._left_extra_msg + - lang.ugettext(self._msg) + + ugettext(self._msg) + self._right_extra_msg) if self.params is not None: @@ -148,6 +162,33 @@ def data(self): return six.text_type(full_msg) + @property + def locale(self): + return self._locale + + @locale.setter + def locale(self, value): + self._locale = value + if not self.params: + return + + # This Message object may have been constructed with one or more + # Message objects as substitution parameters, given as a single + # Message, or a tuple or Map containing some, so when setting the + # locale for this Message we need to set it for those Messages too. 
+ if isinstance(self.params, Message): + self.params.locale = value + return + if isinstance(self.params, tuple): + for param in self.params: + if isinstance(param, Message): + param.locale = value + return + if isinstance(self.params, dict): + for param in self.params.values(): + if isinstance(param, Message): + param.locale = value + def _save_dictionary_parameter(self, dict_param): full_msg = self.data # look for %(blah) fields in string; @@ -166,7 +207,7 @@ def _save_dictionary_parameter(self, dict_param): params[key] = copy.deepcopy(dict_param[key]) except TypeError: # cast uncopyable thing to unicode string - params[key] = unicode(dict_param[key]) + params[key] = six.text_type(dict_param[key]) return params @@ -185,7 +226,7 @@ def _save_parameters(self, other): try: self.params = copy.deepcopy(other) except TypeError: - self.params = unicode(other) + self.params = six.text_type(other) return self @@ -194,11 +235,13 @@ def __unicode__(self): return self.data def __str__(self): + if six.PY3: + return self.__unicode__() return self.data.encode('utf-8') def __getstate__(self): to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg', - 'domain', 'params', 'locale'] + 'domain', 'params', '_locale'] new_dict = self.__dict__.fromkeys(to_copy) for attr in to_copy: new_dict[attr] = copy.deepcopy(self.__dict__[attr]) @@ -252,7 +295,7 @@ def __getattribute__(self, name): if name in ops: return getattr(self.data, name) else: - return UserString.UserString.__getattribute__(self, name) + return _userString.UserString.__getattribute__(self, name) def get_available_languages(domain): @@ -260,8 +303,8 @@ def get_available_languages(domain): :param domain: the domain to get languages for """ - if _AVAILABLE_LANGUAGES: - return _AVAILABLE_LANGUAGES + if domain in _AVAILABLE_LANGUAGES: + return copy.copy(_AVAILABLE_LANGUAGES[domain]) localedir = '%s_LOCALEDIR' % domain.upper() find = lambda x: gettext.find(domain, @@ -270,28 +313,37 @@ def get_available_languages(domain): # 
NOTE(mrodden): en_US should always be available (and first in case # order matters) since our in-line message strings are en_US - _AVAILABLE_LANGUAGES.append('en_US') + language_list = ['en_US'] # NOTE(luisg): Babel <1.0 used a function called list(), which was # renamed to locale_identifiers() in >=1.0, the requirements master list # requires >=0.9.6, uncapped, so defensively work with both. We can remove - # this check when the master list updates to >=1.0, and all projects udpate + # this check when the master list updates to >=1.0, and update all projects list_identifiers = (getattr(localedata, 'list', None) or getattr(localedata, 'locale_identifiers')) locale_identifiers = list_identifiers() for i in locale_identifiers: if find(i) is not None: - _AVAILABLE_LANGUAGES.append(i) - return _AVAILABLE_LANGUAGES + language_list.append(i) + _AVAILABLE_LANGUAGES[domain] = language_list + return copy.copy(language_list) def get_localized_message(message, user_locale): - """Gets a localized version of the given message in the given locale.""" - if (isinstance(message, Message)): - if user_locale: - message.locale = user_locale - return unicode(message) - else: - return message + """Gets a localized version of the given message in the given locale. + + If the message is not a Message object the message is returned as-is. + If the locale is None the message is translated to the default locale. 
+ + :returns: the translated message in unicode, or the original message if + it could not be translated + """ + translated = message + if isinstance(message, Message): + original_locale = message.locale + message.locale = user_locale + translated = six.text_type(message) + message.locale = original_locale + return translated class LocaleHandler(logging.Handler): diff --git a/billingstack/openstack/common/jsonutils.py b/billingstack/openstack/common/jsonutils.py index 493ff87..e8ab2d5 100644 --- a/billingstack/openstack/common/jsonutils.py +++ b/billingstack/openstack/common/jsonutils.py @@ -38,14 +38,19 @@ import inspect import itertools import json -import types -import xmlrpclib +try: + import xmlrpclib +except ImportError: + # NOTE(jd): xmlrpclib is not shipped with Python 3 + xmlrpclib = None -import netaddr import six +from billingstack.openstack.common import gettextutils +from billingstack.openstack.common import importutils from billingstack.openstack.common import timeutils +netaddr = importutils.try_import("netaddr") _nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, inspect.isfunction, inspect.isgeneratorfunction, @@ -53,7 +58,8 @@ inspect.iscode, inspect.isbuiltin, inspect.isroutine, inspect.isabstract] -_simple_types = (types.NoneType, int, basestring, bool, float, long) +_simple_types = (six.string_types + six.integer_types + + (type(None), bool, float)) def to_primitive(value, convert_instances=False, convert_datetime=True, @@ -125,11 +131,13 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, # It's not clear why xmlrpclib created their own DateTime type, but # for our purposes, make it a datetime type which is explicitly # handled - if isinstance(value, xmlrpclib.DateTime): + if xmlrpclib and isinstance(value, xmlrpclib.DateTime): value = datetime.datetime(*tuple(value.timetuple())[:6]) if convert_datetime and isinstance(value, datetime.datetime): return timeutils.strtime(value) + elif 
isinstance(value, gettextutils.Message): + return value.data elif hasattr(value, 'iteritems'): return recursive(dict(value.iteritems()), level=level + 1) elif hasattr(value, '__iter__'): @@ -138,7 +146,7 @@ def to_primitive(value, convert_instances=False, convert_datetime=True, # Likely an instance of something. Watch for cycles. # Ignore class member vars. return recursive(value.__dict__, level=level + 1) - elif isinstance(value, netaddr.IPAddress): + elif netaddr and isinstance(value, netaddr.IPAddress): return six.text_type(value) else: if any(test(value) for test in _nasty_type_tests): diff --git a/billingstack/openstack/common/lockutils.py b/billingstack/openstack/common/lockutils.py index d57920d..bd35ab5 100644 --- a/billingstack/openstack/common/lockutils.py +++ b/billingstack/openstack/common/lockutils.py @@ -20,10 +20,14 @@ import errno import functools import os +import shutil +import subprocess +import sys +import tempfile +import threading import time import weakref -from eventlet import semaphore from oslo.config import cfg from billingstack.openstack.common import fileutils @@ -39,6 +43,7 @@ cfg.BoolOpt('disable_process_locking', default=False, help='Whether to disable inter-process locks'), cfg.StrOpt('lock_path', + default=os.environ.get("BILLINGSTACK_LOCK_PATH"), help=('Directory to use for lock files.')) ] @@ -131,13 +136,15 @@ def unlock(self): InterProcessLock = _PosixLock _semaphores = weakref.WeakValueDictionary() +_semaphores_lock = threading.Lock() @contextlib.contextmanager def lock(name, lock_file_prefix=None, external=False, lock_path=None): """Context based lock - This function yields a `semaphore.Semaphore` instance unless external is + This function yields a `threading.Semaphore` instance (if we don't use + eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is True, in which case, it'll yield an InterProcessLock instance. 
:param lock_file_prefix: The lock_file_prefix argument is used to provide @@ -152,15 +159,12 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None): special location for external lock files to live. If nothing is set, then CONF.lock_path is used as a default. """ - # NOTE(soren): If we ever go natively threaded, this will be racy. - # See http://stackoverflow.com/questions/5390569/dyn - # amically-allocating-and-destroying-mutexes - sem = _semaphores.get(name, semaphore.Semaphore()) - if name not in _semaphores: - # this check is not racy - we're already holding ref locally - # so GC won't remove the item and there was no IO switch - # (only valid in greenthreads) - _semaphores[name] = sem + with _semaphores_lock: + try: + sem = _semaphores[name] + except KeyError: + sem = threading.Semaphore() + _semaphores[name] = sem with sem: LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name}) @@ -240,13 +244,14 @@ def bar(self, *args): def wrap(f): @functools.wraps(f) def inner(*args, **kwargs): - with lock(name, lock_file_prefix, external, lock_path): - LOG.debug(_('Got semaphore / lock "%(function)s"'), + try: + with lock(name, lock_file_prefix, external, lock_path): + LOG.debug(_('Got semaphore / lock "%(function)s"'), + {'function': f.__name__}) + return f(*args, **kwargs) + finally: + LOG.debug(_('Semaphore / lock released "%(function)s"'), {'function': f.__name__}) - return f(*args, **kwargs) - - LOG.debug(_('Semaphore / lock released "%(function)s"'), - {'function': f.__name__}) return inner return wrap @@ -274,3 +279,27 @@ def bar(self, *args): """ return functools.partial(synchronized, lock_file_prefix=lock_file_prefix) + + +def main(argv): + """Create a dir for locks and pass it to command from arguments + + If you run this: + python -m openstack.common.lockutils python setup.py testr + + a temporary directory will be created for all your locks and passed to all + your tests in an environment variable. 
The temporary dir will be deleted + afterwards and the return value will be preserved. + """ + + lock_dir = tempfile.mkdtemp() + os.environ["BILLINGSTACK_LOCK_PATH"] = lock_dir + try: + ret_val = subprocess.call(argv[1:]) + finally: + shutil.rmtree(lock_dir, ignore_errors=True) + return ret_val + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/billingstack/openstack/common/log.py b/billingstack/openstack/common/log.py index 246d4de..5c0b093 100644 --- a/billingstack/openstack/common/log.py +++ b/billingstack/openstack/common/log.py @@ -35,10 +35,12 @@ import logging.config import logging.handlers import os +import re import sys import traceback from oslo.config import cfg +import six from six import moves from billingstack.openstack.common.gettextutils import _ # noqa @@ -49,6 +51,24 @@ _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" +_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password'] + +# NOTE(ldbragst): Let's build a list of regex objects using the list of +# _SANITIZE_KEYS we already have. This way, we only have to add the new key +# to the list of _SANITIZE_KEYS and we can generate regular expressions +# for XML and JSON automatically. +_SANITIZE_PATTERNS = [] +_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', + r'(<%(key)s>).*?()', + r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', + r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])'] + +for key in _SANITIZE_KEYS: + for pattern in _FORMAT_PATTERNS: + reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) + _SANITIZE_PATTERNS.append(reg_ex) + + common_cli_opts = [ cfg.BoolOpt('debug', short='d', @@ -63,11 +83,13 @@ ] logging_cli_opts = [ - cfg.StrOpt('log-config', + cfg.StrOpt('log-config-append', metavar='PATH', - help='If this option is specified, the logging configuration ' - 'file specified is used and overrides any other logging ' - 'options specified. Please see the Python logging module ' + deprecated_name='log-config', + help='The name of logging configuration file. 
It does not ' + 'disable existing loggers, but just appends specified ' + 'logging configuration to any other existing logging ' + 'options. Please see the Python logging module ' 'documentation for details on logging configuration ' 'files.'), cfg.StrOpt('log-format', @@ -126,12 +148,14 @@ help='prefix each line of exception output with this format'), cfg.ListOpt('default_log_levels', default=[ + 'amqp=WARN', 'amqplib=WARN', - 'sqlalchemy=WARN', 'boto=WARN', - 'suds=INFO', 'keystone=INFO', - 'eventlet.wsgi.server=WARN' + 'qpid=WARN', + 'sqlalchemy=WARN', + 'suds=INFO', + 'iso8601=WARN', ], help='list of logger=LEVEL pairs'), cfg.BoolOpt('publish_errors', @@ -207,6 +231,41 @@ def _get_log_file_path(binary=None): binary = binary or _get_binary_name() return '%s.log' % (os.path.join(logdir, binary),) + return None + + +def mask_password(message, secret="***"): + """Replace password with 'secret' in message. + + :param message: The string which includes security information. + :param secret: value with which to replace passwords, defaults to "***". + :returns: The unicode value of message with the password fields masked. + + For example: + >>> mask_password("'adminPass' : 'aaaaa'") + "'adminPass' : '***'" + >>> mask_password("'admin_pass' : 'aaaaa'") + "'admin_pass' : '***'" + >>> mask_password('"password" : "aaaaa"') + '"password" : "***"' + >>> mask_password("'original_password' : 'aaaaa'") + "'original_password' : '***'" + >>> mask_password("u'original_password' : u'aaaaa'") + "u'original_password' : u'***'" + """ + message = six.text_type(message) + + # NOTE(ldbragst): Check to see if anything in message contains any key + # specified in _SANITIZE_KEYS, if not then just return the message since + # we don't have to mask any passwords. 
+ if not any(key in message for key in _SANITIZE_KEYS): + return message + + secret = r'\g<1>' + secret + r'\g<2>' + for pattern in _SANITIZE_PATTERNS: + message = re.sub(pattern, secret, message) + return message + class BaseLoggerAdapter(logging.LoggerAdapter): @@ -249,6 +308,13 @@ def deprecated(self, msg, *args, **kwargs): self.warn(stdmsg, *args, **kwargs) def process(self, msg, kwargs): + # NOTE(mrodden): catch any Message/other object and + # coerce to unicode before they can get + # to the python logging and possibly + # cause string encoding trouble + if not isinstance(msg, six.string_types): + msg = six.text_type(msg) + if 'extra' not in kwargs: kwargs['extra'] = {} extra = kwargs['extra'] @@ -260,14 +326,14 @@ def process(self, msg, kwargs): extra.update(_dictify_context(context)) instance = kwargs.pop('instance', None) + instance_uuid = (extra.get('instance_uuid', None) or + kwargs.pop('instance_uuid', None)) instance_extra = '' if instance: instance_extra = CONF.instance_format % instance - else: - instance_uuid = kwargs.pop('instance_uuid', None) - if instance_uuid: - instance_extra = (CONF.instance_uuid_format - % {'uuid': instance_uuid}) + elif instance_uuid: + instance_extra = (CONF.instance_uuid_format + % {'uuid': instance_uuid}) extra.update({'instance': instance_extra}) extra.update({"project": self.project}) @@ -344,17 +410,18 @@ def __str__(self): err_msg=self.err_msg) -def _load_log_config(log_config): +def _load_log_config(log_config_append): try: - logging.config.fileConfig(log_config) + logging.config.fileConfig(log_config_append, + disable_existing_loggers=False) except moves.configparser.Error as exc: - raise LogConfigError(log_config, str(exc)) + raise LogConfigError(log_config_append, str(exc)) def setup(product_name): """Setup logging.""" - if CONF.log_config: - _load_log_config(CONF.log_config) + if CONF.log_config_append: + _load_log_config(CONF.log_config_append) else: _setup_logging_from_conf() sys.excepthook = 
_create_logging_excepthook(product_name) diff --git a/billingstack/openstack/common/notifier/rpc_notifier.py b/billingstack/openstack/common/notifier/rpc_notifier.py index 1b230a4..31e6d93 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier.py +++ b/billingstack/openstack/common/notifier/rpc_notifier.py @@ -43,4 +43,5 @@ def notify(context, message): rpc.notify(context, topic, message) except Exception: LOG.exception(_("Could not send notification to %(topic)s. " - "Payload=%(message)s"), locals()) + "Payload=%(message)s"), + {"topic": topic, "message": message}) diff --git a/billingstack/openstack/common/notifier/rpc_notifier2.py b/billingstack/openstack/common/notifier/rpc_notifier2.py index af10d48..3474073 100644 --- a/billingstack/openstack/common/notifier/rpc_notifier2.py +++ b/billingstack/openstack/common/notifier/rpc_notifier2.py @@ -49,4 +49,5 @@ def notify(context, message): rpc.notify(context, topic, message, envelope=True) except Exception: LOG.exception(_("Could not send notification to %(topic)s. 
" - "Payload=%(message)s"), locals()) + "Payload=%(message)s"), + {"topic": topic, "message": message}) diff --git a/billingstack/openstack/common/processutils.py b/billingstack/openstack/common/processutils.py index 169c52c..fdcb3d1 100644 --- a/billingstack/openstack/common/processutils.py +++ b/billingstack/openstack/common/processutils.py @@ -132,7 +132,7 @@ def execute(*cmd, **kwargs): raise UnknownArgumentError(_('Got unknown keyword args ' 'to utils.execute: %r') % kwargs) - if run_as_root and os.geteuid() != 0: + if run_as_root and hasattr(os, 'geteuid') and os.geteuid() != 0: if not root_helper: raise NoRootWrapSpecified( message=('Command requested root, but did not specify a root ' @@ -168,14 +168,13 @@ def execute(*cmd, **kwargs): result = obj.communicate() obj.stdin.close() # pylint: disable=E1101 _returncode = obj.returncode # pylint: disable=E1101 - if _returncode: - LOG.log(loglevel, _('Result was %s') % _returncode) - if not ignore_exit_code and _returncode not in check_exit_code: - (stdout, stderr) = result - raise ProcessExecutionError(exit_code=_returncode, - stdout=stdout, - stderr=stderr, - cmd=' '.join(cmd)) + LOG.log(loglevel, _('Result was %s') % _returncode) + if not ignore_exit_code and _returncode not in check_exit_code: + (stdout, stderr) = result + raise ProcessExecutionError(exit_code=_returncode, + stdout=stdout, + stderr=stderr, + cmd=' '.join(cmd)) return result except ProcessExecutionError: if not attempts: diff --git a/billingstack/openstack/common/rpc/__init__.py b/billingstack/openstack/common/rpc/__init__.py index 423d845..6d972aa 100644 --- a/billingstack/openstack/common/rpc/__init__.py +++ b/billingstack/openstack/common/rpc/__init__.py @@ -61,7 +61,7 @@ 'exceptions', ], help='Modules of exceptions that are permitted to be recreated' - 'upon receiving exception data from an rpc call.'), + ' upon receiving exception data from an rpc call.'), cfg.BoolOpt('fake_rabbit', default=False, help='If passed, use a fake RabbitMQ 
provider'), @@ -227,7 +227,7 @@ def notify(context, topic, msg, envelope=False): def cleanup(): - """Clean up resoruces in use by implementation. + """Clean up resources in use by implementation. Clean up any resources that have been allocated by the RPC implementation. This is typically open connections to a messaging service. This function diff --git a/billingstack/openstack/common/rpc/amqp.py b/billingstack/openstack/common/rpc/amqp.py index 683bef2..6206d36 100644 --- a/billingstack/openstack/common/rpc/amqp.py +++ b/billingstack/openstack/common/rpc/amqp.py @@ -20,9 +20,9 @@ """ Shared code between AMQP based openstack.common.rpc implementations. -The code in this module is shared between the rpc implemenations based on AMQP. -Specifically, this includes impl_kombu and impl_qpid. impl_carrot also uses -AMQP, but is deprecated and predates this code. +The code in this module is shared between the rpc implementations based on +AMQP. Specifically, this includes impl_kombu and impl_qpid. impl_carrot also +uses AMQP, but is deprecated and predates this code. """ import collections @@ -189,7 +189,7 @@ class ReplyProxy(ConnectionContext): def __init__(self, conf, connection_pool): self._call_waiters = {} self._num_call_waiters = 0 - self._num_call_waiters_wrn_threshhold = 10 + self._num_call_waiters_wrn_threshold = 10 self._reply_q = 'reply_' + uuid.uuid4().hex super(ReplyProxy, self).__init__(conf, connection_pool, pooled=False) self.declare_direct_consumer(self._reply_q, self._process_data) @@ -208,11 +208,11 @@ def _process_data(self, message_data): def add_call_waiter(self, waiter, msg_id): self._num_call_waiters += 1 - if self._num_call_waiters > self._num_call_waiters_wrn_threshhold: + if self._num_call_waiters > self._num_call_waiters_wrn_threshold: LOG.warn(_('Number of call waiters is greater than warning ' - 'threshhold: %d. 
There could be a MulticallProxyWaiter ' - 'leak.') % self._num_call_waiters_wrn_threshhold) - self._num_call_waiters_wrn_threshhold *= 2 + 'threshold: %d. There could be a MulticallProxyWaiter ' + 'leak.') % self._num_call_waiters_wrn_threshold) + self._num_call_waiters_wrn_threshold *= 2 self._call_waiters[msg_id] = waiter def del_call_waiter(self, msg_id): @@ -241,7 +241,7 @@ def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None, _add_unique_id(msg) # If a reply_q exists, add the msg_id to the reply and pass the # reply_q to direct_send() to use it as the response queue. - # Otherwise use the msg_id for backward compatibilty. + # Otherwise use the msg_id for backward compatibility. if reply_q: msg['_msg_id'] = msg_id conn.direct_send(reply_q, rpc_common.serialize_msg(msg)) @@ -364,22 +364,43 @@ class CallbackWrapper(_ThreadPoolWithWait): Allows it to be invoked in a green thread. """ - def __init__(self, conf, callback, connection_pool): + def __init__(self, conf, callback, connection_pool, + wait_for_consumers=False): """Initiates CallbackWrapper object. :param conf: cfg.CONF instance :param callback: a callable (probably a function) :param connection_pool: connection pool as returned by get_connection_pool() + :param wait_for_consumers: wait for all green threads to + complete and raise the last + caught exception, if any. + """ super(CallbackWrapper, self).__init__( conf=conf, connection_pool=connection_pool, ) self.callback = callback + self.wait_for_consumers = wait_for_consumers + self.exc_info = None + + def _wrap(self, message_data, **kwargs): + """Wrap the callback invocation to catch exceptions. 
+ """ + try: + self.callback(message_data, **kwargs) + except Exception: + self.exc_info = sys.exc_info() def __call__(self, message_data): - self.pool.spawn_n(self.callback, message_data) + self.exc_info = None + self.pool.spawn_n(self._wrap, message_data) + + if self.wait_for_consumers: + self.pool.waitall() + if self.exc_info: + raise self.exc_info[1], None, self.exc_info[2] class ProxyCallback(_ThreadPoolWithWait): diff --git a/billingstack/openstack/common/rpc/common.py b/billingstack/openstack/common/rpc/common.py index b8bc17a..b328715 100644 --- a/billingstack/openstack/common/rpc/common.py +++ b/billingstack/openstack/common/rpc/common.py @@ -29,6 +29,7 @@ from billingstack.openstack.common import jsonutils from billingstack.openstack.common import local from billingstack.openstack.common import log as logging +from billingstack.openstack.common import versionutils CONF = cfg.CONF @@ -441,19 +442,15 @@ def inner(*args, **kwargs): return outer +# TODO(sirp): we should deprecate this in favor of +# using `versionutils.is_compatible` directly def version_is_compatible(imp_version, version): """Determine whether versions are compatible. :param imp_version: The version implemented :param version: The version requested by an incoming message. 
""" - version_parts = version.split('.') - imp_version_parts = imp_version.split('.') - if int(version_parts[0]) != int(imp_version_parts[0]): # Major - return False - if int(version_parts[1]) > int(imp_version_parts[1]): # Minor - return False - return True + return versionutils.is_compatible(version, imp_version) def serialize_msg(raw_msg): diff --git a/billingstack/openstack/common/rpc/impl_fake.py b/billingstack/openstack/common/rpc/impl_fake.py index ef4a39f..e68f67a 100644 --- a/billingstack/openstack/common/rpc/impl_fake.py +++ b/billingstack/openstack/common/rpc/impl_fake.py @@ -146,7 +146,7 @@ def multicall(conf, context, topic, msg, timeout=None): try: consumer = CONSUMERS[topic][0] except (KeyError, IndexError): - return iter([None]) + raise rpc_common.Timeout("No consumers available") else: return consumer.call(context, version, method, namespace, args, timeout) diff --git a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py index 8d8dc23..717b8b9 100644 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ b/billingstack/openstack/common/rpc/impl_kombu.py @@ -146,29 +146,23 @@ def _callback_handler(self, message, callback): Messages that are processed without exception are ack'ed. If the message processing generates an exception, it will be - ack'ed if ack_on_error=True. Otherwise it will be .reject()'ed. - Rejection is better than waiting for the message to timeout. - Rejected messages are immediately requeued. + ack'ed if ack_on_error=True. Otherwise it will be .requeue()'ed. """ - ack_msg = False try: msg = rpc_common.deserialize_msg(message.payload) callback(msg) - ack_msg = True except Exception: if self.ack_on_error: - ack_msg = True LOG.exception(_("Failed to process message" " ... skipping it.")) + message.ack() else: LOG.exception(_("Failed to process message" " ... 
will requeue.")) - finally: - if ack_msg: - message.ack() - else: - message.reject() + message.requeue() + else: + message.ack() def consume(self, *args, **kwargs): """Actually declare the consumer on the amqp channel. This will @@ -789,6 +783,7 @@ def join_consumer_pool(self, callback, pool_name, topic, callback=callback, connection_pool=rpc_amqp.get_connection_pool(self.conf, Connection), + wait_for_consumers=not ack_on_error ) self.proxy_callbacks.append(callback_wrapper) self.declare_topic_consumer( diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py index e75035d..59c9e67 100644 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ b/billingstack/openstack/common/rpc/impl_qpid.py @@ -67,6 +67,17 @@ cfg.BoolOpt('qpid_tcp_nodelay', default=True, help='Disable Nagle algorithm'), + # NOTE(russellb) If any additional versions are added (beyond 1 and 2), + # this file could probably use some additional refactoring so that the + # differences between each version are split into different classes. + cfg.IntOpt('qpid_topology_version', + default=1, + help="The qpid topology version to use. Version 1 is what " + "was originally used by impl_qpid. Version 2 includes " + "some backwards-incompatible changes that allow broker " + "federation to work. Users should update to version 2 " + "when they are able to take everything down, as it " + "requires a clean break."), ] cfg.CONF.register_opts(qpid_opts) @@ -74,10 +85,17 @@ JSON_CONTENT_TYPE = 'application/json; charset=utf8' +def raise_invalid_topology_version(conf): + msg = (_("Invalid value for qpid_topology_version: %d") % + conf.qpid_topology_version) + LOG.error(msg) + raise Exception(msg) + + class ConsumerBase(object): """Consumer base class.""" - def __init__(self, session, callback, node_name, node_opts, + def __init__(self, conf, session, callback, node_name, node_opts, link_name, link_opts): """Declare a queue on an amqp session. 
@@ -95,26 +113,38 @@ def __init__(self, session, callback, node_name, node_opts, self.receiver = None self.session = None - addr_opts = { - "create": "always", - "node": { - "type": "topic", - "x-declare": { + if conf.qpid_topology_version == 1: + addr_opts = { + "create": "always", + "node": { + "type": "topic", + "x-declare": { + "durable": True, + "auto-delete": True, + }, + }, + "link": { + "name": link_name, "durable": True, - "auto-delete": True, + "x-declare": { + "durable": False, + "auto-delete": True, + "exclusive": False, + }, }, - }, - "link": { - "name": link_name, - "durable": True, - "x-declare": { - "durable": False, - "auto-delete": True, - "exclusive": False, + } + addr_opts["node"]["x-declare"].update(node_opts) + elif conf.qpid_topology_version == 2: + addr_opts = { + "link": { + "x-declare": { + "auto-delete": True, + }, }, - }, - } - addr_opts["node"]["x-declare"].update(node_opts) + } + else: + raise_invalid_topology_version() + addr_opts["link"]["x-declare"].update(link_opts) self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) @@ -122,7 +152,7 @@ def __init__(self, session, callback, node_name, node_opts, self.connect(session) def connect(self, session): - """Declare the reciever on connect.""" + """Declare the receiver on connect.""" self._declare_receiver(session) def reconnect(self, session): @@ -181,16 +211,24 @@ def __init__(self, conf, session, msg_id, callback): 'callback' is the callback to call when messages are received """ - super(DirectConsumer, self).__init__( - session, callback, - "%s/%s" % (msg_id, msg_id), - {"type": "direct"}, - msg_id, - { - "auto-delete": conf.amqp_auto_delete, - "exclusive": True, - "durable": conf.amqp_durable_queues, - }) + link_opts = { + "auto-delete": conf.amqp_auto_delete, + "exclusive": True, + "durable": conf.amqp_durable_queues, + } + + if conf.qpid_topology_version == 1: + node_name = "%s/%s" % (msg_id, msg_id) + node_opts = {"type": "direct"} + elif conf.qpid_topology_version == 
2: + node_name = "amq.direct/%s" % msg_id + node_opts = {} + else: + raise_invalid_topology_version() + + super(DirectConsumer, self).__init__(conf, session, callback, + node_name, node_opts, msg_id, + link_opts) class TopicConsumer(ConsumerBase): @@ -208,14 +246,20 @@ def __init__(self, conf, session, topic, callback, name=None, """ exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) - super(TopicConsumer, self).__init__( - session, callback, - "%s/%s" % (exchange_name, topic), - {}, name or topic, - { - "auto-delete": conf.amqp_auto_delete, - "durable": conf.amqp_durable_queues, - }) + link_opts = { + "auto-delete": conf.amqp_auto_delete, + "durable": conf.amqp_durable_queues, + } + + if conf.qpid_topology_version == 1: + node_name = "%s/%s" % (exchange_name, topic) + elif conf.qpid_topology_version == 2: + node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) + else: + raise_invalid_topology_version() + + super(TopicConsumer, self).__init__(conf, session, callback, node_name, + {}, name or topic, link_opts) class FanoutConsumer(ConsumerBase): @@ -230,12 +274,22 @@ def __init__(self, conf, session, topic, callback): """ self.conf = conf - super(FanoutConsumer, self).__init__( - session, callback, - "%s_fanout" % topic, - {"durable": False, "type": "fanout"}, - "%s_fanout_%s" % (topic, uuid.uuid4().hex), - {"exclusive": True}) + link_opts = {"exclusive": True} + + if conf.qpid_topology_version == 1: + node_name = "%s_fanout" % topic + node_opts = {"durable": False, "type": "fanout"} + link_name = "%s_fanout_%s" % (topic, uuid.uuid4().hex) + elif conf.qpid_topology_version == 2: + node_name = "amq.topic/fanout/%s" % topic + node_opts = {} + link_name = "" + else: + raise_invalid_topology_version() + + super(FanoutConsumer, self).__init__(conf, session, callback, + node_name, node_opts, link_name, + link_opts) def reconnect(self, session): topic = self.get_node_name().rpartition('_fanout')[0] @@ -253,29 +307,34 @@ def reconnect(self, 
session): class Publisher(object): """Base Publisher class.""" - def __init__(self, session, node_name, node_opts=None): + def __init__(self, conf, session, node_name, node_opts=None): """Init the Publisher class with the exchange_name, routing_key, and other options """ self.sender = None self.session = session - addr_opts = { - "create": "always", - "node": { - "type": "topic", - "x-declare": { - "durable": False, - # auto-delete isn't implemented for exchanges in qpid, - # but put in here anyway - "auto-delete": True, + if conf.qpid_topology_version == 1: + addr_opts = { + "create": "always", + "node": { + "type": "topic", + "x-declare": { + "durable": False, + # auto-delete isn't implemented for exchanges in qpid, + # but put in here anyway + "auto-delete": True, + }, }, - }, - } - if node_opts: - addr_opts["node"]["x-declare"].update(node_opts) + } + if node_opts: + addr_opts["node"]["x-declare"].update(node_opts) - self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) + self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) + elif conf.qpid_topology_version == 2: + self.address = node_name + else: + raise_invalid_topology_version() self.reconnect(session) @@ -319,39 +378,73 @@ class DirectPublisher(Publisher): """Publisher class for 'direct'.""" def __init__(self, conf, session, msg_id): """Init a 'direct' publisher.""" - super(DirectPublisher, self).__init__(session, msg_id, - {"type": "direct"}) + + if conf.qpid_topology_version == 1: + node_name = msg_id + node_opts = {"type": "direct"} + elif conf.qpid_topology_version == 2: + node_name = "amq.direct/%s" % msg_id + node_opts = {} + else: + raise_invalid_topology_version() + + super(DirectPublisher, self).__init__(conf, session, node_name, + node_opts) class TopicPublisher(Publisher): """Publisher class for 'topic'.""" def __init__(self, conf, session, topic): - """init a 'topic' publisher. + """Init a 'topic' publisher. 
""" exchange_name = rpc_amqp.get_control_exchange(conf) - super(TopicPublisher, self).__init__(session, - "%s/%s" % (exchange_name, topic)) + + if conf.qpid_topology_version == 1: + node_name = "%s/%s" % (exchange_name, topic) + elif conf.qpid_topology_version == 2: + node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) + else: + raise_invalid_topology_version() + + super(TopicPublisher, self).__init__(conf, session, node_name) class FanoutPublisher(Publisher): """Publisher class for 'fanout'.""" def __init__(self, conf, session, topic): - """init a 'fanout' publisher. + """Init a 'fanout' publisher. """ - super(FanoutPublisher, self).__init__( - session, - "%s_fanout" % topic, {"type": "fanout"}) + + if conf.qpid_topology_version == 1: + node_name = "%s_fanout" % topic + node_opts = {"type": "fanout"} + elif conf.qpid_topology_version == 2: + node_name = "amq.topic/fanout/%s" % topic + node_opts = {} + else: + raise_invalid_topology_version() + + super(FanoutPublisher, self).__init__(conf, session, node_name, + node_opts) class NotifyPublisher(Publisher): """Publisher class for notifications.""" def __init__(self, conf, session, topic): - """init a 'topic' publisher. + """Init a 'topic' publisher. 
""" exchange_name = rpc_amqp.get_control_exchange(conf) - super(NotifyPublisher, self).__init__(session, - "%s/%s" % (exchange_name, topic), - {"durable": True}) + node_opts = {"durable": True} + + if conf.qpid_topology_version == 1: + node_name = "%s/%s" % (exchange_name, topic) + elif conf.qpid_topology_version == 2: + node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) + else: + raise_invalid_topology_version() + + super(NotifyPublisher, self).__init__(conf, session, node_name, + node_opts) class Connection(object): @@ -665,6 +758,7 @@ def join_consumer_pool(self, callback, pool_name, topic, callback=callback, connection_pool=rpc_amqp.get_connection_pool(self.conf, Connection), + wait_for_consumers=not ack_on_error ) self.proxy_callbacks.append(callback_wrapper) diff --git a/billingstack/openstack/common/rpc/impl_zmq.py b/billingstack/openstack/common/rpc/impl_zmq.py index 4f7e9eb..63963df 100644 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ b/billingstack/openstack/common/rpc/impl_zmq.py @@ -192,7 +192,7 @@ def close(self): # it would be much worse if some of the code calling this # were to fail. For now, lets log, and later evaluate # if we can safely raise here. - LOG.error("ZeroMQ socket could not be closed.") + LOG.error(_("ZeroMQ socket could not be closed.")) self.sock = None def recv(self, **kwargs): diff --git a/billingstack/openstack/common/rpc/proxy.py b/billingstack/openstack/common/rpc/proxy.py index 6edf599..2b791d7 100644 --- a/billingstack/openstack/common/rpc/proxy.py +++ b/billingstack/openstack/common/rpc/proxy.py @@ -21,7 +21,6 @@ rpc/dispatcher.py """ - from billingstack.openstack.common import rpc from billingstack.openstack.common.rpc import common as rpc_common from billingstack.openstack.common.rpc import serializer as rpc_serializer @@ -36,7 +35,7 @@ class RpcProxy(object): rpc API. """ - # The default namespace, which can be overriden in a subclass. + # The default namespace, which can be overridden in a subclass. 
RPC_API_NAMESPACE = None def __init__(self, topic, default_version, version_cap=None, diff --git a/billingstack/openstack/common/rpc/serializer.py b/billingstack/openstack/common/rpc/serializer.py index 76c6831..9bc6e2a 100644 --- a/billingstack/openstack/common/rpc/serializer.py +++ b/billingstack/openstack/common/rpc/serializer.py @@ -16,10 +16,12 @@ import abc +import six + +@six.add_metaclass(abc.ABCMeta) class Serializer(object): """Generic (de-)serialization definition base class.""" - __metaclass__ = abc.ABCMeta @abc.abstractmethod def serialize_entity(self, context, entity): diff --git a/billingstack/openstack/common/service.py b/billingstack/openstack/common/service.py index 613340a..0530911 100644 --- a/billingstack/openstack/common/service.py +++ b/billingstack/openstack/common/service.py @@ -20,6 +20,7 @@ """Generic Node base class for all workers that run on hosts.""" import errno +import logging as std_logging import os import random import signal @@ -28,7 +29,6 @@ import eventlet from eventlet import event -import logging as std_logging from oslo.config import cfg from billingstack.openstack.common import eventlet_backdoor @@ -43,6 +43,29 @@ LOG = logging.getLogger(__name__) +def _sighup_supported(): + return hasattr(signal, 'SIGHUP') + + +def _is_sighup(signo): + return _sighup_supported() and signo == signal.SIGHUP + + +def _signo_to_signame(signo): + signals = {signal.SIGTERM: 'SIGTERM', + signal.SIGINT: 'SIGINT'} + if _sighup_supported(): + signals[signal.SIGHUP] = 'SIGHUP' + return signals[signo] + + +def _set_signals_handler(handler): + signal.signal(signal.SIGTERM, handler) + signal.signal(signal.SIGINT, handler) + if _sighup_supported(): + signal.signal(signal.SIGHUP, handler) + + class Launcher(object): """Launch one or more services and wait for them to complete.""" @@ -100,18 +123,13 @@ def __init__(self, signo, exccode=1): class ServiceLauncher(Launcher): def _handle_signal(self, signo, frame): # Allow the process to be killed again and 
die from natural causes - signal.signal(signal.SIGTERM, signal.SIG_DFL) - signal.signal(signal.SIGINT, signal.SIG_DFL) - signal.signal(signal.SIGHUP, signal.SIG_DFL) - + _set_signals_handler(signal.SIG_DFL) raise SignalExit(signo) def handle_signal(self): - signal.signal(signal.SIGTERM, self._handle_signal) - signal.signal(signal.SIGINT, self._handle_signal) - signal.signal(signal.SIGHUP, self._handle_signal) + _set_signals_handler(self._handle_signal) - def _wait_for_exit_or_signal(self): + def _wait_for_exit_or_signal(self, ready_callback=None): status = None signo = 0 @@ -119,11 +137,11 @@ def _wait_for_exit_or_signal(self): CONF.log_opt_values(LOG, std_logging.DEBUG) try: + if ready_callback: + ready_callback() super(ServiceLauncher, self).wait() except SignalExit as exc: - signame = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT', - signal.SIGHUP: 'SIGHUP'}[exc.signo] + signame = _signo_to_signame(exc.signo) LOG.info(_('Caught %s, exiting'), signame) status = exc.code signo = exc.signo @@ -140,11 +158,11 @@ def _wait_for_exit_or_signal(self): return status, signo - def wait(self): + def wait(self, ready_callback=None): while True: self.handle_signal() - status, signo = self._wait_for_exit_or_signal() - if signo != signal.SIGHUP: + status, signo = self._wait_for_exit_or_signal(ready_callback) + if not _is_sighup(signo): return status self.restart() @@ -167,18 +185,14 @@ def __init__(self): self.handle_signal() def handle_signal(self): - signal.signal(signal.SIGTERM, self._handle_signal) - signal.signal(signal.SIGINT, self._handle_signal) - signal.signal(signal.SIGHUP, self._handle_signal) + _set_signals_handler(self._handle_signal) def _handle_signal(self, signo, frame): self.sigcaught = signo self.running = False # Allow the process to be killed again and die from natural causes - signal.signal(signal.SIGTERM, signal.SIG_DFL) - signal.signal(signal.SIGINT, signal.SIG_DFL) - signal.signal(signal.SIGHUP, signal.SIG_DFL) + 
_set_signals_handler(signal.SIG_DFL) def _pipe_watcher(self): # This will block until the write end is closed when the parent @@ -200,7 +214,8 @@ def _sighup(*args): raise SignalExit(signal.SIGHUP) signal.signal(signal.SIGTERM, _sigterm) - signal.signal(signal.SIGHUP, _sighup) + if _sighup_supported(): + signal.signal(signal.SIGHUP, _sighup) # Block SIGINT and let the parent send us a SIGTERM signal.signal(signal.SIGINT, signal.SIG_IGN) @@ -208,12 +223,13 @@ def _child_wait_for_exit_or_signal(self, launcher): status = None signo = 0 + # NOTE(johannes): All exceptions are caught to ensure this + # doesn't fallback into the loop spawning children. It would + # be bad for a child to spawn more children. try: launcher.wait() except SignalExit as exc: - signame = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT', - signal.SIGHUP: 'SIGHUP'}[exc.signo] + signame = _signo_to_signame(exc.signo) LOG.info(_('Caught %s, exiting'), signame) status = exc.code signo = exc.signo @@ -262,14 +278,11 @@ def _start_child(self, wrap): pid = os.fork() if pid == 0: - # NOTE(johannes): All exceptions are caught to ensure this - # doesn't fallback into the loop spawning children. It would - # be bad for a child to spawn more children. 
launcher = self._child_process(wrap.service) while True: self._child_process_handle_signal() status, signo = self._child_wait_for_exit_or_signal(launcher) - if signo != signal.SIGHUP: + if not _is_sighup(signo): break launcher.restart() @@ -339,11 +352,9 @@ def wait(self): self.handle_signal() self._respawn_children() if self.sigcaught: - signame = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT', - signal.SIGHUP: 'SIGHUP'}[self.sigcaught] + signame = _signo_to_signame(self.sigcaught) LOG.info(_('Caught %s, stopping children'), signame) - if self.sigcaught != signal.SIGHUP: + if not _is_sighup(self.sigcaught): break for pid in self.children: diff --git a/billingstack/openstack/common/test.py b/billingstack/openstack/common/test.py new file mode 100644 index 0000000..8d63bdc --- /dev/null +++ b/billingstack/openstack/common/test.py @@ -0,0 +1,54 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright 2010-2011 OpenStack Foundation +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +"""Common utilities used in testing""" + +import os + +import fixtures +import testtools + + +class BaseTestCase(testtools.TestCase): + + def setUp(self): + super(BaseTestCase, self).setUp() + self._set_timeout() + self._fake_output() + self.useFixture(fixtures.FakeLogger('billingstack.openstack.common')) + self.useFixture(fixtures.NestedTempfile()) + self.useFixture(fixtures.TempHomeDir()) + + def _set_timeout(self): + test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) + try: + test_timeout = int(test_timeout) + except ValueError: + # If timeout value is invalid do not set a timeout. + test_timeout = 0 + if test_timeout > 0: + self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) + + def _fake_output(self): + if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or + os.environ.get('OS_STDOUT_CAPTURE') == '1'): + stdout = self.useFixture(fixtures.StringStream('stdout')).stream + self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) + if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or + os.environ.get('OS_STDERR_CAPTURE') == '1'): + stderr = self.useFixture(fixtures.StringStream('stderr')).stream + self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) diff --git a/billingstack/openstack/common/threadgroup.py b/billingstack/openstack/common/threadgroup.py index 2eef8fd..c7f9153 100644 --- a/billingstack/openstack/common/threadgroup.py +++ b/billingstack/openstack/common/threadgroup.py @@ -48,6 +48,9 @@ def stop(self): def wait(self): return self.thread.wait() + def link(self, func, *args, **kwargs): + self.thread.link(func, *args, **kwargs) + class ThreadGroup(object): """The point of the ThreadGroup classis to: @@ -79,6 +82,7 @@ def add_thread(self, callback, *args, **kwargs): gt = self.pool.spawn(callback, *args, **kwargs) th = Thread(gt, self) self.threads.append(th) + return th def thread_done(self, thread): self.threads.remove(thread) diff --git a/billingstack/openstack/common/timeutils.py b/billingstack/openstack/common/timeutils.py index 
aa9f708..b79ebf3 100644 --- a/billingstack/openstack/common/timeutils.py +++ b/billingstack/openstack/common/timeutils.py @@ -21,6 +21,7 @@ import calendar import datetime +import time import iso8601 import six @@ -49,9 +50,9 @@ def parse_isotime(timestr): try: return iso8601.parse_date(timestr) except iso8601.ParseError as e: - raise ValueError(unicode(e)) + raise ValueError(six.text_type(e)) except TypeError as e: - raise ValueError(unicode(e)) + raise ValueError(six.text_type(e)) def strtime(at=None, fmt=PERFECT_TIME_FORMAT): @@ -90,6 +91,11 @@ def is_newer_than(after, seconds): def utcnow_ts(): """Timestamp version of our utcnow function.""" + if utcnow.override_time is None: + # NOTE(kgriffs): This is several times faster + # than going through calendar.timegm(...) + return int(time.time()) + return calendar.timegm(utcnow().timetuple()) @@ -111,12 +117,15 @@ def iso8601_from_timestamp(timestamp): utcnow.override_time = None -def set_time_override(override_time=datetime.datetime.utcnow()): +def set_time_override(override_time=None): """Overrides utils.utcnow. Make it return a constant time or a list thereof, one at a time. + + :param override_time: datetime instance or list thereof. If not + given, defaults to the current UTC time. """ - utcnow.override_time = override_time + utcnow.override_time = override_time or datetime.datetime.utcnow() def advance_time_delta(timedelta): diff --git a/billingstack/openstack/common/versionutils.py b/billingstack/openstack/common/versionutils.py new file mode 100644 index 0000000..f7b1f8a --- /dev/null +++ b/billingstack/openstack/common/versionutils.py @@ -0,0 +1,45 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +# Copyright (c) 2013 OpenStack Foundation +# All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +""" +Helpers for comparing version strings. +""" + +import pkg_resources + + +def is_compatible(requested_version, current_version, same_major=True): + """Determine whether `requested_version` is satisfied by + `current_version`; in other words, `current_version` is >= + `requested_version`. + + :param requested_version: version to check for compatibility + :param current_version: version to check against + :param same_major: if True, the major version must be identical between + `requested_version` and `current_version`. This is used when a + major-version difference indicates incompatibility between the two + versions. Since this is the common-case in practice, the default is + True. + :returns: True if compatible, False if not + """ + requested_parts = pkg_resources.parse_version(requested_version) + current_parts = pkg_resources.parse_version(current_version) + + if same_major and (requested_parts[0] != current_parts[0]): + return False + + return current_parts >= requested_parts diff --git a/billingstack/taskflow/__init__.py b/billingstack/taskflow/__init__.py deleted file mode 100644 index 1f19be5..0000000 --- a/billingstack/taskflow/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 diff --git a/billingstack/taskflow/decorators.py b/billingstack/taskflow/decorators.py deleted file mode 100644 index c5320df..0000000 --- a/billingstack/taskflow/decorators.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012-2013 Yahoo! Inc. 
All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools - -from billingstack.taskflow import functor_task -from billingstack.taskflow import utils - - -def wraps(fn): - """This will not be needed in python 3.2 or greater which already has this - built-in to its functools.wraps method. - """ - - def wrapper(f): - f = functools.wraps(fn)(f) - f.__wrapped__ = getattr(fn, '__wrapped__', fn) - return f - - return wrapper - - -def locked(*args, **kwargs): - - def decorator(f): - attr_name = kwargs.get('lock', '_lock') - - @wraps(f) - def wrapper(*args, **kwargs): - lock = getattr(args[0], attr_name) - with lock: - return f(*args, **kwargs) - - return wrapper - - # This is needed to handle when the decorator has args or the decorator - # doesn't have args, python is rather weird here... 
- if kwargs or not args: - return decorator - else: - if len(args) == 1: - return decorator(args[0]) - else: - return decorator - - -def _original_function(fun): - """Get original function from static or class method""" - if isinstance(fun, staticmethod): - return fun.__get__(object()) - elif isinstance(fun, classmethod): - return fun.__get__(object()).im_func - return fun - - -def task(*args, **kwargs): - """Decorates a given function so that it can be used as a task""" - - def decorator(f): - def task_factory(execute_with, **factory_kwargs): - merged = kwargs.copy() - merged.update(factory_kwargs) - # NOTE(imelnikov): we can't capture f here because for - # bound methods and bound class methods the object it - # is bound to is yet unknown at the moment - return functor_task.FunctorTask(execute_with, **merged) - w_f = _original_function(f) - setattr(w_f, utils.TASK_FACTORY_ATTRIBUTE, task_factory) - return f - - # This is needed to handle when the decorator has args or the decorator - # doesn't have args, python is rather weird here... - if kwargs: - if args: - raise TypeError('task decorator takes 0 positional arguments,' - '%s given' % len(args)) - return decorator - else: - if len(args) == 1: - return decorator(args[0]) - else: - return decorator diff --git a/billingstack/taskflow/exceptions.py b/billingstack/taskflow/exceptions.py deleted file mode 100644 index 7f572ff..0000000 --- a/billingstack/taskflow/exceptions.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class TaskFlowException(Exception): - """Base class for exceptions emitted from this library.""" - pass - - -class Duplicate(TaskFlowException): - """Raised when a duplicate entry is found.""" - pass - - -class StorageError(TaskFlowException): - """Raised when logbook can not be read/saved/deleted.""" - - def __init__(self, message, cause=None): - super(StorageError, self).__init__(message) - self.cause = cause - - -class NotFound(TaskFlowException): - """Raised when some entry in some object doesn't exist.""" - pass - - -class AlreadyExists(TaskFlowException): - """Raised when some entry in some object already exists.""" - pass - - -class ClosedException(TaskFlowException): - """Raised when an access on a closed object occurs.""" - pass - - -class InvalidStateException(TaskFlowException): - """Raised when a task/job/workflow is in an invalid state when an - operation is attempting to apply to said task/job/workflow. 
- """ - pass - - -class UnclaimableJobException(TaskFlowException): - """Raised when a job can not be claimed.""" - pass - - -class JobNotFound(TaskFlowException): - """Raised when a job entry can not be found.""" - pass - - -class MissingDependencies(InvalidStateException): - """Raised when a entity has dependencies that can not be satisified.""" - message = ("%(who)s requires %(requirements)s but no other entity produces" - " said requirements") - - def __init__(self, who, requirements): - message = self.message % {'who': who, 'requirements': requirements} - super(MissingDependencies, self).__init__(message) diff --git a/billingstack/taskflow/flow.py b/billingstack/taskflow/flow.py deleted file mode 100644 index e295574..0000000 --- a/billingstack/taskflow/flow.py +++ /dev/null @@ -1,216 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc -import threading - -from billingstack.openstack.common import uuidutils - -from billingstack.taskflow import exceptions as exc -from billingstack.taskflow import states -from billingstack.taskflow import utils - - -class Flow(object): - """The base abstract class of all flow implementations. - - It provides a set of parents to flows that have a concept of parent flows - as well as a state and state utility functions to the deriving classes. 
It - also provides a name and an identifier (uuid or other) to the flow so that - it can be uniquely identifed among many flows. - - Flows are expected to provide (if desired) the following methods: - - add - - add_many - - interrupt - - reset - - rollback - - run - - soft_reset - """ - - __metaclass__ = abc.ABCMeta - - # Common states that certain actions can be performed in. If the flow - # is not in these sets of states then it is likely that the flow operation - # can not succeed. - RESETTABLE_STATES = set([ - states.INTERRUPTED, - states.SUCCESS, - states.PENDING, - states.FAILURE, - ]) - SOFT_RESETTABLE_STATES = set([ - states.INTERRUPTED, - ]) - UNINTERRUPTIBLE_STATES = set([ - states.FAILURE, - states.SUCCESS, - states.PENDING, - ]) - RUNNABLE_STATES = set([ - states.PENDING, - ]) - - def __init__(self, name, parents=None, uuid=None): - self._name = str(name) - # The state of this flow. - self._state = states.PENDING - # If this flow has a parent flow/s which need to be reverted if - # this flow fails then please include them here to allow this child - # to call the parents... - if parents: - self.parents = tuple(parents) - else: - self.parents = tuple([]) - # Any objects that want to listen when a wf/task starts/stops/completes - # or errors should be registered here. This can be used to monitor - # progress and record tasks finishing (so that it becomes possible to - # store the result of a task in some persistent or semi-persistent - # storage backend). - self.notifier = utils.TransitionNotifier() - self.task_notifier = utils.TransitionNotifier() - # Assign this flow a unique identifer. - if uuid: - self._id = str(uuid) - else: - self._id = uuidutils.generate_uuid() - # Ensure we can not change the state at the same time in 2 different - # threads. 
- self._state_lock = threading.RLock() - - @property - def name(self): - """A non-unique name for this flow (human readable)""" - return self._name - - @property - def uuid(self): - return self._id - - @property - def state(self): - """Provides a read-only view of the flow state.""" - return self._state - - def _change_state(self, context, new_state, check_func=None, notify=True): - old_state = None - changed = False - with self._state_lock: - if self.state != new_state: - if (not check_func or - (check_func and check_func(self.state))): - changed = True - old_state = self.state - self._state = new_state - # Don't notify while holding the lock so that the reciever of said - # notifications can actually perform operations on the given flow - # without getting into deadlock. - if notify and changed: - self.notifier.notify(self.state, details={ - 'context': context, - 'flow': self, - 'old_state': old_state, - }) - return changed - - def __str__(self): - lines = ["Flow: %s" % (self.name)] - lines.append("%s" % (self.uuid)) - lines.append("%s" % (len(self.parents))) - lines.append("%s" % (self.state)) - return "; ".join(lines) - - @abc.abstractmethod - def add(self, task): - """Adds a given task to this flow. - - Returns the uuid that is associated with the task for later operations - before and after it is ran. - """ - raise NotImplementedError() - - def add_many(self, tasks): - """Adds many tasks to this flow. - - Returns a list of uuids (one for each task added). - """ - uuids = [] - for t in tasks: - uuids.append(self.add(t)) - return uuids - - def interrupt(self): - """Attempts to interrupt the current flow and any tasks that are - currently not running in the flow. - - Returns how many tasks were interrupted (if any). 
- """ - def check(): - if self.state in self.UNINTERRUPTIBLE_STATES: - raise exc.InvalidStateException(("Can not interrupt when" - " in state %s") % self.state) - - check() - with self._state_lock: - check() - self._change_state(None, states.INTERRUPTED) - return 0 - - def reset(self): - """Fully resets the internal state of this flow, allowing for the flow - to be ran again. - - Note: Listeners are also reset. - """ - def check(): - if self.state not in self.RESETTABLE_STATES: - raise exc.InvalidStateException(("Can not reset when" - " in state %s") % self.state) - - check() - with self._state_lock: - check() - self.notifier.reset() - self.task_notifier.reset() - self._change_state(None, states.PENDING) - - def soft_reset(self): - """Partially resets the internal state of this flow, allowing for the - flow to be ran again from an interrupted state. - """ - def check(): - if self.state not in self.SOFT_RESETTABLE_STATES: - raise exc.InvalidStateException(("Can not soft reset when" - " in state %s") % self.state) - - check() - with self._state_lock: - check() - self._change_state(None, states.PENDING) - - @abc.abstractmethod - def run(self, context, *args, **kwargs): - """Executes the workflow.""" - raise NotImplementedError() - - def rollback(self, context, cause): - """Performs rollback of this workflow and any attached parent workflows - if present. - """ - pass diff --git a/billingstack/taskflow/functor_task.py b/billingstack/taskflow/functor_task.py deleted file mode 100644 index 2f834e6..0000000 --- a/billingstack/taskflow/functor_task.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012-2013 Yahoo! Inc. All Rights Reserved. -# Copyright (C) 2013 AT&T Labs Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import inspect - -from billingstack.taskflow import task as base - -# These arguments are ones that we will skip when parsing for requirements -# for a function to operate (when used as a task). -AUTO_ARGS = ('self', 'context', 'cls') - - -def _filter_arg(arg): - if arg in AUTO_ARGS: - return False - # In certain decorator cases it seems like we get the function to be - # decorated as an argument, we don't want to take that as a real argument. - if not isinstance(arg, basestring): - return False - return True - - -class FunctorTask(base.Task): - """Adaptor to make task from a callable - - Take any callable and make a task from it. 
- """ - @staticmethod - def _get_callable_name(execute_with): - """Generate a name from callable""" - im_class = getattr(execute_with, 'im_class', None) - if im_class is not None: - parts = (im_class.__module__, im_class.__name__, - execute_with.__name__) - else: - parts = (execute_with.__module__, execute_with.__name__) - return '.'.join(parts) - - def __init__(self, execute_with, **kwargs): - """Initialize FunctorTask instance with given callable and kwargs - - :param execute_with: the callable - :param kwargs: reserved keywords (all optional) are - name: name of the task, default None (auto generate) - task_id: id of the task, default None (auto generate) - revert_with: the callable to revert, default None - version: version of the task, default Task's version 1.0 - optionals: optionals of the task, default () - provides: provides of the task, default () - requires: requires of the task, default () - auto_extract: auto extract execute_with's args and put it into - requires, default True - """ - name = kwargs.pop('name', None) - task_id = kwargs.pop('task_id', None) - if name is None: - name = self._get_callable_name(execute_with) - super(FunctorTask, self).__init__(name, task_id) - self._execute_with = execute_with - self._revert_with = kwargs.pop('revert_with', None) - self.version = kwargs.pop('version', self.version) - self.optional.update(kwargs.pop('optional', ())) - self.provides.update(kwargs.pop('provides', ())) - self.requires.update(kwargs.pop('requires', ())) - if kwargs.pop('auto_extract', True): - f_args = inspect.getargspec(execute_with).args - self.requires.update([arg for arg in f_args if _filter_arg(arg)]) - if kwargs: - raise TypeError('__init__() got an unexpected keyword argument %r' - % kwargs.keys[0]) - - def __call__(self, *args, **kwargs): - return self._execute_with(*args, **kwargs) - - def revert(self, *args, **kwargs): - if self._revert_with: - return self._revert_with(*args, **kwargs) - else: - return None diff --git 
a/billingstack/taskflow/graph_utils.py b/billingstack/taskflow/graph_utils.py deleted file mode 100644 index 005924b..0000000 --- a/billingstack/taskflow/graph_utils.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging - -from billingstack.taskflow import exceptions as exc - - -LOG = logging.getLogger(__name__) - - -def connect(graph, infer_key='infer', auto_reason='auto', discard_func=None): - """Connects a graphs runners to other runners in the graph which provide - outputs for each runners requirements. - """ - - if len(graph) == 0: - return - if discard_func: - for (u, v, e_data) in graph.edges(data=True): - if discard_func(u, v, e_data): - graph.remove_edge(u, v) - for (r, r_data) in graph.nodes_iter(data=True): - requires = set(r.requires) - - # Find the ones that have already been attached manually. - manual_providers = {} - if requires: - incoming = [e[0] for e in graph.in_edges_iter([r])] - for r2 in incoming: - fulfills = requires & r2.provides - if fulfills: - LOG.debug("%s is a manual provider of %s for %s", - r2, fulfills, r) - for k in fulfills: - manual_providers[k] = r2 - requires.remove(k) - - # Anything leftover that we must find providers for?? 
- auto_providers = {} - if requires and r_data.get(infer_key): - for r2 in graph.nodes_iter(): - if r is r2: - continue - fulfills = requires & r2.provides - if fulfills: - graph.add_edge(r2, r, reason=auto_reason) - LOG.debug("Connecting %s as a automatic provider for" - " %s for %s", r2, fulfills, r) - for k in fulfills: - auto_providers[k] = r2 - requires.remove(k) - if not requires: - break - - # Anything still leftover?? - if requires: - # Ensure its in string format, since join will puke on - # things that are not strings. - missing = ", ".join(sorted([str(s) for s in requires])) - raise exc.MissingDependencies(r, missing) - else: - r.providers = {} - r.providers.update(auto_providers) - r.providers.update(manual_providers) diff --git a/billingstack/taskflow/patterns/__init__.py b/billingstack/taskflow/patterns/__init__.py deleted file mode 100644 index 1f19be5..0000000 --- a/billingstack/taskflow/patterns/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 diff --git a/billingstack/taskflow/patterns/linear_flow.py b/billingstack/taskflow/patterns/linear_flow.py deleted file mode 100644 index f25feed..0000000 --- a/billingstack/taskflow/patterns/linear_flow.py +++ /dev/null @@ -1,286 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import collections -import functools -import logging -import threading - -from billingstack.openstack.common import excutils - -from billingstack.taskflow import decorators -from billingstack.taskflow import exceptions as exc -from billingstack.taskflow import states -from billingstack.taskflow import utils - -from billingstack.taskflow import flow - -LOG = logging.getLogger(__name__) - - -class Flow(flow.Flow): - """"A linear chain of tasks that can be applied in order as one unit and - rolled back as one unit using the reverse order that the tasks have - been applied in. - - Note(harlowja): Each task in the chain must have requirements - which are satisfied by the previous task/s in the chain. - """ - - def __init__(self, name, parents=None, uuid=None): - super(Flow, self).__init__(name, parents, uuid) - # The tasks which have been applied will be collected here so that they - # can be reverted in the correct order on failure. - self._accumulator = utils.RollbackAccumulator() - # Tasks results are stored here. Lookup is by the uuid that was - # returned from the add function. - self.results = {} - # The previously left off iterator that can be used to resume from - # the last task (if interrupted and soft-reset). - self._leftoff_at = None - # All runners to run are collected here. - self._runners = [] - self._connected = False - self._lock = threading.RLock() - # The resumption strategy to use. - self.resumer = None - - @decorators.locked - def add(self, task): - """Adds a given task to this flow.""" - assert isinstance(task, collections.Callable) - r = utils.AOTRunner(task) - r.runs_before = list(reversed(self._runners)) - self._runners.append(r) - self._reset_internals() - return r.uuid - - def _reset_internals(self): - self._connected = False - self._leftoff_at = None - - def _associate_providers(self, runner): - # Ensure that some previous task provides this input. 
- who_provides = {} - task_requires = runner.requires - for r in task_requires: - provider = None - for before_me in runner.runs_before: - if r in before_me.provides: - provider = before_me - break - if provider: - who_provides[r] = provider - # Ensure that the last task provides all the needed input for this - # task to run correctly. - missing_requires = task_requires - set(who_provides.keys()) - if missing_requires: - raise exc.MissingDependencies(runner, sorted(missing_requires)) - runner.providers.update(who_provides) - - def __str__(self): - lines = ["LinearFlow: %s" % (self.name)] - lines.append("%s" % (self.uuid)) - lines.append("%s" % (len(self._runners))) - lines.append("%s" % (len(self.parents))) - lines.append("%s" % (self.state)) - return "; ".join(lines) - - @decorators.locked - def remove(self, uuid): - index_removed = -1 - for (i, r) in enumerate(self._runners): - if r.uuid == uuid: - index_removed = i - break - if index_removed == -1: - raise ValueError("No runner found with uuid %s" % (uuid)) - else: - removed = self._runners.pop(index_removed) - self._reset_internals() - # Go and remove it from any runner after the removed runner since - # those runners may have had an attachment to it. 
- for r in self._runners[index_removed:]: - try: - r.runs_before.remove(removed) - except (IndexError, ValueError): - pass - - def __len__(self): - return len(self._runners) - - def _connect(self): - if self._connected: - return self._runners - for r in self._runners: - r.providers = {} - for r in reversed(self._runners): - self._associate_providers(r) - self._connected = True - return self._runners - - def _ordering(self): - return iter(self._connect()) - - @decorators.locked - def run(self, context, *args, **kwargs): - - def abort_if(current_state, ok_states): - if current_state not in ok_states: - return False - return True - - def resume_it(): - if self._leftoff_at is not None: - return ([], self._leftoff_at) - if self.resumer: - (finished, leftover) = self.resumer(self, self._ordering()) - else: - finished = [] - leftover = self._ordering() - return (finished, leftover) - - start_check_functor = functools.partial(abort_if, - ok_states=self.RUNNABLE_STATES) - if not self._change_state(context, states.STARTED, - check_func=start_check_functor): - return - try: - those_finished, leftover = resume_it() - except Exception: - with excutils.save_and_reraise_exception(): - self._change_state(context, states.FAILURE) - - def run_it(runner, failed=False, result=None, simulate_run=False): - try: - # Add the task to be rolled back *immediately* so that even if - # the task fails while producing results it will be given a - # chance to rollback. - rb = utils.RollbackTask(context, runner.task, result=None) - self._accumulator.add(rb) - self.task_notifier.notify(states.STARTED, details={ - 'context': context, - 'flow': self, - 'runner': runner, - }) - if not simulate_run: - result = runner(context, *args, **kwargs) - else: - if failed: - # TODO(harlowja): make this configurable?? - # If we previously failed, we want to fail again at - # the same place. 
- if not result: - # If no exception or exception message was provided - # or captured from the previous run then we need to - # form one for this task. - result = "%s failed running." % (runner.task) - if isinstance(result, basestring): - result = exc.InvalidStateException(result) - if not isinstance(result, Exception): - LOG.warn("Can not raise a non-exception" - " object: %s", result) - result = exc.InvalidStateException() - raise result - # Adjust the task result in the accumulator before - # notifying others that the task has finished to - # avoid the case where a listener might throw an - # exception. - rb.result = result - runner.result = result - self.results[runner.uuid] = result - self.task_notifier.notify(states.SUCCESS, details={ - 'context': context, - 'flow': self, - 'runner': runner, - }) - except Exception as e: - runner.result = e - cause = utils.FlowFailure(runner, self, e) - with excutils.save_and_reraise_exception(): - # Notify any listeners that the task has errored. - self.task_notifier.notify(states.FAILURE, details={ - 'context': context, - 'flow': self, - 'runner': runner, - }) - self.rollback(context, cause) - - run_check_functor = functools.partial(abort_if, - ok_states=[states.STARTED, - states.RESUMING]) - if len(those_finished): - if not self._change_state(context, states.RESUMING, - check_func=run_check_functor): - return - for (r, details) in those_finished: - # Fake running the task so that we trigger the same - # notifications and state changes (and rollback that - # would have happened in a normal flow). 
- failed = states.FAILURE in details.get('states', []) - result = details.get('result') - run_it(r, failed=failed, result=result, simulate_run=True) - - self._leftoff_at = leftover - if not self._change_state(context, states.RUNNING, - check_func=run_check_functor): - return - - was_interrupted = False - for r in leftover: - r.reset() - run_it(r) - if self.state == states.INTERRUPTED: - was_interrupted = True - break - - if not was_interrupted: - # Only gets here if everything went successfully. - self._change_state(context, states.SUCCESS) - self._leftoff_at = None - - @decorators.locked - def reset(self): - super(Flow, self).reset() - self.results = {} - self.resumer = None - self._accumulator.reset() - self._reset_internals() - - @decorators.locked - def rollback(self, context, cause): - # Performs basic task by task rollback by going through the reverse - # order that tasks have finished and asking said task to undo whatever - # it has done. If this flow has any parent flows then they will - # also be called to rollback any tasks said parents contain. - # - # Note(harlowja): if a flow can more simply revert a whole set of - # tasks via a simpler command then it can override this method to - # accomplish that. - # - # For example, if each task was creating a file in a directory, then - # it's easier to just remove the directory than to ask each task to - # delete its file individually. - self._change_state(context, states.REVERTING) - try: - self._accumulator.rollback(cause) - finally: - self._change_state(context, states.FAILURE) - # Rollback any parents flows if they exist... 
- for p in self.parents: - p.rollback(context, cause) diff --git a/billingstack/taskflow/patterns/threaded_flow.py b/billingstack/taskflow/patterns/threaded_flow.py deleted file mode 100644 index 02c2ceb..0000000 --- a/billingstack/taskflow/patterns/threaded_flow.py +++ /dev/null @@ -1,636 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from billingstack.taskflow import exceptions as exc -from billingstack.taskflow import flow -from billingstack.taskflow import graph_utils -from billingstack.taskflow import states -from billingstack.taskflow import utils - -import collections -import functools -import logging -import sys -import threading -import weakref - -from networkx.algorithms import cycles -from networkx.classes import digraph - -LOG = logging.getLogger(__name__) - - -class DependencyTimeout(exc.InvalidStateException): - """When running in parallel a task has the ability to timeout waiting for - its dependent tasks to finish, this will be raised when that occurs. - """ - pass - - -class Flow(flow.Flow): - """This flow pattern establishes tasks into a graph where each task is a - node in the graph and dependencies between tasks are edges in the graph. - When running (in parallel) each task will only be activated when its - dependencies have been satisified. 
When a graph is split into two or more - segments, both of those segments will be ran in parallel. - - For example lets take this small little *somewhat complicated* graph: - - X--Y--C--D - | | - A--B-- --G-- - | | |--Z(end) - E--F-- --H-- - - In this flow the following will be ran in parallel at start: - 1. X--Y - 2. A--B - 3. E--F - Note the C--D nodes will not be able to run until [Y,B,F] has completed. - After C--D completes the following will be ran in parallel: - 1. G - 2. H - Then finally Z will run (after [G,H] complete) and the flow will then have - finished executing. - """ - MUTABLE_STATES = set([states.PENDING, states.FAILURE, states.SUCCESS]) - REVERTABLE_STATES = set([states.FAILURE, states.INCOMPLETE]) - CANCELLABLE_STATES = set([states.PENDING, states.RUNNING]) - - def __init__(self, name): - super(Flow, self).__init__(name) - self._graph = digraph.DiGraph(name=name) - self._run_lock = threading.RLock() - self._cancel_lock = threading.RLock() - self._mutate_lock = threading.RLock() - # NOTE(harlowja) The locking order in this list actually matters since - # we need to make sure that users of this list do not get deadlocked - # by out of order lock access. - self._core_locks = [ - self._run_lock, - self._mutate_lock, - self._cancel_lock, - ] - self._run_locks = [ - self._run_lock, - self._mutate_lock, - ] - self._cancel_locks = [ - self._cancel_lock, - ] - self.results = {} - self.resumer = None - - def __str__(self): - lines = ["ParallelFlow: %s" % (self.name)] - lines.append("%s" % (self._graph.number_of_nodes())) - lines.append("%s" % (self.state)) - return "; ".join(lines) - - def soft_reset(self): - # The way this flow works does not allow (at the current moment) for - # you to suspend the threads and then resume them at a later time, - # instead it only supports interruption (which will cancel the threads) - # and then a full reset. 
- raise NotImplementedError("Threaded flow does not currently support" - " soft resetting, please try using" - " reset() instead") - - def interrupt(self): - """Currently we can not pause threads and then resume them later, not - really thinking that we should likely ever do this. - """ - raise NotImplementedError("Threaded flow does not currently support" - " interruption, please try using" - " cancel() instead") - - def reset(self): - # All locks are used so that resets can not happen while running or - # cancelling or modifying. - with utils.MultiLock(self._core_locks): - super(Flow, self).reset() - self.results = {} - self.resumer = None - - def cancel(self): - - def check(): - if self.state not in self.CANCELLABLE_STATES: - raise exc.InvalidStateException("Can not attempt cancellation" - " when in state %s" % - self.state) - - check() - cancelled = 0 - was_empty = False - - # We don't lock the other locks so that the flow can be cancelled while - # running. Further state management logic is then used while running - # to verify that the flow should still be running when it has been - # cancelled. - with utils.MultiLock(self._cancel_locks): - check() - if len(self._graph) == 0: - was_empty = True - else: - for r in self._graph.nodes_iter(): - try: - if r.cancel(blocking=False): - cancelled += 1 - except exc.InvalidStateException: - pass - if cancelled or was_empty: - self._change_state(None, states.CANCELLED) - - return cancelled - - def _find_uuid(self, uuid): - # Finds the runner for the given uuid (or returns none) - for r in self._graph.nodes_iter(): - if r.uuid == uuid: - return r - return None - - def add(self, task, timeout=None, infer=True): - """Adds a task to the given flow using the given timeout which will be - used a the timeout to wait for dependencies (if any) to be - fulfilled. 
- """ - def check(): - if self.state not in self.MUTABLE_STATES: - raise exc.InvalidStateException("Flow is currently in a" - " non-mutable %s state" % - (self.state)) - - # Ensure that we do a quick check to see if we can even perform this - # addition before we go about actually acquiring the lock to perform - # the actual addition. - check() - - # All locks must be acquired so that modifications can not be made - # while running, cancelling or performing a simultaneous mutation. - with utils.MultiLock(self._core_locks): - check() - runner = ThreadRunner(task, self, timeout) - self._graph.add_node(runner, infer=infer) - return runner.uuid - - def _connect(self): - """Infers and connects the edges of the given tasks by examining the - associated tasks provides and requires attributes and connecting tasks - that require items to tasks that produce said items. - """ - - # Disconnect all edges not manually created before we attempt to infer - # them so that we don't retain edges that are invalid. - def disconnect_non_user(u, v, e_data): - if e_data and e_data.get('reason') != 'manual': - return True - return False - - # Link providers to requirers. 
- graph_utils.connect(self._graph, - discard_func=disconnect_non_user) - - # Connect the successors & predecessors and related siblings - for r in self._graph.nodes_iter(): - r._predecessors = [] - r._successors = [] - for (r2, _me) in self._graph.in_edges_iter([r]): - r._predecessors.append(r2) - for (_me, r2) in self._graph.out_edges_iter([r]): - r._successors.append(r2) - r.siblings = [] - for r2 in self._graph.nodes_iter(): - if r2 is r or r2 in r._predecessors or r2 in r._successors: - continue - r._siblings.append(r2) - - def add_many(self, tasks): - """Adds a list of tasks to the flow.""" - - def check(): - if self.state not in self.MUTABLE_STATES: - raise exc.InvalidStateException("Flow is currently in a" - " non-mutable state %s" - % (self.state)) - - # Ensure that we do a quick check to see if we can even perform this - # addition before we go about actually acquiring the lock. - check() - - # All locks must be acquired so that modifications can not be made - # while running, cancelling or performing a simultaneous mutation. 
- with utils.MultiLock(self._core_locks): - check() - added = [] - for t in tasks: - added.append(self.add(t)) - return added - - def add_dependency(self, provider_uuid, consumer_uuid): - """Manually adds a dependency between a provider and a consumer.""" - - def check_and_fetch(): - if self.state not in self.MUTABLE_STATES: - raise exc.InvalidStateException("Flow is currently in a" - " non-mutable state %s" - % (self.state)) - provider = self._find_uuid(provider_uuid) - if not provider or not self._graph.has_node(provider): - raise exc.InvalidStateException("Can not add a dependency " - "from unknown uuid %s" % - (provider_uuid)) - consumer = self._find_uuid(consumer_uuid) - if not consumer or not self._graph.has_node(consumer): - raise exc.InvalidStateException("Can not add a dependency " - "to unknown uuid %s" - % (consumer_uuid)) - if provider is consumer: - raise exc.InvalidStateException("Can not add a dependency " - "to loop via uuid %s" - % (consumer_uuid)) - return (provider, consumer) - - check_and_fetch() - - # All locks must be acquired so that modifications can not be made - # while running, cancelling or performing a simultaneous mutation. 
- with utils.MultiLock(self._core_locks): - (provider, consumer) = check_and_fetch() - self._graph.add_edge(provider, consumer, reason='manual') - LOG.debug("Connecting %s as a manual provider for %s", - provider, consumer) - - def run(self, context, *args, **kwargs): - """Executes the given flow using the given context and args/kwargs.""" - - def abort_if(current_state, ok_states): - if current_state in (states.CANCELLED,): - return False - if current_state not in ok_states: - return False - return True - - def check(): - if self.state not in self.RUNNABLE_STATES: - raise exc.InvalidStateException("Flow is currently unable " - "to be ran in state %s" - % (self.state)) - - def connect_and_verify(): - """Do basic sanity tests on the graph structure.""" - if len(self._graph) == 0: - return - self._connect() - degrees = [g[1] for g in self._graph.in_degree_iter()] - zero_degrees = [d for d in degrees if d == 0] - if not zero_degrees: - # If every task depends on something else to produce its input - # then we will be in a deadlock situation. - raise exc.InvalidStateException("No task has an in-degree" - " of zero") - self_loops = self._graph.nodes_with_selfloops() - if self_loops: - # A task that has a dependency on itself will never be able - # to run. - raise exc.InvalidStateException("%s tasks have been detected" - " with dependencies on" - " themselves" % - len(self_loops)) - simple_cycles = len(cycles.recursive_simple_cycles(self._graph)) - if simple_cycles: - # A task loop will never be able to run, unless it somehow - # breaks that loop. 
- raise exc.InvalidStateException("%s tasks have been detected" - " with dependency loops" % - simple_cycles) - - def run_it(result_cb, args, kwargs): - check_runnable = functools.partial(abort_if, - ok_states=self.RUNNABLE_STATES) - if self._change_state(context, states.RUNNING, - check_func=check_runnable): - self.results = {} - if len(self._graph) == 0: - return - for r in self._graph.nodes_iter(): - r.reset() - r._result_cb = result_cb - executor = utils.ThreadGroupExecutor() - for r in self._graph.nodes_iter(): - executor.submit(r, *args, **kwargs) - executor.await_termination() - - def trigger_rollback(failures): - if not failures: - return - causes = [] - for r in failures: - causes.append(utils.FlowFailure(r, self, - r.exc, r.exc_info)) - try: - self.rollback(context, causes) - except exc.InvalidStateException: - pass - finally: - # TODO(harlowja): re-raise a combined exception when - # there are more than one failures?? - for f in failures: - if all(f.exc_info): - raise f.exc_info[0], f.exc_info[1], f.exc_info[2] - - def handle_results(): - # Isolate each runner state into groups so that we can easily tell - # which ones failed, cancelled, completed... 
- groups = collections.defaultdict(list) - for r in self._graph.nodes_iter(): - groups[r.state].append(r) - for r in self._graph.nodes_iter(): - if r not in groups.get(states.FAILURE, []) and r.has_ran(): - self.results[r.uuid] = r.result - if groups[states.FAILURE]: - self._change_state(context, states.FAILURE) - trigger_rollback(groups[states.FAILURE]) - elif (groups[states.CANCELLED] or groups[states.PENDING] - or groups[states.TIMED_OUT] or groups[states.STARTED]): - self._change_state(context, states.INCOMPLETE) - else: - check_ran = functools.partial(abort_if, - ok_states=[states.RUNNING]) - self._change_state(context, states.SUCCESS, - check_func=check_ran) - - def get_resumer_cb(): - if not self.resumer: - return None - (ran, _others) = self.resumer(self, self._graph.nodes_iter()) - - def fetch_results(runner): - for (r, metadata) in ran: - if r is runner: - return (True, metadata.get('result')) - return (False, None) - - result_cb = fetch_results - return result_cb - - args = [context] + list(args) - check() - - # Only acquire the run lock (but use further state checking) and the - # mutation lock to stop simultaneous running and simultaneous mutating - # which are not allowed on a running flow. Allow simultaneous cancel - # by performing repeated state checking while running. - with utils.MultiLock(self._run_locks): - check() - connect_and_verify() - try: - run_it(get_resumer_cb(), args, kwargs) - finally: - handle_results() - - def rollback(self, context, cause): - """Rolls back all tasks that are *not* still pending or cancelled.""" - - def check(): - if self.state not in self.REVERTABLE_STATES: - raise exc.InvalidStateException("Flow is currently unable " - "to be rolled back in " - "state %s" % (self.state)) - - check() - - # All locks must be acquired so that modifications can not be made - # while another entity is running, rolling-back, cancelling or - # performing a mutation operation. 
- with utils.MultiLock(self._core_locks): - check() - accum = utils.RollbackAccumulator() - for r in self._graph.nodes_iter(): - if r.has_ran(): - accum.add(utils.RollbackTask(context, r.task, r.result)) - try: - self._change_state(context, states.REVERTING) - accum.rollback(cause) - finally: - self._change_state(context, states.FAILURE) - - -class ThreadRunner(utils.Runner): - """A helper class that will use a countdown latch to avoid calling its - callable object until said countdown latch has emptied. After it has - been emptied the predecessor tasks will be examined for dependent results - and said results will then be provided to call the runners callable - object. - - TODO(harlowja): this could be a 'future' like object in the future since it - is starting to have the same purpose and usage (in a way). Likely switch - this over to the task details object or a subclass of it??? - """ - RESETTABLE_STATES = set([states.PENDING, states.SUCCESS, states.FAILURE, - states.CANCELLED]) - RUNNABLE_STATES = set([states.PENDING]) - CANCELABLE_STATES = set([states.PENDING]) - SUCCESS_STATES = set([states.SUCCESS]) - CANCEL_SUCCESSORS_WHEN = set([states.FAILURE, states.CANCELLED, - states.TIMED_OUT]) - NO_RAN_STATES = set([states.CANCELLED, states.PENDING, states.TIMED_OUT, - states.RUNNING]) - - def __init__(self, task, flow, timeout): - super(ThreadRunner, self).__init__(task) - # Use weak references to give the GC a break. - self._flow = weakref.proxy(flow) - self._notifier = flow.task_notifier - self._timeout = timeout - self._state = states.PENDING - self._run_lock = threading.RLock() - # Use the flows state lock so that state notifications are not sent - # simultaneously for a given flow. - self._state_lock = flow._state_lock - self._cancel_lock = threading.RLock() - self._latch = utils.CountDownLatch() - # Any related family. - self._predecessors = [] - self._successors = [] - self._siblings = [] - # Ensure we capture any exceptions that may have been triggered. 
- self.exc = None - self.exc_info = (None, None, None) - # This callback will be called before the underlying task is actually - # returned and it should either return a tuple of (has_result, result) - self._result_cb = None - - @property - def state(self): - return self._state - - def has_ran(self): - if self.state in self.NO_RAN_STATES: - return False - return True - - def _change_state(self, context, new_state): - old_state = None - changed = False - with self._state_lock: - if self.state != new_state: - old_state = self.state - self._state = new_state - changed = True - # Don't notify while holding the lock so that the reciever of said - # notifications can actually perform operations on the given runner - # without getting into deadlock. - if changed and self._notifier: - self._notifier.notify(self.state, details={ - 'context': context, - 'flow': self._flow, - 'old_state': old_state, - 'runner': self, - }) - - def cancel(self, blocking=True): - - def check(): - if self.state not in self.CANCELABLE_STATES: - raise exc.InvalidStateException("Runner not in a cancelable" - " state: %s" % (self.state)) - - # Check before as a quick way out of attempting to acquire the more - # heavy-weight lock. Then acquire the lock (which should not be - # possible if we are currently running) and set the state (if still - # applicable). 
- check() - acquired = False - cancelled = False - try: - acquired = self._cancel_lock.acquire(blocking=blocking) - if acquired: - check() - cancelled = True - self._change_state(None, states.CANCELLED) - finally: - if acquired: - self._cancel_lock.release() - return cancelled - - def reset(self): - - def check(): - if self.state not in self.RESETTABLE_STATES: - raise exc.InvalidStateException("Runner not in a resettable" - " state: %s" % (self.state)) - - def do_reset(): - self._latch.count = len(self._predecessors) - self.exc = None - self.exc_info = (None, None, None) - self.result = None - self._change_state(None, states.PENDING) - - # We need to acquire both locks here so that we can not be running - # or being cancelled at the same time we are resetting. - check() - with self._run_lock: - check() - with self._cancel_lock: - check() - do_reset() - - @property - def runs_before(self): - # NOTE(harlowja): this list may change, depending on which other - # runners have completed (or are currently actively running), so - # this is why this is a property instead of a semi-static defined list - # like in the AOT class. The list should only get bigger and not - # smaller so it should be fine to filter on runners that have completed - # successfully. 
- finished_ok = [] - for r in self._siblings: - if r.has_ran() and r.state in self.SUCCESS_STATES: - finished_ok.append(r) - return finished_ok - - def __call__(self, context, *args, **kwargs): - - def is_runnable(): - if self.state not in self.RUNNABLE_STATES: - return False - return True - - def run(*args, **kwargs): - try: - self._change_state(context, states.RUNNING) - has_result = False - if self._result_cb: - has_result, self.result = self._result_cb(self) - if not has_result: - super(ThreadRunner, self).__call__(*args, **kwargs) - self._change_state(context, states.SUCCESS) - except Exception as e: - self._change_state(context, states.FAILURE) - self.exc = e - self.exc_info = sys.exc_info() - - def signal(): - if not self._successors: - return - if self.state in self.CANCEL_SUCCESSORS_WHEN: - for r in self._successors: - try: - r.cancel(blocking=False) - except exc.InvalidStateException: - pass - for r in self._successors: - try: - r._latch.countDown() - except Exception: - LOG.exception("Failed decrementing %s latch", r) - - # We check before to avoid attempting to acquire the lock when we are - # known to be in a non-runnable state. - if not is_runnable(): - return - args = [context] + list(args) - with self._run_lock: - # We check after we now own the run lock since a previous thread - # could have exited and released that lock and set the state to - # not runnable. - if not is_runnable(): - return - may_proceed = self._latch.await(self._timeout) - # We now acquire the cancel lock so that we can be assured that - # we have not been cancelled by another entity. - with self._cancel_lock: - try: - # If we have been cancelled after awaiting and timing out - # ensure that we alter the state to show timed out (but - # not if we have been cancelled, since our state should - # be cancelled instead). This is done after acquiring the - # cancel lock so that we will not try to overwrite another - # entity trying to set the runner to the cancel state. 
- if not may_proceed and self.state != states.CANCELLED: - self._change_state(context, states.TIMED_OUT) - # We at this point should only have been able to time out - # or be cancelled, no other state transitions should have - # been possible. - if self.state not in (states.CANCELLED, states.TIMED_OUT): - run(*args, **kwargs) - finally: - signal() diff --git a/billingstack/taskflow/states.py b/billingstack/taskflow/states.py deleted file mode 100644 index b3ff929..0000000 --- a/billingstack/taskflow/states.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Job states. -CLAIMED = 'CLAIMED' -FAILURE = 'FAILURE' -PENDING = 'PENDING' -RUNNING = 'RUNNING' -SUCCESS = 'SUCCESS' -UNCLAIMED = 'UNCLAIMED' - -# Flow states. -FAILURE = FAILURE -INTERRUPTED = 'INTERRUPTED' -PENDING = 'PENDING' -RESUMING = 'RESUMING' -REVERTING = 'REVERTING' -RUNNING = RUNNING -STARTED = 'STARTED' -SUCCESS = SUCCESS -CANCELLED = 'CANCELLED' -INCOMPLETE = 'INCOMPLETE' - -# Task states. 
-FAILURE = FAILURE -STARTED = STARTED -SUCCESS = SUCCESS -TIMED_OUT = 'TIMED_OUT' -CANCELLED = CANCELLED diff --git a/billingstack/taskflow/task.py b/billingstack/taskflow/task.py deleted file mode 100644 index 4a88c54..0000000 --- a/billingstack/taskflow/task.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2013 Rackspace Hosting Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -from billingstack.openstack.common import uuidutils -from billingstack.taskflow import utils - - -class Task(object): - """An abstraction that defines a potential piece of work that can be - applied and can be reverted to undo the work as a single unit. - """ - __metaclass__ = abc.ABCMeta - - def __init__(self, name, task_id=None): - if task_id: - self._uuid = task_id - else: - self._uuid = uuidutils.generate_uuid() - self._name = name - # An *immutable* input 'resource' name set this task depends - # on existing before this task can be applied. - self.requires = set() - # An *immutable* input 'resource' name set this task would like to - # depends on existing before this task can be applied (but does not - # strongly depend on existing). - self.optional = set() - # An *immutable* output 'resource' name set this task - # produces that other tasks may depend on this task providing. 
- self.provides = set() - # This identifies the version of the task to be ran which - # can be useful in resuming older versions of tasks. Standard - # major, minor version semantics apply. - self.version = (1, 0) - - @property - def uuid(self): - return self._uuid - - @property - def name(self): - return self._name - - def __str__(self): - return "%s==%s" % (self.name, utils.get_task_version(self)) - - @abc.abstractmethod - def __call__(self, context, *args, **kwargs): - """Activate a given task which will perform some operation and return. - - This method can be used to apply some given context and given set - of args and kwargs to accomplish some goal. Note that the result - that is returned needs to be serializable so that it can be passed - back into this task if reverting is triggered. - """ - - def revert(self, context, result, cause): - """Revert this task using the given context, result that the apply - provided as well as any information which may have caused - said reversion. - """ diff --git a/billingstack/taskflow/utils.py b/billingstack/taskflow/utils.py deleted file mode 100644 index 686b2d9..0000000 --- a/billingstack/taskflow/utils.py +++ /dev/null @@ -1,532 +0,0 @@ -# -*- coding: utf-8 -*- - -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. -# Copyright (C) 2013 Rackspace Hosting All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import collections -import contextlib -import copy -import logging -import re -import sys -import threading -import threading2 -import time - -from billingstack.openstack.common import uuidutils - -TASK_FACTORY_ATTRIBUTE = '_TaskFlow_task_factory' -LOG = logging.getLogger(__name__) - - -def await(check_functor, timeout=None): - if timeout is not None: - end_time = time.time() + max(0, timeout) - else: - end_time = None - # Use the same/similar scheme that the python condition class uses. - delay = 0.0005 - while not check_functor(): - time.sleep(delay) - if end_time is not None: - remaining = end_time - time.time() - if remaining <= 0: - return False - delay = min(delay * 2, remaining, 0.05) - else: - delay = min(delay * 2, 0.05) - return True - - -def get_task_version(task): - """Gets a tasks *string* version, whether it is a task object/function.""" - task_version = getattr(task, 'version') - if isinstance(task_version, (list, tuple)): - task_version = '.'.join(str(item) for item in task_version) - if task_version is not None and not isinstance(task_version, basestring): - task_version = str(task_version) - return task_version - - -def is_version_compatible(version_1, version_2): - """Checks for major version compatibility of two *string" versions.""" - if version_1 == version_2: - # Equivalent exactly, so skip the rest. - return True - - def _convert_to_pieces(version): - try: - pieces = [] - for p in version.split("."): - p = p.strip() - if not len(p): - pieces.append(0) - continue - # Clean off things like 1alpha, or 2b and just select the - # digit that starts that entry instead. 
- p_match = re.match(r"(\d+)([A-Za-z]*)(.*)", p) - if p_match: - p = p_match.group(1) - pieces.append(int(p)) - except (AttributeError, TypeError, ValueError): - pieces = [] - return pieces - - version_1_pieces = _convert_to_pieces(version_1) - version_2_pieces = _convert_to_pieces(version_2) - if len(version_1_pieces) == 0 or len(version_2_pieces) == 0: - return False - - # Ensure major version compatibility to start. - major1 = version_1_pieces[0] - major2 = version_2_pieces[0] - if major1 != major2: - return False - return True - - -class MultiLock(object): - """A class which can attempt to obtain many locks at once and release - said locks when exiting. - - Useful as a context manager around many locks (instead of having to nest - said individual context managers). - """ - - def __init__(self, locks): - assert len(locks) > 0, "Zero locks requested" - self._locks = locks - self._locked = [False] * len(locks) - - def __enter__(self): - - def is_locked(lock): - # NOTE(harlowja): the threading2 lock doesn't seem to have this - # attribute, so thats why we are checking it existing first. 
- if hasattr(lock, 'locked'): - return lock.locked() - return False - - for i in xrange(0, len(self._locked)): - if self._locked[i] or is_locked(self._locks[i]): - raise threading.ThreadError("Lock %s not previously released" - % (i + 1)) - self._locked[i] = False - for (i, lock) in enumerate(self._locks): - self._locked[i] = lock.acquire() - - def __exit__(self, type, value, traceback): - for (i, locked) in enumerate(self._locked): - try: - if locked: - self._locks[i].release() - self._locked[i] = False - except threading.ThreadError: - LOG.exception("Unable to release lock %s", i + 1) - - -class CountDownLatch(object): - """Similar in concept to the java count down latch.""" - - def __init__(self, count=0): - self.count = count - self.lock = threading.Condition() - - def countDown(self): - with self.lock: - self.count -= 1 - if self.count <= 0: - self.lock.notifyAll() - - def await(self, timeout=None): - end_time = None - if timeout is not None: - timeout = max(0, timeout) - end_time = time.time() + timeout - time_up = False - with self.lock: - while True: - # Stop waiting on these 2 conditions. - if time_up or self.count <= 0: - break - # Was this a spurious wakeup or did we really end?? - self.lock.wait(timeout=timeout) - if end_time is not None: - if time.time() >= end_time: - time_up = True - else: - # Reduce the timeout so that we don't wait extra time - # over what we initially were requested to. - timeout = end_time - time.time() - return self.count <= 0 - - -class LastFedIter(object): - """An iterator which yields back the first item and then yields back - results from the provided iterator. - """ - - def __init__(self, first, rest_itr): - self.first = first - self.rest_itr = rest_itr - - def __iter__(self): - yield self.first - for i in self.rest_itr: - yield i - - -class ThreadGroupExecutor(object): - """A simple thread executor that spins up new threads (or greenthreads) for - each task to be completed (no pool limit is enforced). 
- - TODO(harlowja): Likely if we use the more advanced executors that come with - the concurrent.futures library we can just get rid of this. - """ - - def __init__(self, daemonize=True): - self._threads = [] - self._group = threading2.ThreadGroup() - self._daemonize = daemonize - - def submit(self, fn, *args, **kwargs): - t = threading2.Thread(target=fn, group=self._group, - args=args, kwargs=kwargs) - t.daemon = self._daemonize - self._threads.append(t) - t.start() - - def await_termination(self, timeout=None): - if not self._threads: - return - return self._group.join(timeout) - - -class FlowFailure(object): - """When a task failure occurs the following object will be given to revert - and can be used to interrogate what caused the failure. - """ - - def __init__(self, runner, flow, exc, exc_info=None): - self.runner = runner - self.flow = flow - self.exc = exc - if not exc_info: - self.exc_info = sys.exc_info() - else: - self.exc_info = exc_info - - -class RollbackTask(object): - """A helper task that on being called will call the underlying callable - tasks revert method (if said method exists). - """ - - def __init__(self, context, task, result): - self.task = task - self.result = result - self.context = context - - def __str__(self): - return str(self.task) - - def __call__(self, cause): - if ((hasattr(self.task, "revert") and - isinstance(self.task.revert, collections.Callable))): - self.task.revert(self.context, self.result, cause) - - -class Runner(object): - """A helper class that wraps a task and can find the needed inputs for - the task to run, as well as providing a uuid and other useful functionality - for users of the task. - - TODO(harlowja): replace with the task details object or a subclass of - that??? 
- """ - - def __init__(self, task, uuid=None): - assert isinstance(task, collections.Callable) - task_factory = getattr(task, TASK_FACTORY_ATTRIBUTE, None) - if task_factory: - self.task = task_factory(task) - else: - self.task = task - self.providers = {} - self.result = None - if not uuid: - self._id = uuidutils.generate_uuid() - else: - self._id = str(uuid) - - @property - def uuid(self): - return str(self._id) - - @property - def requires(self): - return self.task.requires - - @property - def provides(self): - return self.task.provides - - @property - def optional(self): - return self.task.optional - - @property - def runs_before(self): - return [] - - @property - def version(self): - return get_task_version(self.task) - - @property - def name(self): - return self.task.name - - def reset(self): - self.result = None - - def __str__(self): - lines = ["Runner: %s" % (self.name)] - lines.append("%s" % (self.uuid)) - lines.append("%s" % (self.version)) - return "; ".join(lines) - - def __call__(self, *args, **kwargs): - # Find all of our inputs first. - kwargs = dict(kwargs) - for (k, who_made) in self.providers.iteritems(): - if k in kwargs: - continue - try: - kwargs[k] = who_made.result[k] - except (TypeError, KeyError): - pass - optional_keys = self.optional - optional_keys = optional_keys - set(kwargs.keys()) - for k in optional_keys: - for who_ran in self.runs_before: - matched = False - if k in who_ran.provides: - try: - kwargs[k] = who_ran.result[k] - matched = True - except (TypeError, KeyError): - pass - if matched: - break - # Ensure all required keys are either existent or set to none. - for k in self.requires: - if k not in kwargs: - kwargs[k] = None - # And now finally run. - self.result = self.task(*args, **kwargs) - return self.result - - -class AOTRunner(Runner): - """A runner that knows who runs before this runner ahead of time from a - known list of previous runners. 
- """ - - def __init__(self, task): - super(AOTRunner, self).__init__(task) - self._runs_before = [] - - @property - def runs_before(self): - return self._runs_before - - @runs_before.setter - def runs_before(self, runs_before): - self._runs_before = list(runs_before) - - -class TransitionNotifier(object): - """A utility helper class that can be used to subscribe to - notifications of events occuring as well as allow a entity to post said - notifications to subscribers. - """ - - RESERVED_KEYS = ('details',) - ANY = '*' - - def __init__(self): - self._listeners = collections.defaultdict(list) - - def reset(self): - self._listeners = collections.defaultdict(list) - - def notify(self, state, details): - listeners = list(self._listeners.get(self.ANY, [])) - for i in self._listeners[state]: - if i not in listeners: - listeners.append(i) - if not listeners: - return - for (callback, args, kwargs) in listeners: - if args is None: - args = [] - if kwargs is None: - kwargs = {} - kwargs['details'] = details - try: - callback(state, *args, **kwargs) - except Exception: - LOG.exception(("Failure calling callback %s to notify about" - " state transition %s"), callback, state) - - def register(self, state, callback, args=None, kwargs=None): - assert isinstance(callback, collections.Callable) - for i, (cb, args, kwargs) in enumerate(self._listeners.get(state, [])): - if cb is callback: - raise ValueError("Callback %s already registered" % (callback)) - if kwargs: - for k in self.RESERVED_KEYS: - if k in kwargs: - raise KeyError(("Reserved key '%s' not allowed in " - "kwargs") % k) - kwargs = copy.copy(kwargs) - if args: - args = copy.copy(args) - self._listeners[state].append((callback, args, kwargs)) - - def deregister(self, state, callback): - if state not in self._listeners: - return - for i, (cb, args, kwargs) in enumerate(self._listeners[state]): - if cb is callback: - self._listeners[state].pop(i) - break - - -class RollbackAccumulator(object): - """A utility class that 
can help in organizing 'undo' like code - so that said code be rolled back on failure (automatically or manually) - by activating rollback callables that were inserted during said codes - progression. - """ - - def __init__(self): - self._rollbacks = [] - - def add(self, *callables): - self._rollbacks.extend(callables) - - def reset(self): - self._rollbacks = [] - - def __len__(self): - return len(self._rollbacks) - - def __enter__(self): - return self - - def rollback(self, cause): - LOG.warn("Activating %s rollbacks due to %s.", len(self), cause) - for (i, f) in enumerate(reversed(self._rollbacks)): - LOG.debug("Calling rollback %s: %s", i + 1, f) - try: - f(cause) - except Exception: - LOG.exception(("Failed rolling back %s: %s due " - "to inner exception."), i + 1, f) - - def __exit__(self, type, value, tb): - if any((value, type, tb)): - self.rollback(value) - - -class ReaderWriterLock(object): - """A simple reader-writer lock. - - Several readers can hold the lock simultaneously, and only one writer. - Write locks have priority over reads to prevent write starvation. - - Public domain @ http://majid.info/blog/a-reader-writer-lock-for-python/ - """ - - def __init__(self): - self.rwlock = 0 - self.writers_waiting = 0 - self.monitor = threading.Lock() - self.readers_ok = threading.Condition(self.monitor) - self.writers_ok = threading.Condition(self.monitor) - - @contextlib.contextmanager - def acquire(self, read=True): - """Acquire a read or write lock in a context manager.""" - try: - if read: - self.acquire_read() - else: - self.acquire_write() - yield self - finally: - self.release() - - def acquire_read(self): - """Acquire a read lock. - - Several threads can hold this typeof lock. - It is exclusive with write locks. - """ - - self.monitor.acquire() - while self.rwlock < 0 or self.writers_waiting: - self.readers_ok.wait() - self.rwlock += 1 - self.monitor.release() - - def acquire_write(self): - """Acquire a write lock. 
- - Only one thread can hold this lock, and only when no read locks - are also held. - """ - - self.monitor.acquire() - while self.rwlock != 0: - self.writers_waiting += 1 - self.writers_ok.wait() - self.writers_waiting -= 1 - self.rwlock = -1 - self.monitor.release() - - def release(self): - """Release a lock, whether read or write.""" - - self.monitor.acquire() - if self.rwlock < 0: - self.rwlock = 0 - else: - self.rwlock -= 1 - wake_writers = self.writers_waiting and self.rwlock == 0 - wake_readers = self.writers_waiting == 0 - self.monitor.release() - if wake_writers: - self.writers_ok.acquire() - self.writers_ok.notify() - self.writers_ok.release() - elif wake_readers: - self.readers_ok.acquire() - self.readers_ok.notifyAll() - self.readers_ok.release() diff --git a/billingstack/tasks.py b/billingstack/tasks.py index f23c35c..f1f30a5 100644 --- a/billingstack/tasks.py +++ b/billingstack/tasks.py @@ -13,15 +13,17 @@ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. +from taskflow import task + from billingstack.openstack.common import log from billingstack.openstack.common.gettextutils import _ -from billingstack.taskflow import task LOG = log.getLogger(__name__) -def _make_task_name(cls, prefix="default", addons=None): +def _make_task_name(cls, prefix=None, addons=None): + prefix = prefix or 'default' components = [cls.__module__, cls.__name__] if addons: for a in addons: @@ -58,28 +60,7 @@ def task_log_change(state, details): class RootTask(task.Task): - def __init__(self, name=None, **kw): - name = name or _make_task_name(self.__class__, **kw) - super(RootTask, self).__init__(name) - - -class ValuesInjectTask(RootTask): - """ - This injects a dict into the flow. - - This injection is done so that the keys (and values) provided can be - dependended on by tasks further down the line. 
Since taskflow is dependency - based this can be considered the bootstrapping task that provides an - initial set of values for other tasks to get started with. If this did not - exist then tasks would fail locating there dependent tasks and the values - said dependent tasks produce. - - Reversion strategy: N/A - """ - def __init__(self, values, **kw): - super(ValuesInjectTask, self).__init__(**kw) - self.provides.update(values.keys()) - self._values = values - - def __call__(self, context): - return dict(self._values) + def __init__(self, name=None, prefix=None, addons=None, **kw): + name = name or _make_task_name(self.__class__, prefix=prefix, + addons=addons) + super(RootTask, self).__init__(name, **kw) diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst index 0346de7..1a7f283 100644 --- a/doc/source/install/manual.rst +++ b/doc/source/install/manual.rst @@ -39,7 +39,7 @@ Common Steps :: - $ git clone https://github.com/billingstack/billingstack.git + $ git clone https://github.com/stackforge/billingstack.git $ cd billingstack 3. Setup virtualenv and Install BillingStack and it's dependencies @@ -131,4 +131,4 @@ Installing the API ... 
- 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ \ No newline at end of file + 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..580c5ee --- /dev/null +++ b/requirements.txt @@ -0,0 +1,28 @@ +Babel>=1.3 +pbr>=0.5.21,<1.0 +# This file is managed by openstack-depends +argparse +cliff>=1.4.3 +eventlet>=0.13.0 +extras +pecan>=0.2.0 +iso8601>=0.1.8 +netaddr>=0.7.6 +oslo.config>=1.2.0 +Paste +PasteDeploy>=1.5.0 +Routes>=1.12.3 +stevedore>=0.10 +WebOb>=1.2.3,<1.3 +WSME>=0.5b6 +# Optional Stuff that is used by default +alembic>=0.4.1 +SQLAlchemy>=0.7.8,<=0.7.99 +kombu>=2.4.8 + +# Identity +python-memcached>=1.48 +passlib + +pycountry +taskflow diff --git a/setup.py b/setup.py index 2a0786a..70c2b3f 100644 --- a/setup.py +++ b/setup.py @@ -18,5 +18,5 @@ import setuptools setuptools.setup( - setup_requires=['pbr>=0.5.21,<1.0'], + setup_requires=['pbr'], pbr=True) diff --git a/taskflow.conf b/taskflow.conf deleted file mode 100644 index d71164a..0000000 --- a/taskflow.conf +++ /dev/null @@ -1,7 +0,0 @@ -[DEFAULT] - -# The list of primitives to copy from taskflow -primitives=flow.threaded_flow,flow.linear_flow,task - -# The base module to hold the copy of taskflow -base=billingstack diff --git a/tools/test-requires b/test-requirements.txt similarity index 100% rename from tools/test-requires rename to test-requirements.txt diff --git a/tools/colorizer.py b/tools/colorizer.py deleted file mode 100755 index aa7427e..0000000 --- a/tools/colorizer.py +++ /dev/null @@ -1,333 +0,0 @@ -#!/usr/bin/env python -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2013, Nebula, Inc. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Colorizer Code is borrowed from Twisted: -# Copyright (c) 2001-2010 Twisted Matrix Laboratories. -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -"""Display a subunit stream through a colorized unittest test runner.""" - -import heapq -import subunit -import sys -import unittest - -import testtools - - -class _AnsiColorizer(object): - """ - A colorizer is an object that loosely wraps around a stream, allowing - callers to write text to the stream in a particular color. - - Colorizer classes must implement C{supported()} and C{write(text, color)}. - """ - _colors = dict(black=30, red=31, green=32, yellow=33, - blue=34, magenta=35, cyan=36, white=37) - - def __init__(self, stream): - self.stream = stream - - def supported(cls, stream=sys.stdout): - """ - A class method that returns True if the current platform supports - coloring terminal output using this method. Returns False otherwise. - """ - if not stream.isatty(): - return False # auto color only on TTYs - try: - import curses - except ImportError: - return False - else: - try: - try: - return curses.tigetnum("colors") > 2 - except curses.error: - curses.setupterm() - return curses.tigetnum("colors") > 2 - except Exception: - # guess false in case of error - return False - supported = classmethod(supported) - - def write(self, text, color): - """ - Write the given text to the stream in the given color. - - @param text: Text to be written to the stream. - - @param color: A string label for a color. e.g. 'red', 'white'. - """ - color = self._colors[color] - self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text)) - - -class _Win32Colorizer(object): - """ - See _AnsiColorizer docstring. 
- """ - def __init__(self, stream): - import win32console - red, green, blue, bold = (win32console.FOREGROUND_RED, - win32console.FOREGROUND_GREEN, - win32console.FOREGROUND_BLUE, - win32console.FOREGROUND_INTENSITY) - self.stream = stream - self.screenBuffer = win32console.GetStdHandle( - win32console.STD_OUT_HANDLE) - self._colors = { - 'normal': red | green | blue, - 'red': red | bold, - 'green': green | bold, - 'blue': blue | bold, - 'yellow': red | green | bold, - 'magenta': red | blue | bold, - 'cyan': green | blue | bold, - 'white': red | green | blue | bold - } - - def supported(cls, stream=sys.stdout): - try: - import win32console - screenBuffer = win32console.GetStdHandle( - win32console.STD_OUT_HANDLE) - except ImportError: - return False - import pywintypes - try: - screenBuffer.SetConsoleTextAttribute( - win32console.FOREGROUND_RED | - win32console.FOREGROUND_GREEN | - win32console.FOREGROUND_BLUE) - except pywintypes.error: - return False - else: - return True - supported = classmethod(supported) - - def write(self, text, color): - color = self._colors[color] - self.screenBuffer.SetConsoleTextAttribute(color) - self.stream.write(text) - self.screenBuffer.SetConsoleTextAttribute(self._colors['normal']) - - -class _NullColorizer(object): - """ - See _AnsiColorizer docstring. 
- """ - def __init__(self, stream): - self.stream = stream - - def supported(cls, stream=sys.stdout): - return True - supported = classmethod(supported) - - def write(self, text, color): - self.stream.write(text) - - -def get_elapsed_time_color(elapsed_time): - if elapsed_time > 1.0: - return 'red' - elif elapsed_time > 0.25: - return 'yellow' - else: - return 'green' - - -class NovaTestResult(testtools.TestResult): - def __init__(self, stream, descriptions, verbosity): - super(NovaTestResult, self).__init__() - self.stream = stream - self.showAll = verbosity > 1 - self.num_slow_tests = 10 - self.slow_tests = [] # this is a fixed-sized heap - self.colorizer = None - # NOTE(vish): reset stdout for the terminal check - stdout = sys.stdout - sys.stdout = sys.__stdout__ - for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]: - if colorizer.supported(): - self.colorizer = colorizer(self.stream) - break - sys.stdout = stdout - self.start_time = None - self.last_time = {} - self.results = {} - self.last_written = None - - def _writeElapsedTime(self, elapsed): - color = get_elapsed_time_color(elapsed) - self.colorizer.write(" %.2f" % elapsed, color) - - def _addResult(self, test, *args): - try: - name = test.id() - except AttributeError: - name = 'Unknown.unknown' - test_class, test_name = name.rsplit('.', 1) - - elapsed = (self._now() - self.start_time).total_seconds() - item = (elapsed, test_class, test_name) - if len(self.slow_tests) >= self.num_slow_tests: - heapq.heappushpop(self.slow_tests, item) - else: - heapq.heappush(self.slow_tests, item) - - self.results.setdefault(test_class, []) - self.results[test_class].append((test_name, elapsed) + args) - self.last_time[test_class] = self._now() - self.writeTests() - - def _writeResult(self, test_name, elapsed, long_result, color, - short_result, success): - if self.showAll: - self.stream.write(' %s' % str(test_name).ljust(66)) - self.colorizer.write(long_result, color) - if success: - 
self._writeElapsedTime(elapsed) - self.stream.writeln() - else: - self.colorizer.write(short_result, color) - - def addSuccess(self, test): - super(NovaTestResult, self).addSuccess(test) - self._addResult(test, 'OK', 'green', '.', True) - - def addFailure(self, test, err): - super(NovaTestResult, self).addFailure(test, err) - self._addResult(test, 'FAIL', 'red', 'F', False) - - def addError(self, test, err): - super(NovaTestResult, self).addFailure(test, err) - self._addResult(test, 'ERROR', 'red', 'E', False) - - def addSkip(self, test, reason=None, details=None): - super(NovaTestResult, self).addSkip(test, reason, details) - self._addResult(test, 'SKIP', 'blue', 'S', True) - - def startTest(self, test): - self.start_time = self._now() - super(NovaTestResult, self).startTest(test) - - def writeTestCase(self, cls): - if not self.results.get(cls): - return - if cls != self.last_written: - self.colorizer.write(cls, 'white') - self.stream.writeln() - for result in self.results[cls]: - self._writeResult(*result) - del self.results[cls] - self.stream.flush() - self.last_written = cls - - def writeTests(self): - time = self.last_time.get(self.last_written, self._now()) - if not self.last_written or (self._now() - time).total_seconds() > 2.0: - diff = 3.0 - while diff > 2.0: - classes = self.results.keys() - oldest = min(classes, key=lambda x: self.last_time[x]) - diff = (self._now() - self.last_time[oldest]).total_seconds() - self.writeTestCase(oldest) - else: - self.writeTestCase(self.last_written) - - def done(self): - self.stopTestRun() - - def stopTestRun(self): - for cls in list(self.results.iterkeys()): - self.writeTestCase(cls) - self.stream.writeln() - self.writeSlowTests() - - def writeSlowTests(self): - # Pare out 'fast' tests - slow_tests = [item for item in self.slow_tests - if get_elapsed_time_color(item[0]) != 'green'] - if slow_tests: - slow_total_time = sum(item[0] for item in slow_tests) - slow = ("Slowest %i tests took %.2f secs:" - % (len(slow_tests), 
slow_total_time)) - self.colorizer.write(slow, 'yellow') - self.stream.writeln() - last_cls = None - # sort by name - for elapsed, cls, name in sorted(slow_tests, - key=lambda x: x[1] + x[2]): - if cls != last_cls: - self.colorizer.write(cls, 'white') - self.stream.writeln() - last_cls = cls - self.stream.write(' %s' % str(name).ljust(68)) - self._writeElapsedTime(elapsed) - self.stream.writeln() - - def printErrors(self): - if self.showAll: - self.stream.writeln() - self.printErrorList('ERROR', self.errors) - self.printErrorList('FAIL', self.failures) - - def printErrorList(self, flavor, errors): - for test, err in errors: - self.colorizer.write("=" * 70, 'red') - self.stream.writeln() - self.colorizer.write(flavor, 'red') - self.stream.writeln(": %s" % test.id()) - self.colorizer.write("-" * 70, 'red') - self.stream.writeln() - self.stream.writeln("%s" % err) - - -test = subunit.ProtocolTestCase(sys.stdin, passthrough=None) - -if sys.version_info[0:2] <= (2, 6): - runner = unittest.TextTestRunner(verbosity=2) -else: - runner = unittest.TextTestRunner(verbosity=2, resultclass=NovaTestResult) - -if runner.run(test).wasSuccessful(): - exit_code = 0 -else: - exit_code = 1 -sys.exit(exit_code) diff --git a/tools/install_venv.py b/tools/install_venv.py deleted file mode 100644 index 096a95b..0000000 --- a/tools/install_venv.py +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env python -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Copyright 2010 OpenStack Foundation. -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Installation script for BillingStack's development virtualenv -""" - -import os -import subprocess -import sys - -import install_venv_common as install_venv - - -def print_help(): - help = """ - BillingStack development environment setup is complete. - - BillingStack development uses virtualenv to track and manage Python dependencies - while in development and testing. - - To activate the BillingStack virtualenv for the extent of your current shell - session you can run: - - $ source .venv/bin/activate - - Or, if you prefer, you can run commands in the virtualenv on a case by case - basis by running: - - $ tools/with_venv.sh - - Also, make test will automatically use the virtualenv. 
- """ - print help - - -def main(argv): - root = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - venv = os.path.join(root, '.venv') - pip_requires = os.path.join(root, 'tools', 'pip-requires') - pip_options = os.path.join(root, 'tools', 'pip-options') - test_requires = os.path.join(root, 'tools', 'test-requires') - py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1]) - project = 'quantum' - install = install_venv.InstallVenv(root, venv, pip_requires, pip_options, test_requires, - py_version, project) - options = install.parse_args(argv) - install.check_python_version() - install.check_dependencies() - install.create_virtualenv(no_site_packages=options.no_site_packages) - install.install_dependencies() - install.post_process() - print_help() - - -if __name__ == '__main__': - main(sys.argv) diff --git a/tools/install_venv_common.py b/tools/install_venv_common.py deleted file mode 100644 index 8123f89..0000000 --- a/tools/install_venv_common.py +++ /dev/null @@ -1,224 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 OpenStack Foundation -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Provides methods needed by installation script for OpenStack development -virtual environments. 
- -Synced in from openstack-common -""" - -import argparse -import os -import subprocess -import sys - - -class InstallVenv(object): - - def __init__(self, root, venv, pip_requires, pip_options, test_requires, - py_version, project): - self.root = root - self.venv = venv - self.pip_requires = pip_requires - self.pip_options = pip_options - self.test_requires = test_requires - self.py_version = py_version - self.project = project - - def die(self, message, *args): - print >> sys.stderr, message % args - sys.exit(1) - - def check_python_version(self): - if sys.version_info < (2, 6): - self.die("Need Python Version >= 2.6") - - def run_command_with_code(self, cmd, redirect_output=True, - check_exit_code=True): - """Runs a command in an out-of-process shell. - - Returns the output of that command. Working directory is self.root. - """ - if redirect_output: - stdout = subprocess.PIPE - else: - stdout = None - - proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout) - output = proc.communicate()[0] - if check_exit_code and proc.returncode != 0: - self.die('Command "%s" failed.\n%s', ' '.join(cmd), output) - return (output, proc.returncode) - - def run_command(self, cmd, redirect_output=True, check_exit_code=True): - return self.run_command_with_code(cmd, redirect_output, - check_exit_code)[0] - - def get_distro(self): - if (os.path.exists('/etc/fedora-release') or - os.path.exists('/etc/redhat-release')): - return Fedora(self.root, self.venv, self.pip_requires, - self.pip_options, self.test_requires, - self.py_version, self.project) - else: - return Distro(self.root, self.venv, self.pip_requires, - self.pip_options, self.test_requires, - self.py_version, self.project) - - def check_dependencies(self): - self.get_distro().install_virtualenv() - - def create_virtualenv(self, no_site_packages=True): - """Creates the virtual environment and installs PIP. - - Creates the virtual environment and installs PIP only into the - virtual environment. 
- """ - if not os.path.isdir(self.venv): - print 'Creating venv...', - if no_site_packages: - self.run_command(['virtualenv', '-q', '--no-site-packages', - self.venv]) - else: - self.run_command(['virtualenv', '-q', self.venv]) - print 'done.' - print 'Installing pip in venv...', - if not self.run_command(['tools/with_venv.sh', 'easy_install', - 'pip>1.0']).strip(): - self.die("Failed to install pip.") - print 'done.' - else: - print "venv already exists..." - pass - - def pip_install(self, *args): - self.run_command(['tools/with_venv.sh', - 'pip', 'install', '--upgrade'] + list(args), - redirect_output=False) - - def install_dependencies(self): - print 'Installing dependencies with pip (this can take a while)...' - - # First things first, make sure our venv has the latest pip and - # distribute. - # NOTE: we keep pip at version 1.1 since the most recent version causes - # the .venv creation to fail. See: - # https://bugs.launchpad.net/nova/+bug/1047120 - self.pip_install('pip==1.1') - self.pip_install('distribute') - - # Install greenlet by hand - just listing it in the requires file does - # not - # get it installed in the right order - self.pip_install('greenlet') - - self.pip_install('-r', self.pip_requires) - self.pip_install('-r', self.pip_options) - self.pip_install('-r', self.test_requires) - - def post_process(self): - self.get_distro().post_process() - - def parse_args(self, argv): - """Parses command-line arguments.""" - parser = argparse.ArgumentParser() - parser.add_argument('-n', '--no-site-packages', - action='store_true', - help="Do not inherit packages from global Python " - "install") - return parser.parse_args(argv[1:]) - - -class Distro(InstallVenv): - - def check_cmd(self, cmd): - return bool(self.run_command(['which', cmd], - check_exit_code=False).strip()) - - def install_virtualenv(self): - if self.check_cmd('virtualenv'): - return - - if self.check_cmd('easy_install'): - print 'Installing virtualenv via easy_install...', - if 
self.run_command(['easy_install', 'virtualenv']): - print 'Succeeded' - return - else: - print 'Failed' - - self.die('ERROR: virtualenv not found.\n\n%s development' - ' requires virtualenv, please install it using your' - ' favorite package management tool' % self.project) - - def post_process(self): - """Any distribution-specific post-processing gets done here. - - In particular, this is useful for applying patches to code inside - the venv. - """ - pass - - -class Fedora(Distro): - """This covers all Fedora-based distributions. - - Includes: Fedora, RHEL, CentOS, Scientific Linux - """ - - def check_pkg(self, pkg): - return self.run_command_with_code(['rpm', '-q', pkg], - check_exit_code=False)[1] == 0 - - def yum_install(self, pkg, **kwargs): - print "Attempting to install '%s' via yum" % pkg - self.run_command(['sudo', 'yum', 'install', '-y', pkg], **kwargs) - - def apply_patch(self, originalfile, patchfile): - self.run_command(['patch', '-N', originalfile, patchfile], - check_exit_code=False) - - def install_virtualenv(self): - if self.check_cmd('virtualenv'): - return - - if not self.check_pkg('python-virtualenv'): - self.yum_install('python-virtualenv', check_exit_code=False) - - super(Fedora, self).install_virtualenv() - - def post_process(self): - """Workaround for a bug in eventlet. - - This currently affects RHEL6.1, but the fix can safely be - applied to all RHEL and Fedora distributions. - - This can be removed when the fix is applied upstream. 
- - Nova: https://bugs.launchpad.net/nova/+bug/884915 - Upstream: https://bitbucket.org/which_linden/eventlet/issue/89 - """ - - # Install "patch" program if it's not there - if not self.check_pkg('patch'): - self.yum_install('patch') - - # Apply the eventlet patch - self.apply_patch(os.path.join(self.venv, 'lib', self.py_version, - 'site-packages', - 'eventlet/green/subprocess.py'), - 'contrib/redhat-eventlet.patch') diff --git a/tools/patch_tox_venv.py b/tools/patch_tox_venv.py deleted file mode 100644 index 7a8f8fb..0000000 --- a/tools/patch_tox_venv.py +++ /dev/null @@ -1,39 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import sys - -import install_venv_common as install_venv - - -def main(argv): - root = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - - venv = os.environ['VIRTUAL_ENV'] - - pip_requires = os.path.join(root, 'tools', 'pip-requires') - pip_options = os.path.join(root, 'tools', 'pip-options') - test_requires = os.path.join(root, 'tools', 'test-requires') - py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1]) - project = 'Quantum' - install = install_venv.InstallVenv(root, venv, pip_requires, pip_options, - test_requires, py_version, project) - #NOTE(dprince): For Tox we only run post_process (which patches files, etc) - install.post_process() - -if __name__ == '__main__': - main(sys.argv) diff --git a/tools/pip-options b/tools/pip-options deleted file mode 100644 index 29a631f..0000000 --- a/tools/pip-options +++ /dev/null @@ -1,8 +0,0 @@ -# Optional Stuff that is used by default -alembic -SQLAlchemy>=0.7.8,<=0.7.9 -kombu - -# Identity -python-memcached -passlib diff --git a/tools/pip-requires b/tools/pip-requires deleted file mode 100644 index ab08c34..0000000 --- a/tools/pip-requires +++ /dev/null @@ -1,21 +0,0 @@ -Babel>=0.9.6 -pbr>=0.5.21,<1.0 -# This file is managed by openstack-depends -argparse -cliff>=1.4 -eventlet>=0.13.0 -extras -pecan>=0.2.0 -iso8601>=0.1.4 -netaddr -oslo.config>=1.1.0 -Paste -PasteDeploy>=1.5.0 -pycountry -Routes>=1.12.3 -stevedore>=0.10 -WebOb>=1.2.3,<1.3 -https://github.com/stackforge/wsme/archive/master.zip#egg=WSME -# Taskflow -threading2 -networkx diff --git a/tools/setup-requires b/tools/setup-requires deleted file mode 100644 index e69de29..0000000 diff --git a/tools/with_venv.sh b/tools/with_venv.sh index 5c4a271..63f5b98 100755 --- a/tools/with_venv.sh +++ b/tools/with_venv.sh @@ -18,4 +18,4 @@ TOOLS=`dirname $0` VENV=$TOOLS/../.venv -source $VENV/bin/activate && $@ +source $VENV/bin/activate && "$@" diff --git a/tox.ini b/tox.ini index 9d3e754..50462a0 100644 --- a/tox.ini +++ 
b/tox.ini @@ -2,26 +2,24 @@ envlist = py26,py27,pep8 [testenv] +#usedevelop = True +install_command = pip install {opts} {packages} setenv = VIRTUAL_ENV={envdir} -deps = -r{toxinidir}/tools/pip-requires - -r{toxinidir}/tools/pip-options - -r{toxinidir}/tools/test-requires +deps = -r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt setuptools_git>=0.4 -commands = - python tools/patch_tox_venv.py - python setup.py testr --slowest --testr-args='{posargs}' +commands = python setup.py testr --slowest --testr-args='{posargs}' [tox:jenkins] -sitepackages = True downloadcache = ~/cache/pip [testenv:pep8] +deps = flake8 commands = flake8 [testenv:cover] commands = - python tools/patch_tox_venv.py python setup.py testr --coverage --testr-args='{posargs}' [testenv:venv] @@ -38,5 +36,4 @@ commands = {posargs} # TODO(markmcclain) H202 assertRaises Exception too broad ignore = E711,E712,E125,H301,H302,H404,H901,H902,H202 show-source = true -builtins = _ -exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,tools +exclude = .venv,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,tests,build From c37838130693ac11bde4f16ea48d44b4d0c4bbde Mon Sep 17 00:00:00 2001 From: Endre Karlson Date: Sat, 16 Nov 2013 23:56:49 +0100 Subject: [PATCH 181/182] Update testr conf Change-Id: Ib64f5ae86b35f0219466b44ac8317401f7be2ff0 --- .testr.conf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.testr.conf b/.testr.conf index 8737d52..60477e8 100644 --- a/.testr.conf +++ b/.testr.conf @@ -1,4 +1,4 @@ [DEFAULT] -test_command=OS_STDOUT_CAPTURE=1 OS_STDERR_CAPTURE=1 ${PYTHON:-python} -m subunit.run discover -t ./ billingstack/tests $LISTOPT $IDOPTION +test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} ${PYTHON:-python} -m subunit.run discover -t ./ ./ $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE -test_list_option=--list \ No newline at end of file +test_list_option=--list From 
d6f0ac42347c8a9340816dee92aebcc28078c349 Mon Sep 17 00:00:00 2001 From: Monty Taylor Date: Sat, 17 Oct 2015 16:02:34 -0400 Subject: [PATCH 182/182] Retire stackforge/billingstack --- .coveragerc | 7 - .gitignore | 58 -- .gitreview | 4 - .pylintrc | 42 - .testr.conf | 4 - HACKING.rst | 253 ------ LICENSE | 175 ---- MANIFEST.in | 11 - README.rst | 11 +- billingstack.sublime-project | 59 -- billingstack/__init__.py | 15 - billingstack/api/__init__.py | 31 - billingstack/api/app.py | 91 -- billingstack/api/base.py | 158 ---- billingstack/api/hooks.py | 40 - billingstack/api/templates/error.html | 0 billingstack/api/templates/index.html | 9 - billingstack/api/utils.py | 64 -- billingstack/api/v2/__init__.py | 18 - billingstack/api/v2/controllers/__init__.py | 15 - billingstack/api/v2/controllers/currency.py | 67 -- billingstack/api/v2/controllers/customer.py | 74 -- billingstack/api/v2/controllers/invoice.py | 73 -- .../api/v2/controllers/invoice_state.py | 68 -- billingstack/api/v2/controllers/language.py | 67 -- billingstack/api/v2/controllers/merchant.py | 85 -- billingstack/api/v2/controllers/payment.py | 141 --- billingstack/api/v2/controllers/plan.py | 116 --- billingstack/api/v2/controllers/product.py | 74 -- billingstack/api/v2/controllers/root.py | 42 - .../api/v2/controllers/subscription.py | 75 -- billingstack/api/v2/controllers/usage.py | 73 -- billingstack/api/v2/models.py | 221 ----- billingstack/biller/__init__.py | 27 - billingstack/biller/rpcapi.py | 94 -- billingstack/biller/service.py | 105 --- billingstack/biller/storage/__init__.py | 26 - .../biller/storage/impl_sqlalchemy.py | 246 ----- billingstack/central/__init__.py | 28 - billingstack/central/flows/__init__.py | 0 billingstack/central/flows/merchant.py | 43 - billingstack/central/rpcapi.py | 211 ----- billingstack/central/service.py | 215 ----- billingstack/central/storage/__init__.py | 31 - .../storage/impl_sqlalchemy/__init__.py | 502 ---------- .../impl_sqlalchemy/migration/README.md | 94 -- 
.../impl_sqlalchemy/migration/__init__.py | 0 .../impl_sqlalchemy/migration/alembic.ini | 52 -- .../migration/alembic_migrations/__init__.py | 0 .../migration/alembic_migrations/env.py | 91 -- .../alembic_migrations/script.py.mako | 40 - .../alembic_migrations/versions/README | 3 - .../storage/impl_sqlalchemy/migration/cli.py | 125 --- .../central/storage/impl_sqlalchemy/models.py | 228 ----- billingstack/collector/__init__.py | 27 - billingstack/collector/flows/__init__.py | 17 - .../collector/flows/gateway_configuration.py | 97 -- .../collector/flows/payment_method.py | 103 --- billingstack/collector/rpcapi.py | 94 -- billingstack/collector/service.py | 108 --- billingstack/collector/states.py | 21 - billingstack/collector/storage/__init__.py | 108 --- .../collector/storage/impl_sqlalchemy.py | 263 ------ billingstack/conf.py | 31 - billingstack/exceptions.py | 89 -- billingstack/manage/__init__.py | 33 - billingstack/manage/base.py | 86 -- billingstack/manage/database.py | 34 - billingstack/manage/provider.py | 42 - billingstack/netconf.py | 59 -- billingstack/openstack/__init__.py | 0 billingstack/openstack/common/__init__.py | 0 billingstack/openstack/common/context.py | 86 -- .../openstack/common/crypto/__init__.py | 0 billingstack/openstack/common/crypto/utils.py | 179 ---- billingstack/openstack/common/db/__init__.py | 16 - billingstack/openstack/common/db/api.py | 106 --- billingstack/openstack/common/db/exception.py | 51 -- .../common/db/sqlalchemy/__init__.py | 16 - .../openstack/common/db/sqlalchemy/models.py | 103 --- .../openstack/common/db/sqlalchemy/utils.py | 132 --- .../openstack/common/eventlet_backdoor.py | 146 --- billingstack/openstack/common/exception.py | 139 --- billingstack/openstack/common/excutils.py | 101 --- billingstack/openstack/common/fileutils.py | 139 --- billingstack/openstack/common/gettextutils.py | 373 -------- billingstack/openstack/common/importutils.py | 68 -- billingstack/openstack/common/iniparser.py | 130 --- 
billingstack/openstack/common/jsonutils.py | 180 ---- billingstack/openstack/common/local.py | 47 - billingstack/openstack/common/lockutils.py | 305 ------- billingstack/openstack/common/log.py | 626 ------------- billingstack/openstack/common/loopingcall.py | 147 --- .../openstack/common/network_utils.py | 81 -- .../openstack/common/notifier/__init__.py | 14 - billingstack/openstack/common/notifier/api.py | 173 ---- .../openstack/common/notifier/log_notifier.py | 37 - .../common/notifier/no_op_notifier.py | 19 - .../common/notifier/rabbit_notifier.py | 46 - .../openstack/common/notifier/rpc_notifier.py | 47 - .../common/notifier/rpc_notifier2.py | 53 -- .../common/notifier/test_notifier.py | 22 - billingstack/openstack/common/processutils.py | 250 ----- billingstack/openstack/common/rpc/__init__.py | 306 ------- billingstack/openstack/common/rpc/amqp.py | 636 ------------- billingstack/openstack/common/rpc/common.py | 506 ----------- .../openstack/common/rpc/dispatcher.py | 178 ---- .../openstack/common/rpc/impl_fake.py | 195 ---- .../openstack/common/rpc/impl_kombu.py | 856 ------------------ .../openstack/common/rpc/impl_qpid.py | 833 ----------------- billingstack/openstack/common/rpc/impl_zmq.py | 818 ----------------- .../openstack/common/rpc/matchmaker.py | 324 ------- .../openstack/common/rpc/matchmaker_redis.py | 145 --- .../openstack/common/rpc/matchmaker_ring.py | 108 --- billingstack/openstack/common/rpc/proxy.py | 225 ----- .../openstack/common/rpc/securemessage.py | 521 ----------- .../openstack/common/rpc/serializer.py | 54 -- billingstack/openstack/common/rpc/service.py | 78 -- .../openstack/common/rpc/zmq_receiver.py | 40 - billingstack/openstack/common/service.py | 461 ---------- billingstack/openstack/common/sslutils.py | 100 -- billingstack/openstack/common/test.py | 54 -- billingstack/openstack/common/threadgroup.py | 125 --- billingstack/openstack/common/timeutils.py | 197 ---- billingstack/openstack/common/utils.py | 140 --- 
billingstack/openstack/common/uuidutils.py | 39 - billingstack/openstack/common/versionutils.py | 45 - billingstack/openstack/common/wsgi.py | 797 ---------------- billingstack/openstack/common/xmlutils.py | 74 -- billingstack/paths.py | 68 -- billingstack/payment_gateway/__init__.py | 56 -- billingstack/payment_gateway/base.py | 179 ---- billingstack/payment_gateway/dummy.py | 48 - billingstack/plugin.py | 82 -- billingstack/rater/__init__.py | 27 - billingstack/rater/rpcapi.py | 55 -- billingstack/rater/service.py | 77 -- billingstack/rater/storage/__init__.py | 39 - billingstack/rater/storage/impl_sqlalchemy.py | 89 -- billingstack/samples.py | 43 - billingstack/samples_data/contact_info.json | 15 - billingstack/samples_data/currency.json | 8 - billingstack/samples_data/customer.json | 5 - .../fixtures/currencies_get_response.json | 9 - .../fixtures/currencies_post_request.json | 4 - .../fixtures/currencies_post_response.json | 5 - .../fixtures/languages_get_response.json | 9 - .../fixtures/languages_post_request.json | 4 - .../fixtures/languages_post_response.json | 5 - .../merchant_products_get_response.json | 9 - .../merchant_products_post_request.json | 4 - .../merchant_products_post_response.json | 5 - .../fixtures/merchant_users_get_response.json | 6 - .../fixtures/merchant_users_post_request.json | 4 - .../merchant_users_post_response.json | 6 - .../fixtures/merchants_get_response.json | 13 - .../fixtures/merchants_post_request.json | 6 - .../fixtures/merchants_post_response.json | 7 - ...ayment_gateway_providers_get_response.json | 9 - ...ayment_gateway_providers_post_request.json | 9 - ...yment_gateway_providers_post_response.json | 9 - billingstack/samples_data/invoice_state.json | 7 - billingstack/samples_data/language.json | 8 - billingstack/samples_data/merchant.json | 6 - billingstack/samples_data/payment_method.json | 8 - billingstack/samples_data/pg_config.json | 6 - billingstack/samples_data/pg_method.json | 20 - 
billingstack/samples_data/pg_provider.json | 7 - billingstack/samples_data/plan.json | 9 - billingstack/samples_data/product.json | 182 ---- billingstack/samples_data/user.json | 6 - billingstack/service.py | 62 -- billingstack/sqlalchemy/__init__.py | 15 - billingstack/sqlalchemy/api.py | 253 ------ billingstack/sqlalchemy/model_base.py | 143 --- billingstack/sqlalchemy/session.py | 250 ----- billingstack/sqlalchemy/types.py | 87 -- billingstack/sqlalchemy/utils.py | 58 -- billingstack/storage/__init__.py | 15 - billingstack/storage/base.py | 40 - billingstack/storage/filterer.py | 93 -- billingstack/storage/utils.py | 49 - billingstack/tasks.py | 66 -- billingstack/tests/__init__.py | 0 billingstack/tests/api/__init__.py | 0 billingstack/tests/api/base.py | 179 ---- billingstack/tests/api/v2/__init__.py | 5 - billingstack/tests/api/v2/test_currency.py | 67 -- billingstack/tests/api/v2/test_customer.py | 83 -- .../tests/api/v2/test_invoice_state.py | 73 -- billingstack/tests/api/v2/test_language.py | 67 -- billingstack/tests/api/v2/test_merchant.py | 61 -- .../tests/api/v2/test_payment_method.py | 105 --- billingstack/tests/api/v2/test_plan.py | 67 -- billingstack/tests/api/v2/test_product.py | 70 -- billingstack/tests/base.py | 488 ---------- billingstack/tests/biller/__init__.py | 0 billingstack/tests/biller/storage/__init__.py | 0 billingstack/tests/central/__init__.py | 0 .../tests/central/storage/__init__.py | 249 ----- .../tests/central/storage/test_sqlalchemy.py | 30 - billingstack/tests/collector/__init__.py | 0 .../tests/collector/storage/__init__.py | 293 ------ .../collector/storage/test_sqlalchemy.py | 29 - .../tests/payment_gateway/__init__.py | 0 billingstack/tests/payment_gateway/base.py | 63 -- billingstack/tests/rater/__init__.py | 0 billingstack/tests/rater/storage/__init__.py | 0 billingstack/tests/storage/__init__.py | 0 billingstack/tests/test_utils.py | 22 - billingstack/utils.py | 147 --- billingstack/version.py | 19 - 
bin/billingstack-db-manage | 26 - bin/billingstack-manage | 30 - bin/billingstack-rpc-zmq-receiver | 53 -- doc/requirements.txt | 23 - doc/source/api.rst | 11 - doc/source/architecture.rst | 31 - doc/source/conf.py | 242 ----- doc/source/database.yuml | 37 - doc/source/developing.rst | 66 -- doc/source/glossary.rst | 38 - doc/source/index.rst | 28 - doc/source/install/common.rst | 85 -- doc/source/install/index.rst | 28 - doc/source/install/macos.rst | 167 ---- doc/source/install/manual.rst | 134 --- doc/source/install/packages.rst | 34 - doc/source/install/pgp.rst | 61 -- doc/source/payment.yuml | 8 - doc/source/resources/api_filtering.rst | 104 --- doc/source/resources/index.rst | 26 - doc/source/resources/subscriptions.rst | 96 -- etc/billingstack/billingstack.conf.sample | 106 --- etc/billingstack/policy.json | 1 - openstack.conf | 30 - requirements.txt | 28 - run_tests.sh | 237 ----- setup.cfg | 74 -- setup.py | 22 - test-requirements.txt | 15 - tools/control.sh | 255 ------ tools/load_samples.py | 92 -- tools/resync_storage.py | 38 - tools/with_venv.sh | 21 - tox.ini | 39 - 246 files changed, 5 insertions(+), 24003 deletions(-) delete mode 100644 .coveragerc delete mode 100644 .gitignore delete mode 100644 .gitreview delete mode 100644 .pylintrc delete mode 100644 .testr.conf delete mode 100644 HACKING.rst delete mode 100644 LICENSE delete mode 100644 MANIFEST.in delete mode 100644 billingstack.sublime-project delete mode 100644 billingstack/__init__.py delete mode 100644 billingstack/api/__init__.py delete mode 100644 billingstack/api/app.py delete mode 100644 billingstack/api/base.py delete mode 100644 billingstack/api/hooks.py delete mode 100644 billingstack/api/templates/error.html delete mode 100644 billingstack/api/templates/index.html delete mode 100644 billingstack/api/utils.py delete mode 100644 billingstack/api/v2/__init__.py delete mode 100644 billingstack/api/v2/controllers/__init__.py delete mode 100644 billingstack/api/v2/controllers/currency.py 
delete mode 100644 billingstack/api/v2/controllers/customer.py delete mode 100644 billingstack/api/v2/controllers/invoice.py delete mode 100644 billingstack/api/v2/controllers/invoice_state.py delete mode 100644 billingstack/api/v2/controllers/language.py delete mode 100644 billingstack/api/v2/controllers/merchant.py delete mode 100644 billingstack/api/v2/controllers/payment.py delete mode 100644 billingstack/api/v2/controllers/plan.py delete mode 100644 billingstack/api/v2/controllers/product.py delete mode 100644 billingstack/api/v2/controllers/root.py delete mode 100644 billingstack/api/v2/controllers/subscription.py delete mode 100644 billingstack/api/v2/controllers/usage.py delete mode 100644 billingstack/api/v2/models.py delete mode 100644 billingstack/biller/__init__.py delete mode 100644 billingstack/biller/rpcapi.py delete mode 100644 billingstack/biller/service.py delete mode 100644 billingstack/biller/storage/__init__.py delete mode 100644 billingstack/biller/storage/impl_sqlalchemy.py delete mode 100644 billingstack/central/__init__.py delete mode 100644 billingstack/central/flows/__init__.py delete mode 100644 billingstack/central/flows/merchant.py delete mode 100644 billingstack/central/rpcapi.py delete mode 100644 billingstack/central/service.py delete mode 100644 billingstack/central/storage/__init__.py delete mode 100644 billingstack/central/storage/impl_sqlalchemy/__init__.py delete mode 100644 billingstack/central/storage/impl_sqlalchemy/migration/README.md delete mode 100644 billingstack/central/storage/impl_sqlalchemy/migration/__init__.py delete mode 100644 billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini delete mode 100644 billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py delete mode 100644 billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py delete mode 100644 billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako delete mode 
100644 billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README delete mode 100644 billingstack/central/storage/impl_sqlalchemy/migration/cli.py delete mode 100644 billingstack/central/storage/impl_sqlalchemy/models.py delete mode 100644 billingstack/collector/__init__.py delete mode 100644 billingstack/collector/flows/__init__.py delete mode 100644 billingstack/collector/flows/gateway_configuration.py delete mode 100644 billingstack/collector/flows/payment_method.py delete mode 100644 billingstack/collector/rpcapi.py delete mode 100644 billingstack/collector/service.py delete mode 100644 billingstack/collector/states.py delete mode 100644 billingstack/collector/storage/__init__.py delete mode 100644 billingstack/collector/storage/impl_sqlalchemy.py delete mode 100644 billingstack/conf.py delete mode 100644 billingstack/exceptions.py delete mode 100644 billingstack/manage/__init__.py delete mode 100644 billingstack/manage/base.py delete mode 100644 billingstack/manage/database.py delete mode 100644 billingstack/manage/provider.py delete mode 100644 billingstack/netconf.py delete mode 100644 billingstack/openstack/__init__.py delete mode 100644 billingstack/openstack/common/__init__.py delete mode 100644 billingstack/openstack/common/context.py delete mode 100644 billingstack/openstack/common/crypto/__init__.py delete mode 100644 billingstack/openstack/common/crypto/utils.py delete mode 100644 billingstack/openstack/common/db/__init__.py delete mode 100644 billingstack/openstack/common/db/api.py delete mode 100644 billingstack/openstack/common/db/exception.py delete mode 100644 billingstack/openstack/common/db/sqlalchemy/__init__.py delete mode 100644 billingstack/openstack/common/db/sqlalchemy/models.py delete mode 100644 billingstack/openstack/common/db/sqlalchemy/utils.py delete mode 100644 billingstack/openstack/common/eventlet_backdoor.py delete mode 100644 billingstack/openstack/common/exception.py delete mode 100644 
billingstack/openstack/common/excutils.py delete mode 100644 billingstack/openstack/common/fileutils.py delete mode 100644 billingstack/openstack/common/gettextutils.py delete mode 100644 billingstack/openstack/common/importutils.py delete mode 100644 billingstack/openstack/common/iniparser.py delete mode 100644 billingstack/openstack/common/jsonutils.py delete mode 100644 billingstack/openstack/common/local.py delete mode 100644 billingstack/openstack/common/lockutils.py delete mode 100644 billingstack/openstack/common/log.py delete mode 100644 billingstack/openstack/common/loopingcall.py delete mode 100644 billingstack/openstack/common/network_utils.py delete mode 100644 billingstack/openstack/common/notifier/__init__.py delete mode 100644 billingstack/openstack/common/notifier/api.py delete mode 100644 billingstack/openstack/common/notifier/log_notifier.py delete mode 100644 billingstack/openstack/common/notifier/no_op_notifier.py delete mode 100644 billingstack/openstack/common/notifier/rabbit_notifier.py delete mode 100644 billingstack/openstack/common/notifier/rpc_notifier.py delete mode 100644 billingstack/openstack/common/notifier/rpc_notifier2.py delete mode 100644 billingstack/openstack/common/notifier/test_notifier.py delete mode 100644 billingstack/openstack/common/processutils.py delete mode 100644 billingstack/openstack/common/rpc/__init__.py delete mode 100644 billingstack/openstack/common/rpc/amqp.py delete mode 100644 billingstack/openstack/common/rpc/common.py delete mode 100644 billingstack/openstack/common/rpc/dispatcher.py delete mode 100644 billingstack/openstack/common/rpc/impl_fake.py delete mode 100644 billingstack/openstack/common/rpc/impl_kombu.py delete mode 100644 billingstack/openstack/common/rpc/impl_qpid.py delete mode 100644 billingstack/openstack/common/rpc/impl_zmq.py delete mode 100644 billingstack/openstack/common/rpc/matchmaker.py delete mode 100644 billingstack/openstack/common/rpc/matchmaker_redis.py delete mode 100644 
billingstack/openstack/common/rpc/matchmaker_ring.py delete mode 100644 billingstack/openstack/common/rpc/proxy.py delete mode 100644 billingstack/openstack/common/rpc/securemessage.py delete mode 100644 billingstack/openstack/common/rpc/serializer.py delete mode 100644 billingstack/openstack/common/rpc/service.py delete mode 100644 billingstack/openstack/common/rpc/zmq_receiver.py delete mode 100644 billingstack/openstack/common/service.py delete mode 100644 billingstack/openstack/common/sslutils.py delete mode 100644 billingstack/openstack/common/test.py delete mode 100644 billingstack/openstack/common/threadgroup.py delete mode 100644 billingstack/openstack/common/timeutils.py delete mode 100644 billingstack/openstack/common/utils.py delete mode 100644 billingstack/openstack/common/uuidutils.py delete mode 100644 billingstack/openstack/common/versionutils.py delete mode 100644 billingstack/openstack/common/wsgi.py delete mode 100644 billingstack/openstack/common/xmlutils.py delete mode 100644 billingstack/paths.py delete mode 100644 billingstack/payment_gateway/__init__.py delete mode 100644 billingstack/payment_gateway/base.py delete mode 100644 billingstack/payment_gateway/dummy.py delete mode 100644 billingstack/plugin.py delete mode 100644 billingstack/rater/__init__.py delete mode 100644 billingstack/rater/rpcapi.py delete mode 100644 billingstack/rater/service.py delete mode 100644 billingstack/rater/storage/__init__.py delete mode 100644 billingstack/rater/storage/impl_sqlalchemy.py delete mode 100644 billingstack/samples.py delete mode 100644 billingstack/samples_data/contact_info.json delete mode 100644 billingstack/samples_data/currency.json delete mode 100644 billingstack/samples_data/customer.json delete mode 100644 billingstack/samples_data/fixtures/currencies_get_response.json delete mode 100644 billingstack/samples_data/fixtures/currencies_post_request.json delete mode 100644 billingstack/samples_data/fixtures/currencies_post_response.json delete 
mode 100644 billingstack/samples_data/fixtures/languages_get_response.json delete mode 100644 billingstack/samples_data/fixtures/languages_post_request.json delete mode 100644 billingstack/samples_data/fixtures/languages_post_response.json delete mode 100644 billingstack/samples_data/fixtures/merchant_products_get_response.json delete mode 100644 billingstack/samples_data/fixtures/merchant_products_post_request.json delete mode 100644 billingstack/samples_data/fixtures/merchant_products_post_response.json delete mode 100644 billingstack/samples_data/fixtures/merchant_users_get_response.json delete mode 100644 billingstack/samples_data/fixtures/merchant_users_post_request.json delete mode 100644 billingstack/samples_data/fixtures/merchant_users_post_response.json delete mode 100644 billingstack/samples_data/fixtures/merchants_get_response.json delete mode 100644 billingstack/samples_data/fixtures/merchants_post_request.json delete mode 100644 billingstack/samples_data/fixtures/merchants_post_response.json delete mode 100644 billingstack/samples_data/fixtures/payment_gateway_providers_get_response.json delete mode 100644 billingstack/samples_data/fixtures/payment_gateway_providers_post_request.json delete mode 100644 billingstack/samples_data/fixtures/payment_gateway_providers_post_response.json delete mode 100644 billingstack/samples_data/invoice_state.json delete mode 100644 billingstack/samples_data/language.json delete mode 100644 billingstack/samples_data/merchant.json delete mode 100644 billingstack/samples_data/payment_method.json delete mode 100644 billingstack/samples_data/pg_config.json delete mode 100644 billingstack/samples_data/pg_method.json delete mode 100644 billingstack/samples_data/pg_provider.json delete mode 100644 billingstack/samples_data/plan.json delete mode 100644 billingstack/samples_data/product.json delete mode 100644 billingstack/samples_data/user.json delete mode 100644 billingstack/service.py delete mode 100644 
billingstack/sqlalchemy/__init__.py delete mode 100644 billingstack/sqlalchemy/api.py delete mode 100644 billingstack/sqlalchemy/model_base.py delete mode 100644 billingstack/sqlalchemy/session.py delete mode 100644 billingstack/sqlalchemy/types.py delete mode 100644 billingstack/sqlalchemy/utils.py delete mode 100644 billingstack/storage/__init__.py delete mode 100644 billingstack/storage/base.py delete mode 100644 billingstack/storage/filterer.py delete mode 100644 billingstack/storage/utils.py delete mode 100644 billingstack/tasks.py delete mode 100644 billingstack/tests/__init__.py delete mode 100644 billingstack/tests/api/__init__.py delete mode 100644 billingstack/tests/api/base.py delete mode 100644 billingstack/tests/api/v2/__init__.py delete mode 100644 billingstack/tests/api/v2/test_currency.py delete mode 100644 billingstack/tests/api/v2/test_customer.py delete mode 100644 billingstack/tests/api/v2/test_invoice_state.py delete mode 100644 billingstack/tests/api/v2/test_language.py delete mode 100644 billingstack/tests/api/v2/test_merchant.py delete mode 100644 billingstack/tests/api/v2/test_payment_method.py delete mode 100644 billingstack/tests/api/v2/test_plan.py delete mode 100644 billingstack/tests/api/v2/test_product.py delete mode 100644 billingstack/tests/base.py delete mode 100644 billingstack/tests/biller/__init__.py delete mode 100644 billingstack/tests/biller/storage/__init__.py delete mode 100644 billingstack/tests/central/__init__.py delete mode 100644 billingstack/tests/central/storage/__init__.py delete mode 100644 billingstack/tests/central/storage/test_sqlalchemy.py delete mode 100644 billingstack/tests/collector/__init__.py delete mode 100644 billingstack/tests/collector/storage/__init__.py delete mode 100644 billingstack/tests/collector/storage/test_sqlalchemy.py delete mode 100644 billingstack/tests/payment_gateway/__init__.py delete mode 100644 billingstack/tests/payment_gateway/base.py delete mode 100644 
billingstack/tests/rater/__init__.py delete mode 100644 billingstack/tests/rater/storage/__init__.py delete mode 100644 billingstack/tests/storage/__init__.py delete mode 100644 billingstack/tests/test_utils.py delete mode 100644 billingstack/utils.py delete mode 100644 billingstack/version.py delete mode 100755 bin/billingstack-db-manage delete mode 100755 bin/billingstack-manage delete mode 100755 bin/billingstack-rpc-zmq-receiver delete mode 100644 doc/requirements.txt delete mode 100644 doc/source/api.rst delete mode 100644 doc/source/architecture.rst delete mode 100644 doc/source/conf.py delete mode 100644 doc/source/database.yuml delete mode 100644 doc/source/developing.rst delete mode 100644 doc/source/glossary.rst delete mode 100644 doc/source/index.rst delete mode 100644 doc/source/install/common.rst delete mode 100644 doc/source/install/index.rst delete mode 100644 doc/source/install/macos.rst delete mode 100644 doc/source/install/manual.rst delete mode 100644 doc/source/install/packages.rst delete mode 100644 doc/source/install/pgp.rst delete mode 100644 doc/source/payment.yuml delete mode 100644 doc/source/resources/api_filtering.rst delete mode 100644 doc/source/resources/index.rst delete mode 100644 doc/source/resources/subscriptions.rst delete mode 100644 etc/billingstack/billingstack.conf.sample delete mode 100644 etc/billingstack/policy.json delete mode 100644 openstack.conf delete mode 100644 requirements.txt delete mode 100755 run_tests.sh delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 test-requirements.txt delete mode 100755 tools/control.sh delete mode 100644 tools/load_samples.py delete mode 100644 tools/resync_storage.py delete mode 100755 tools/with_venv.sh delete mode 100644 tox.ini diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 8120c13..0000000 --- a/.coveragerc +++ /dev/null @@ -1,7 +0,0 @@ -[run] -branch = True -source = billingstack -omit = billingstack/tests/*,billingstack/openstack/* 
- -[report] -ignore-errors = True diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 2a59534..0000000 --- a/.gitignore +++ /dev/null @@ -1,58 +0,0 @@ -*.py[cod] - -# C extensions -*.so - -# Packages -*.egg -*.egg-info -dist -build -eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg -lib -lib64 - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -.coverage -.tox -nosetests.xml -.testrepository - -# Translations -*.mo - -# Mr Developer -.mr.developer.cfg -.project -.pydevproject -.venv -.codeintel - -doc/source/api/* -doc/build/* -AUTHORS -TAGS -ChangeLog - -# Project specific -etc/billingstack/*.ini -etc/billingstack/*.conf -billingstack/versioninfo -*.sqlite - - -billingstack-screenrc -status -logs -.ropeproject -*.sublime-project -*.sublime-workspace diff --git a/.gitreview b/.gitreview deleted file mode 100644 index dc4afc4..0000000 --- a/.gitreview +++ /dev/null @@ -1,4 +0,0 @@ -[gerrit] -host=review.openstack.org -port=29418 -project=stackforge/billingstack.git diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 93fab95..0000000 --- a/.pylintrc +++ /dev/null @@ -1,42 +0,0 @@ -# The format of this file isn't really documented; just use --generate-rcfile -[MASTER] -# Add to the black list. It should be a base name, not a -# path. You may set this option multiple times. -ignore=test - -[Messages Control] -# NOTE(justinsb): We might want to have a 2nd strict pylintrc in future -# C0111: Don't require docstrings on every method -# W0511: TODOs in code comments are fine. -# W0142: *args and **kwargs are fine. -# W0622: Redefining id is fine. 
-disable=C0111,W0511,W0142,W0622 - -[Basic] -# Variable names can be 1 to 31 characters long, with lowercase and underscores -variable-rgx=[a-z_][a-z0-9_]{0,30}$ - -# Argument names can be 2 to 31 characters long, with lowercase and underscores -argument-rgx=[a-z_][a-z0-9_]{1,30}$ - -# Method names should be at least 3 characters long -# and be lowecased with underscores -method-rgx=([a-z_][a-z0-9_]{2,50}|setUp|tearDown)$ - -# Module names matching billingstack-* are ok (files in bin/) -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+)|(billingstack-[a-z0-9_-]+))$ - -# Don't require docstrings on tests. -no-docstring-rgx=((__.*__)|([tT]est.*)|setUp|tearDown)$ - -[Design] -max-public-methods=100 -min-public-methods=0 -max-args=6 - -[Variables] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -# _ is used by our localization -additional-builtins=_ diff --git a/.testr.conf b/.testr.conf deleted file mode 100644 index 60477e8..0000000 --- a/.testr.conf +++ /dev/null @@ -1,4 +0,0 @@ -[DEFAULT] -test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} ${PYTHON:-python} -m subunit.run discover -t ./ ./ $LISTOPT $IDOPTION -test_id_option=--load-list $IDFILE -test_list_option=--list diff --git a/HACKING.rst b/HACKING.rst deleted file mode 100644 index 5153db1..0000000 --- a/HACKING.rst +++ /dev/null @@ -1,253 +0,0 @@ -BillingStack Style Commandments -=============================== - -- Step 1: Read http://www.python.org/dev/peps/pep-0008/ -- Step 2: Read http://www.python.org/dev/peps/pep-0008/ again -- Step 3: Read on - - -General -------- -- Put two newlines between top-level code (funcs, classes, etc) -- Put one newline between methods in classes and anywhere else -- Do not write "except:", use "except Exception:" at the very least -- Include your name with TODOs as in "#TODO(termie)" -- Do not name anything the same name as a built-in or 
reserved word -- Use the "is not" operator when testing for unequal identities. Example:: - - if not X is Y: # BAD, intended behavior is ambiguous - pass - - if X is not Y: # OKAY, intuitive - pass - -- Use the "not in" operator for evaluating membership in a collection. Example:: - - if not X in Y: # BAD, intended behavior is ambiguous - pass - - if X not in Y: # OKAY, intuitive - pass - - if not (X in Y or X in Z): # OKAY, still better than all those 'not's - pass - - -Imports -------- -- Do not make relative imports -- Order your imports by the full module path -- Organize your imports according to the following template - -Example:: - - # vim: tabstop=4 shiftwidth=4 softtabstop=4 - {{stdlib imports in human alphabetical order}} - \n - {{third-party lib imports in human alphabetical order}} - \n - {{billingstack imports in human alphabetical order}} - \n - \n - {{begin your code}} - - -Human Alphabetical Order Examples ---------------------------------- -Example:: - - import httplib - import logging - import random - import StringIO - import time - import unittest - - import eventlet - import webob.exc - - from billingstack.api import v1 - from billingstack.central import rpc_api - from billingstack.rater import rpc_api - - -Docstrings ----------- - -Docstrings are required for all functions and methods. - -Docstrings should ONLY use triple-double-quotes (``"""``) - -Single-line docstrings should NEVER have extraneous whitespace -between enclosing triple-double-quotes. - -**INCORRECT** :: - - """ There is some whitespace between the enclosing quotes :( """ - -**CORRECT** :: - - """There is no whitespace between the enclosing quotes :)""" - -Docstrings that span more than one line should look like this: - -Example:: - - """ - Start the docstring on the line following the opening triple-double-quote - - If you are going to describe parameters and return values, use Sphinx, the - appropriate syntax is as follows. 
- - :param foo: the foo parameter - :param bar: the bar parameter - :returns: return_type -- description of the return value - :returns: description of the return value - :raises: AttributeError, KeyError - """ - -**DO NOT** leave an extra newline before the closing triple-double-quote. - - -Dictionaries/Lists ------------------- -If a dictionary (dict) or list object is longer than 80 characters, its items -should be split with newlines. Embedded iterables should have their items -indented. Additionally, the last item in the dictionary should have a trailing -comma. This increases readability and simplifies future diffs. - -Example:: - - my_dictionary = { - "image": { - "name": "Just a Snapshot", - "size": 2749573, - "properties": { - "user_id": 12, - "arch": "x86_64", - }, - "things": [ - "thing_one", - "thing_two", - ], - "status": "ACTIVE", - }, - } - - -Calling Methods ---------------- -Calls to methods 80 characters or longer should format each argument with -newlines. This is not a requirement, but a guideline:: - - unnecessarily_long_function_name('string one', - 'string two', - kwarg1=constants.ACTIVE, - kwarg2=['a', 'b', 'c']) - - -Rather than constructing parameters inline, it is better to break things up:: - - list_of_strings = [ - 'what_a_long_string', - 'not as long', - ] - - dict_of_numbers = { - 'one': 1, - 'two': 2, - 'twenty four': 24, - } - - object_one.call_a_method('string three', - 'string four', - kwarg1=list_of_strings, - kwarg2=dict_of_numbers) - - -Internationalization (i18n) Strings ------------------------------------ -In order to support multiple languages, we have a mechanism to support -automatic translations of exception and log strings. - -Example:: - - msg = _("An error occurred") - raise HTTPBadRequest(explanation=msg) - -If you have a variable to place within the string, first internationalize the -template string then do the replacement. 
- -Example:: - - msg = _("Missing parameter: %s") % ("flavor",) - LOG.error(msg) - -If you have multiple variables to place in the string, use keyword parameters. -This helps our translators reorder parameters when needed. - -Example:: - - msg = _("The server with id %(s_id)s has no key %(m_key)s") - LOG.error(msg % {"s_id": "1234", "m_key": "imageId"}) - - -Creating Unit Tests -------------------- -For every new feature, unit tests should be created that both test and -(implicitly) document the usage of said feature. If submitting a patch for a -bug that had no unit test, a new passing unit test should be added. If a -submitted bug fix does have a unit test, be sure to add a new one that fails -without the patch and passes with the patch. - - -Commit Messages ---------------- -Using a common format for commit messages will help keep our git history -readable. Follow these guidelines: - - First, provide a brief summary of 50 characters or less. Summaries - of greater then 72 characters will be rejected by the gate. - - The first line of the commit message should provide an accurate - description of the change, not just a reference to a bug or - blueprint. It must be followed by a single blank line. - - Following your brief summary, provide a more detailed description of - the patch, manually wrapping the text at 72 characters. This - description should provide enough detail that one does not have to - refer to external resources to determine its high-level functionality. - - Once you use 'git review', two lines will be appended to the commit - message: a blank line followed by a 'Change-Id'. This is important - to correlate this commit with a specific review in Gerrit, and it - should not be modified. 
- -For further information on constructing high quality commit messages, -and how to split up commits into a series of changes, consult the -project wiki: - - http://wiki.openstack.org/GitCommitMessages - - -openstack-common ----------------- - -A number of modules from openstack-common are imported into the project. - -These modules are "incubating" in openstack-common and are kept in sync -with the help of openstack-common's update.py script. See: - - http://wiki.openstack.org/CommonLibrary#Incubation - -The copy of the code should never be directly modified here. Please -always update openstack-common first and then run the script to copy -the changes across. - - -Logging -------- -Use __name__ as the name of your logger and name your module-level logger -objects 'LOG':: - - LOG = logging.getLogger(__name__) diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 67db858..0000000 --- a/LICENSE +++ /dev/null @@ -1,175 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index d2bad60..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,11 +0,0 @@ -include AUTHORS -include ChangeLog -include billingstack/versioninfo -include *.txt *.ini *.cfg *.rst *.md -include etc/billingstack/*.sample -include etc/billingstack/policy.json - -exclude .gitignore -exclude .gitreview -exclude *.sublime-project -global-exclude *.pyc diff --git a/README.rst b/README.rst index fe84973..9006052 100644 --- a/README.rst +++ b/README.rst @@ -1,8 +1,7 @@ -BillingStack -============ +This project is no longer maintained. 
-Site: www.billingstack.org +The contents of this repository are still available in the Git source code +management system. To see the contents of this repository before it reached +its end of life, please check out the previous commit with +"git checkout HEAD^1". -Docs: http://billingstack.rtfd.org -Github: http://github.com/stackforge/billingstack -Bugs: http://launchpad.net/billingstack diff --git a/billingstack.sublime-project b/billingstack.sublime-project deleted file mode 100644 index 87c9755..0000000 --- a/billingstack.sublime-project +++ /dev/null @@ -1,59 +0,0 @@ -{ - "folders": - [ - { - "file_exclude_patterns": - [ - "*.pyc", - "*.pyo", - "*.exe", - "*.dll", - "*.obj", - "*.o", - "*.a", - "*.lib", - "*.so", - "*.dylib", - "*.ncb", - "*.sdf", - "*.suo", - "*.pdb", - "*.idb", - ".DS_Store", - "*.class", - "*.psd", - "*.db", - ".vagrant", - ".noseids" - ], - "folder_exclude_patterns": - [ - ".svn", - ".git", - ".hg", - "CVS", - "*.egg", - "*.egg-info", - ".tox", - "venv", - ".venv", - "doc/build", - "doc/source/api" - ], - "path": "." - } - ], - "settings": - { - "default_line_ending": "unix", - "detect_indentation": false, - "ensure_newline_at_eof_on_save": true, - "rulers": - [ - 79 - ], - "tab_size": 4, - "translate_tabs_to_spaces": true, - "trim_trailing_white_space_on_save": true - } -} diff --git a/billingstack/__init__.py b/billingstack/__init__.py deleted file mode 100644 index f7ed5c6..0000000 --- a/billingstack/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/api/__init__.py b/billingstack/api/__init__.py deleted file mode 100644 index 0defd31..0000000 --- a/billingstack/api/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2013 Woorea Solutions, S.L -# -# Author: Luis Gervaso -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -from oslo.config import cfg - -API_SERVICE_OPTS = [ - cfg.IntOpt('api_port', default=9091, - help='The port for the billing API server'), - cfg.IntOpt('api_listen', default='0.0.0.0', help='Bind to address'), - cfg.StrOpt('auth_strategy', default='noauth', - help='The strategy to use for auth. 
Supports noauth or ' - 'keystone'), -] - -cfg.CONF.register_opts(API_SERVICE_OPTS, 'service:api') diff --git a/billingstack/api/app.py b/billingstack/api/app.py deleted file mode 100644 index 3819883..0000000 --- a/billingstack/api/app.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import logging -import os -import pecan -from oslo.config import cfg -from wsgiref import simple_server - -from billingstack import service -from billingstack.api import hooks -from billingstack.openstack.common import log - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -LOG = log.getLogger(__name__) - - -def get_config(): - conf = { - 'app': { - 'root': 'billingstack.api.v2.controllers.root.RootController', - 'modules': ['designate.api.v2'], - } - } - return pecan.configuration.conf_from_dict(conf) - - -def setup_app(pecan_config=None, extra_hooks=None): - app_hooks = [ - hooks.NoAuthHook() - ] - - if extra_hooks: - app_hooks.extend(extra_hooks) - - pecan_config = pecan_config or get_config() - - pecan.configuration.set_config(dict(pecan_config), overwrite=True) - - app = pecan.make_app( - pecan_config.app.root, - debug=cfg.CONF.debug, - hooks=app_hooks, - force_canonical=getattr(pecan_config.app, 'force_canonical', True) - ) - - return app - - -class VersionSelectorApplication(object): - def __init__(self): - self.v2 = setup_app() - - def __call__(self, environ, start_response): - return 
self.v2(environ, start_response) - - -def start(): - service.prepare_service() - - root = VersionSelectorApplication() - - host = cfg.CONF['service:api'].api_listen - port = cfg.CONF['service:api'].api_port - - srv = simple_server.make_server(host, port, root) - - LOG.info('Starting server in PID %s' % os.getpid()) - LOG.info("Configuration:") - cfg.CONF.log_opt_values(LOG, logging.INFO) - - if host == '0.0.0.0': - LOG.info('serving on 0.0.0.0:%s, view at http://127.0.0.1:%s' % - (port, port)) - else: - LOG.info("serving on http://%s:%s" % (host, port)) - - srv.serve_forever() diff --git a/billingstack/api/base.py b/billingstack/api/base.py deleted file mode 100644 index 08bd938..0000000 --- a/billingstack/api/base.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import pecan.rest - -from wsme.types import Base, Enum, UserType, text, Unset, wsproperty - -from oslo.config import cfg - -from billingstack.openstack.common import log - - -LOG = log.getLogger(__name__) - - -cfg.CONF.register_opts([ - cfg.StrOpt('cors_allowed_origin', default='*', help='Allowed CORS Origin'), - cfg.IntOpt('cors_max_age', default=3600)]) - - -CORS_ALLOW_HEADERS = [ - 'origin', - 'authorization', - 'accept', - 'content-type', - 'x-requested-with' -] - - -class RestController(pecan.rest.RestController): - def _handle_patch(self, method, remainder): - return self._handle_post(method, remainder) - - -class Property(UserType): - """ - A Property that just passes the value around... - """ - def tonativetype(self, value): - return value - - def fromnativetype(self, value): - return value - - -property_type = Property() - - -def _query_to_criterion(query, storage_func=None, **kw): - """ - Iterate over the query checking against the valid signatures (later). - - :param query: A list of queries. - :param storage_func: The name of the storage function to very against. - """ - translation = { - 'customer': 'customer_id' - } - - criterion = {} - for q in query: - key = translation.get(q.field, q.field) - criterion[key] = q.as_dict() - - criterion.update(kw) - - return criterion - - -operation_kind = Enum(str, 'lt', 'le', 'eq', 'ne', 'ge', 'gt') - - -class Query(Base): - """ - Query filter. - """ - - _op = None # provide a default - - def get_op(self): - return self._op or 'eq' - - def set_op(self, value): - self._op = value - - field = text - "The name of the field to test" - - #op = wsme.wsattr(operation_kind, default='eq') - # this ^ doesn't seem to work. - op = wsproperty(operation_kind, get_op, set_op) - "The comparison operator. Defaults to 'eq'." 
- - value = text - "The value to compare against the stored data" - - def __repr__(self): - # for LOG calls - return '' % (self.field, self.op, self.value) - - @classmethod - def sample(cls): - return cls(field='resource_id', - op='eq', - value='bd9431c1-8d69-4ad3-803a-8d4a6b89fd36', - ) - - def as_dict(self): - return { - 'op': self.op, - 'field': self.field, - 'value': self.value - } - - -class ModelBase(Base): - def as_dict(self): - """ - Return this model as a dict - """ - data = {} - - for attr in self._wsme_attributes: - value = attr.__get__(self, self.__class__) - if value is not Unset: - if isinstance(value, Base) and hasattr(value, "as_dict"): - value = value.as_dict() - data[attr.name] = value - return data - - def to_db(self): - """ - Returns this Model object as it's DB form - - Example - 'currency' vs 'currency_name' - """ - return self.as_dict() - - @classmethod - def from_db(cls, values): - """ - Return a class of this object from values in the from_db - """ - return cls(**values) diff --git a/billingstack/api/hooks.py b/billingstack/api/hooks.py deleted file mode 100644 index e68269e..0000000 --- a/billingstack/api/hooks.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import hooks - -from billingstack.openstack.common.context import RequestContext - - -class NoAuthHook(hooks.PecanHook): - """ - Simple auth - all requests will be is_admin=True - """ - def merchant_id(self, path): - """ - Get merchant id from url - """ - parts = [p for p in path.split('/') if p] - try: - index = parts.index('merchants') + 1 - return parts[index] - except ValueError: - return - except IndexError: - return - - def before(self, state): - merchant_id = self.merchant_id(state.request.path_url) - state.request.ctxt = RequestContext(tenant=merchant_id, is_admin=True) diff --git a/billingstack/api/templates/error.html b/billingstack/api/templates/error.html deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/api/templates/index.html b/billingstack/api/templates/index.html deleted file mode 100644 index 27ae7ff..0000000 --- a/billingstack/api/templates/index.html +++ /dev/null @@ -1,9 +0,0 @@ - - - BillingStack Diagnostics - - -

Diagnostics

-

Here you'll find some basic information about your BillingStack server

- - diff --git a/billingstack/api/utils.py b/billingstack/api/utils.py deleted file mode 100644 index d0bc991..0000000 --- a/billingstack/api/utils.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: http://flask.pocoo.org/snippets/56/ -from datetime import timedelta -from flask import make_response, request, current_app -import functools - - -def crossdomain(origin=None, methods=None, headers=None, - max_age=21600, attach_to_all=True, - automatic_options=True): - if methods is not None: - methods = ', '.join(sorted(x.upper() for x in methods)) - if headers is not None and not isinstance(headers, basestring): - headers = ', '.join(x.upper() for x in headers) - if not isinstance(origin, basestring): - origin = ', '.join(origin) - if isinstance(max_age, timedelta): - max_age = max_age.total_seconds() - - def get_methods(): - if methods is not None: - return methods - - options_resp = current_app.make_default_options_response() - return options_resp.headers['allow'] - - def decorator(f): - def wrapped_function(*args, **kw): - if automatic_options and request.method == 'OPTIONS': - resp = current_app.make_default_options_response() - else: - resp = make_response(f(*args, **kw)) - if not attach_to_all and request.method != 'OPTIONS': - return resp - - h = resp.headers - - h['Access-Control-Allow-Origin'] = origin - h['Access-Control-Allow-Credentials'] = 'true' - 
h['Access-Control-Allow-Methods'] = get_methods() - h['Access-Control-Max-Age'] = str(max_age) - if headers is not None: - h['Access-Control-Allow-Headers'] = headers - return resp - - f.provide_automatic_options = False - f.required_methods = ['OPTIONS'] - return functools.update_wrapper(wrapped_function, f) - return decorator diff --git a/billingstack/api/v2/__init__.py b/billingstack/api/v2/__init__.py deleted file mode 100644 index 71751cb..0000000 --- a/billingstack/api/v2/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -cfg.CONF.import_opt('state_path', 'billingstack.paths') diff --git a/billingstack/api/v2/controllers/__init__.py b/billingstack/api/v2/controllers/__init__.py deleted file mode 100644 index f7ed5c6..0000000 --- a/billingstack/api/v2/controllers/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/api/v2/controllers/currency.py b/billingstack/api/v2/controllers/currency.py deleted file mode 100644 index 6f7176d..0000000 --- a/billingstack/api/v2/controllers/currency.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class CurrencyController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme_pecan.wsexpose(models.Currency) - def get_all(self): - row = central_api.get_currency(request.ctxt, self.id_) - - return models.Currency.from_db(row) - - @wsme.validate(models.Currency) - @wsme_pecan.wsexpose(models.Currency, body=models.Currency) - def patch(self, body): - row = central_api.update_currency(request.ctxt, self.id_, body.to_db()) - return models.Currency.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_currency(request.ctxt, self.id_) - - -class CurrenciesController(RestController): - @expose() - def _lookup(self, currency_id, *remainder): - return CurrencyController(currency_id), remainder - - 
@wsme.validate(models.Currency) - @wsme_pecan.wsexpose(models.Currency, body=models.Currency, - status_code=202) - def post(self, body): - row = central_api.create_currency(request.ctxt, body.to_db()) - - return models.Currency.from_db(row) - - @wsme_pecan.wsexpose([models.Currency], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_currencies( - request.ctxt, criterion=criterion) - - return map(models.Currency.from_db, rows) diff --git a/billingstack/api/v2/controllers/customer.py b/billingstack/api/v2/controllers/customer.py deleted file mode 100644 index ea16ebd..0000000 --- a/billingstack/api/v2/controllers/customer.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.api.v2.controllers.payment import PaymentMethodsController -from billingstack.central.rpcapi import central_api - - -class CustomerController(RestController): - payment_methods = PaymentMethodsController() - - def __init__(self, id_): - self.id_ = id_ - request.context['customer_id'] = id_ - - @wsme_pecan.wsexpose(models.Customer) - def get_all(self): - row = central_api.get_customer(request.ctxt, self.id_) - - return models.Customer.from_db(row) - - @wsme.validate(models.Customer) - @wsme_pecan.wsexpose(models.Customer, body=models.Customer) - def patch(self, body): - row = central_api.update_customer(request.ctxt, self.id_, body.to_db()) - return models.Customer.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_customer(request.ctxt, self.id_) - - -class CustomersController(RestController): - @expose() - def _lookup(self, customer_id, *remainder): - return CustomerController(customer_id), remainder - - @wsme.validate(models.Customer) - @wsme_pecan.wsexpose(models.Customer, body=models.Customer, - status_code=202) - def post(self, body): - row = central_api.create_customer( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Customer.from_db(row) - - @wsme_pecan.wsexpose([models.Customer], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_customers( - request.ctxt, criterion=criterion) - - return map(models.Customer.from_db, rows) diff --git a/billingstack/api/v2/controllers/invoice.py b/billingstack/api/v2/controllers/invoice.py deleted file mode 100644 index 3bc1b0e..0000000 --- a/billingstack/api/v2/controllers/invoice.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.biller.rpcapi import biller_api - - -class InvoiceController(RestController): - def __init__(self, id_): - self.id_ = id_ - request.context['invoice_id'] = id_ - - @wsme_pecan.wsexpose(models.Invoice) - def get_all(self): - row = biller_api.get_invoice(request.ctxt, self.id_) - - return models.Invoice.from_db(row) - - @wsme.validate(models.Invoice) - @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice) - def patch(self, body): - row = biller_api.update_invoice(request.ctxt, self.id_, body.to_db()) - - return models.Invoice.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - biller_api.delete_invoice(request.ctxt, self.id_) - - -class InvoicesController(RestController): - @expose() - def _lookup(self, invoice_id, *remainder): - return InvoiceController(invoice_id), remainder - - @wsme.validate(models.Invoice) - @wsme_pecan.wsexpose(models.Invoice, body=models.Invoice, status_code=202) - def post(self, body): - row = biller_api.create_invoice( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Invoice.from_db(row) - - @wsme_pecan.wsexpose([models.Invoice], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - 
merchant_id=request.context['merchant_id']) - - rows = biller_api.list_invoices( - request.ctxt, criterion=criterion) - - return map(models.Invoice.from_db, rows) diff --git a/billingstack/api/v2/controllers/invoice_state.py b/billingstack/api/v2/controllers/invoice_state.py deleted file mode 100644 index 0852a6a..0000000 --- a/billingstack/api/v2/controllers/invoice_state.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.biller.rpcapi import biller_api - - -class InvoiceStateController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme_pecan.wsexpose(models.InvoiceState) - def get_all(self): - row = biller_api.get_invoice_state(request.ctxt, self.id_) - - return models.InvoiceState.from_db(row) - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState) - def patch(self, body): - row = biller_api.update_invoice_state( - request.ctxt, self.id_, body.to_db()) - return models.InvoiceState.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - biller_api.delete_invoice_state(request.ctxt, self.id_) - - -class InvoiceStatesController(RestController): - @expose() - def _lookup(self, invoice_state_id, *remainder): - return InvoiceStateController(invoice_state_id), remainder - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.InvoiceState, body=models.InvoiceState, - status_code=202) - def post(self, body): - row = biller_api.create_invoice_state(request.ctxt, body.to_db()) - - return models.InvoiceState.from_db(row) - - @wsme_pecan.wsexpose([models.InvoiceState], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = biller_api.list_invoice_states( - request.ctxt, criterion=criterion) - - return map(models.InvoiceState.from_db, rows) diff --git a/billingstack/api/v2/controllers/language.py b/billingstack/api/v2/controllers/language.py deleted file mode 100644 index 691f0d8..0000000 --- a/billingstack/api/v2/controllers/language.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in 
compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class LanguageController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme_pecan.wsexpose(models.Language) - def get_all(self): - row = central_api.get_language(request.ctxt, self.id_) - - return models.Language.from_db(row) - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.Language, body=models.Language) - def patch(self, body): - row = central_api.update_language(request.ctxt, self.id_, body.to_db()) - return models.Language.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_language(request.ctxt, self.id_) - - -class LanguagesController(RestController): - @expose() - def _lookup(self, language_id, *remainder): - return LanguageController(language_id), remainder - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.Language, body=models.Language, - status_code=202) - def post(self, body): - row = central_api.create_language(request.ctxt, body.to_db()) - - return models.Language.from_db(row) - - @wsme_pecan.wsexpose([models.Language], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_languages( - request.ctxt, criterion=criterion) - - return map(models.Language.from_db, rows) diff --git 
a/billingstack/api/v2/controllers/merchant.py b/billingstack/api/v2/controllers/merchant.py deleted file mode 100644 index e42ea74..0000000 --- a/billingstack/api/v2/controllers/merchant.py +++ /dev/null @@ -1,85 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api -from billingstack.api.v2.controllers.customer import CustomersController -from billingstack.api.v2.controllers.payment import PGConfigsController -from billingstack.api.v2.controllers.plan import PlansController -from billingstack.api.v2.controllers.product import ProductsController -from billingstack.api.v2.controllers.subscription import \ - SubscriptionsController -from billingstack.api.v2.controllers.invoice import InvoicesController -from billingstack.api.v2.controllers.usage import UsagesController - - -class MerchantController(RestController): - customers = CustomersController() - payment_gateway_configurations = PGConfigsController() - plans = PlansController() - products = ProductsController() - subscriptions = SubscriptionsController() - - invoices = InvoicesController() - usage = UsagesController() - - def __init__(self, id_): - self.id_ = id_ - request.context['merchant_id'] = id_ - - 
@wsme_pecan.wsexpose(models.Merchant) - def get_all(self): - row = central_api.get_merchant(request.ctxt, self.id_) - - return models.Merchant.from_db(row) - - @wsme.validate(models.InvoiceState) - @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant) - def patch(self, body): - row = central_api.update_merchant(request.ctxt, self.id_, body.to_db()) - return models.Merchant.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_merchant(request.ctxt, self.id_) - - -class MerchantsController(RestController): - @expose() - def _lookup(self, merchant_id, *remainder): - return MerchantController(merchant_id), remainder - - @wsme.validate(models.Merchant) - @wsme_pecan.wsexpose(models.Merchant, body=models.Merchant, - status_code=202) - def post(self, body): - row = central_api.create_merchant(request.ctxt, body.to_db()) - - return models.Merchant.from_db(row) - - @wsme_pecan.wsexpose([models.Merchant], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = central_api.list_merchants( - request.ctxt, criterion=criterion) - - return map(models.Merchant.from_db, rows) diff --git a/billingstack/api/v2/controllers/payment.py b/billingstack/api/v2/controllers/payment.py deleted file mode 100644 index 8ad9a2f..0000000 --- a/billingstack/api/v2/controllers/payment.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.collector.rpcapi import collector_api - - -class PGProviders(RestController): - @wsme_pecan.wsexpose([models.PGProvider], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion(q) - - rows = collector_api.list_pg_providers( - request.ctxt, criterion=criterion) - - return map(models.PGProvider.from_db, rows) - - -class PGConfigController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme_pecan.wsexpose(models.PGConfig) - def get_all(self): - row = collector_api.get_pg_config(request.ctxt, self.id_) - - return models.PGConfig.from_db(row) - - @wsme.validate(models.PGConfig) - @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig) - def patch(self, body): - row = collector_api.update_pg_config( - request.ctxt, - self.id_, - body.to_db()) - - return models.PGConfig.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - collector_api.delete_pg_config(request.ctxt, self.id_) - - -class PGConfigsController(RestController): - @expose() - def _lookup(self, method_id, *remainder): - return PGConfigController(method_id), remainder - - @wsme.validate(models.PGConfig) - @wsme_pecan.wsexpose(models.PGConfig, body=models.PGConfig, - status_code=202) - def post(self, body): - values = body.to_db() - values['merchant_id'] = request.context['merchant_id'] - - row = collector_api.create_pg_config( - request.ctxt, - values) - - return models.PGConfig.from_db(row) - - @wsme_pecan.wsexpose([models.PGConfig], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, merchant_id=request.context['merchant_id']) - - rows = collector_api.list_pg_configs( - request.ctxt, criterion=criterion) - - return map(models.PGConfig.from_db, rows) - - -class PaymentMethodController(RestController): - 
def __init__(self, id_): - self.id_ = id_ - request.context['payment_method_id'] = id_ - - @wsme_pecan.wsexpose(models.PaymentMethod) - def get_all(self): - row = collector_api.get_payment_method(request.ctxt, self.id_) - - return models.PaymentMethod.from_db(row) - - @wsme.validate(models.PaymentMethod) - @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod) - def patch(self, body): - row = collector_api.update_payment_method( - request.ctxt, - self.id_, - body.to_db()) - - return models.PaymentMethod.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - collector_api.delete_payment_method(request.ctxt, self.id_) - - -class PaymentMethodsController(RestController): - @expose() - def _lookup(self, method_id, *remainder): - return PaymentMethodController(method_id), remainder - - @wsme.validate(models.PaymentMethod) - @wsme_pecan.wsexpose(models.PaymentMethod, body=models.PaymentMethod, - status_code=202) - def post(self, body): - values = body.to_db() - values['customer_id'] = request.context['customer_id'] - - row = collector_api.create_payment_method(request.ctxt, values) - - return models.PaymentMethod.from_db(row) - - @wsme_pecan.wsexpose([models.PaymentMethod], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, merchant_id=request.context['merchant_id'], - customer_id=request.context['customer_id']) - - rows = collector_api.list_payment_methods( - request.ctxt, criterion=criterion) - - return map(models.PaymentMethod.from_db, rows) diff --git a/billingstack/api/v2/controllers/plan.py b/billingstack/api/v2/controllers/plan.py deleted file mode 100644 index 519d8a8..0000000 --- a/billingstack/api/v2/controllers/plan.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class ItemController(RestController): - def __init__(self, id_): - self.id_ = id_ - - @wsme.validate(models.PlanItem) - @wsme_pecan.wsexpose(models.PlanItem, body=models.PlanItem) - def put(self, body): - values = { - 'plan_id': request.context['plan_id'], - 'product_id': self.id_ - } - - row = central_api.create_plan_item(request.ctxt, values) - - return models.PlanItem.from_db(row) - - @wsme.validate(models.PlanItem) - @wsme_pecan.wsexpose(models.PlanItem, body=models.PlanItem) - def patch(self, body): - row = central_api.update_plan_item( - request.ctxt, - request.context['plan_id'], - self.id_, - body.to_db()) - - return models.PlanItem.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self, id_): - central_api.delete_plan_item( - request.ctxt, - request.context['plan_id'], - id_) - - -class ItemsController(RestController): - @expose() - def _lookup(self, id_, *remainder): - return ItemController(id_), remainder - - -class PlanController(RestController): - items = ItemsController() - - def __init__(self, id_): - self.id_ = id_ - request.context['plan_id'] = id_ - - @wsme_pecan.wsexpose(models.Plan) - def get_all(self): - row = central_api.get_plan(request.ctxt, self.id_) - - return models.Plan.from_db(row) - - @wsme.validate(models.Plan) - 
@wsme_pecan.wsexpose(models.Plan, body=models.Plan) - def patch(self, body): - row = central_api.update_plan(request.ctxt, self.id_, body.to_db()) - - return models.Plan.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_plan(request.ctxt, self.id_) - - -class PlansController(RestController): - @expose() - def _lookup(self, plan_id, *remainder): - return PlanController(plan_id), remainder - - @wsme.validate(models.Plan) - @wsme_pecan.wsexpose(models.Plan, body=models.Plan, status_code=202) - def post(self, body): - row = central_api.create_plan( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Plan.from_db(row) - - @wsme_pecan.wsexpose([models.Plan], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - merchant_id=request.context['merchant_id']) - - rows = central_api.list_plans( - request.ctxt, criterion=criterion) - - return map(models.Plan.from_db, rows) diff --git a/billingstack/api/v2/controllers/product.py b/billingstack/api/v2/controllers/product.py deleted file mode 100644 index dae1ef3..0000000 --- a/billingstack/api/v2/controllers/product.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class ProductController(RestController): - def __init__(self, id_): - self.id_ = id_ - request.context['product_id'] = id_ - - @wsme_pecan.wsexpose(models.Product) - def get_all(self): - row = central_api.get_product(request.ctxt, self.id_) - - return models.Product.from_db(row) - - @wsme.validate(models.Product) - @wsme_pecan.wsexpose(models.Product, body=models.Product) - def patch(self, body): - row = central_api.update_product(request.ctxt, self.id_, body.to_db()) - - return models.Product.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_product(request.ctxt, self.id_) - - -class ProductsController(RestController): - @expose() - def _lookup(self, product_id, *remainder): - return ProductController(product_id), remainder - - @wsme.validate(models.Product) - @wsme_pecan.wsexpose(models.Product, body=models.Product, - status_code=202) - def post(self, body): - row = central_api.create_product( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Product.from_db(row) - - @wsme_pecan.wsexpose([models.Product], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - merchant_id=request.context['merchant_id']) - - rows = central_api.list_products( - request.ctxt, criterion=criterion) - - return map(models.Product.from_db, rows) diff --git a/billingstack/api/v2/controllers/root.py b/billingstack/api/v2/controllers/root.py deleted file mode 100644 index a75a04a..0000000 --- a/billingstack/api/v2/controllers/root.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in 
compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.openstack.common import log -from billingstack.api.v2.controllers.currency import CurrenciesController -from billingstack.api.v2.controllers.language import LanguagesController -from billingstack.api.v2.controllers.merchant import MerchantsController -from billingstack.api.v2.controllers.invoice_state import \ - InvoiceStatesController -from billingstack.api.v2.controllers.payment import PGProviders - - -LOG = log.getLogger(__name__) - - -class V2Controller(object): - # Central - currencies = CurrenciesController() - languages = LanguagesController() - merchants = MerchantsController() - - # Biller - invoice_states = InvoiceStatesController() - - # Collector - payment_gateway_providers = PGProviders() - - -class RootController(object): - v2 = V2Controller() diff --git a/billingstack/api/v2/controllers/subscription.py b/billingstack/api/v2/controllers/subscription.py deleted file mode 100644 index fc9cf98..0000000 --- a/billingstack/api/v2/controllers/subscription.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.central.rpcapi import central_api - - -class SubscriptionController(RestController): - def __init__(self, id_): - self.id_ = id_ - request.context['subscription_id'] = id_ - - @wsme_pecan.wsexpose(models.Subscription) - def get_all(self): - row = central_api.get_subscription(request.ctxt, self.id_) - - return models.Subscription.from_db(row) - - @wsme.validate(models.Subscription) - @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription) - def patch(self, body): - row = central_api.update_subscription(request.ctxt, self.id_, - body.to_db()) - - return models.Subscription.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - central_api.delete_subscription(request.ctxt, self.id_) - - -class SubscriptionsController(RestController): - @expose() - def _lookup(self, subscription_id, *remainder): - return SubscriptionController(subscription_id), remainder - - @wsme.validate(models.Subscription) - @wsme_pecan.wsexpose(models.Subscription, body=models.Subscription, - status_code=202) - def post(self, body): - row = central_api.create_subscription( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Subscription.from_db(row) - - @wsme_pecan.wsexpose([models.Subscription], [Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - merchant_id=request.context['merchant_id']) - - rows = central_api.list_subscriptions( - request.ctxt, criterion=criterion) - - return map(models.Subscription.from_db, rows) diff --git a/billingstack/api/v2/controllers/usage.py b/billingstack/api/v2/controllers/usage.py deleted file mode 100644 index 3b00e73..0000000 --- 
a/billingstack/api/v2/controllers/usage.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from pecan import expose, request -import wsme -import wsmeext.pecan as wsme_pecan - - -from billingstack.api.base import Query, _query_to_criterion, RestController -from billingstack.api.v2 import models -from billingstack.rater.rpcapi import rater_api - - -class UsageController(RestController): - def __init__(self, id_): - self.id_ = id_ - request.context['usage_id'] = id_ - - @wsme_pecan.wsexpose(models.Usage) - def get_all(self): - row = rater_api.get_usage(request.ctxt, self.id_) - - return models.Usage.from_db(row) - - @wsme.validate(models.Usage) - @wsme_pecan.wsexpose(models.Usage, body=models.Usage) - def patch(self, body): - row = rater_api.update_usage(request.ctxt, self.id_, body.to_db()) - - return models.Usage.from_db(row) - - @wsme_pecan.wsexpose(None, status_code=204) - def delete(self): - rater_api.delete_usage(request.ctxt, self.id_) - - -class UsagesController(RestController): - @expose() - def _lookup(self, usage_id, *remainder): - return UsageController(usage_id), remainder - - @wsme.validate(models.Usage) - @wsme_pecan.wsexpose(models.Usage, body=models.Usage, status_code=202) - def post(self, body): - row = rater_api.create_usage( - request.ctxt, - request.context['merchant_id'], - body.to_db()) - - return models.Usage.from_db(row) - - @wsme_pecan.wsexpose([models.Usage], 
[Query]) - def get_all(self, q=[]): - criterion = _query_to_criterion( - q, - merchant_id=request.context['merchant_id']) - - rows = rater_api.list_usages( - request.ctxt, criterion=criterion) - - return map(models.Usage.from_db, rows) diff --git a/billingstack/api/v2/models.py b/billingstack/api/v2/models.py deleted file mode 100644 index 58cccda..0000000 --- a/billingstack/api/v2/models.py +++ /dev/null @@ -1,221 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from wsme.types import text, DictType -from datetime import datetime - -from billingstack.api.base import ModelBase, property_type -from billingstack.openstack.common import log - -LOG = log.getLogger(__name__) - - -class Base(ModelBase): - id = text - - -class DescribedBase(Base): - name = text - title = text - description = text - - -def change_suffixes(data, keys, shorten=True, suffix='_name'): - """ - Loop thro the keys foreach key setting for example - 'currency_name' > 'currency' - """ - for key in keys: - if shorten: - new, old = key, key + suffix - else: - new, old = key + suffix, key - if old in data: - if new in data: - raise RuntimeError("Can't override old key with new key") - - data[new] = data.pop(old) - - -class Currency(DescribedBase): - pass - - -class Language(DescribedBase): - pass - - -class InvoiceState(DescribedBase): - pass - - -class PGProvider(DescribedBase): - def __init__(self, **kw): - #kw['methods'] = [PGMethod.from_db(m) for m in kw.get('methods', [])] - super(PGProvider, self).__init__(**kw) - - methods = [DictType(key_type=text, value_type=property_type)] - properties = DictType(key_type=text, value_type=property_type) - - -class ContactInfo(Base): - id = text - first_name = text - last_name = text - company = text - address1 = text - address2 = text - address3 = text - locality = text - region = text - country_name = text - postal_code = text - - phone = text - email = text - website = text - - -class PlanItem(ModelBase): - name = text - title = text - description = text - - plan_id = text - product_id = text - - pricing = [DictType(key_type=text, value_type=property_type)] - - -class Plan(DescribedBase): - def __init__(self, **kw): - if 'items' in kw: - kw['items'] = map(PlanItem.from_db, kw.pop('items')) - super(Plan, self).__init__(**kw) - - items = [PlanItem] - properties = DictType(key_type=text, value_type=property_type) - - -class Product(DescribedBase): - properties = DictType(key_type=text, value_type=property_type) - 
pricing = [DictType(key_type=text, value_type=property_type)] - - -class InvoiceLine(Base): - description = text - price = float - quantity = float - sub_total = float - invoice_id = text - - -class Invoice(Base): - identifier = text - sub_total = float - tax_percentage = float - tax_total = float - total = float - - -class Subscription(Base): - billing_day = int - resource_id = text - resource_type = text - - plan_id = text - customer_id = text - payment_method_id = text - - -class Usage(Base): - measure = text - start_timestamp = datetime - end_timestamp = datetime - price = float - total = float - value = float - merchant_id = text - product_id = text - subscription_id = text - - -class PGConfig(Base): - name = text - title = text - - merchant_id = text - provider_id = text - - state = text - - properties = DictType(key_type=text, value_type=property_type) - - -class PaymentMethod(Base): - name = text - identifier = text - expires = text - - merchant_id = text - customer_id = text - provider_config_id = text - - state = text - - properties = DictType(key_type=text, value_type=property_type) - - -class Account(Base): - _keys = ['currency', 'language'] - - currency = text - language = text - - name = text - - -class Merchant(Account): - default_gateway = text - - def to_db(self): - values = self.as_dict() - change_suffixes(values, self._keys, shorten=False) - return values - - @classmethod - def from_db(cls, values): - change_suffixes(values, cls._keys) - return cls(**values) - - -class Customer(Account): - merchant_id = text - contact_info = [ContactInfo] - - def __init__(self, **kw): - infos = kw.get('contact_info', {}) - kw['contact_info'] = [ContactInfo.from_db(i) for i in infos] - super(Customer, self).__init__(**kw) - - def to_db(self): - values = self.as_dict() - change_suffixes(values, self._keys, shorten=False) - return values - - @classmethod - def from_db(cls, values): - change_suffixes(values, cls._keys) - return cls(**values) diff --git 
a/billingstack/biller/__init__.py b/billingstack/biller/__init__.py deleted file mode 100644 index 7c6e629..0000000 --- a/billingstack/biller/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -cfg.CONF.register_group(cfg.OptGroup( - name='service:biller', title="Configuration for Biller Service" -)) - -cfg.CONF.register_opts([ - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('storage-driver', default='sqlalchemy', - help='The storage driver to use'), -], group='service:biller') diff --git a/billingstack/biller/rpcapi.py b/billingstack/biller/rpcapi.py deleted file mode 100644 index faa0f68..0000000 --- a/billingstack/biller/rpcapi.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -from billingstack.openstack.common.rpc import proxy - -rpcapi_opts = [ - cfg.StrOpt('biller_topic', default='biller', - help='the topic biller nodes listen on') -] - -cfg.CONF.register_opts(rpcapi_opts) - - -class BillerAPI(proxy.RpcProxy): - BASE_RPC_VERSION = '1.0' - - def __init__(self): - super(BillerAPI, self).__init__( - topic=cfg.CONF.biller_topic, - default_version=self.BASE_RPC_VERSION) - - # Invoice States - def create_invoice_state(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_invoice_state', - values=values)) - - def list_invoice_states(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoice_states', - criterion=criterion)) - - def get_invoice_state(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice_state', id_=id_)) - - def update_invoice_state(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice_state', - id_=id_, values=values)) - - def delete_invoice_state(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice_state', id_=id_)) - - # Invoices - def create_invoice(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_invoice', - merchant_id=merchant_id, values=values)) - - def list_invoices(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_invoices', - criterion=criterion)) - - def get_invoice(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice', id_=id_)) - - def update_invoice(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice', id_=id_, - values=values)) - - def delete_invoice(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice', id_=id_)) - - # Invoice lines - def create_invoice_line(self, ctxt, invoice_id, values): - return self.call(ctxt, self.make_msg('create_invoice_line', - invoice_id=invoice_id, values=values)) - - def list_invoice_lines(self, ctxt, criterion=None): - return 
self.call(ctxt, self.make_msg('list_invoice_lines', - criterion=criterion)) - - def get_invoice_line(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_invoice_line', id_=id_)) - - def update_invoice_line(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_invoice_line', id_=id_, - values=values)) - - def delete_invoice_line(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_invoice_line', id_=id_)) - - -biller_api = BillerAPI() diff --git a/billingstack/biller/service.py b/billingstack/biller/service.py deleted file mode 100644 index bedc8c6..0000000 --- a/billingstack/biller/service.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import sys - -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service as os_service -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.storage.utils import get_connection -from billingstack import service as bs_service - - -cfg.CONF.import_opt('biller_topic', 'billingstack.biller.rpcapi') -cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -LOG = logging.getLogger(__name__) - - -class Service(rpc_service.Service): - """ - Biller service - """ - def __init__(self, *args, **kwargs): - kwargs.update( - host=cfg.CONF.host, - topic=cfg.CONF.biller_topic, - ) - - super(Service, self).__init__(*args, **kwargs) - - def start(self): - self.storage_conn = get_connection('biller') - super(Service, self).start() - - def wait(self): - super(Service, self).wait() - self.conn.consumer_thread.wait() - - def create_invoice_state(self, ctxt, values): - return self.storage_conn.create_invoice_state(ctxt, values) - - def list_invoice_states(self, ctxt, **kw): - return self.storage_conn.list_invoice_states(ctxt, **kw) - - def get_invoice_state(self, ctxt, id_): - return self.storage_conn.get_invoice_state(ctxt, id_) - - def update_invoice_state(self, ctxt, id_, values): - return self.storage_conn.update_invoice_state(ctxt, id_, values) - - def delete_invoice_state(self, ctxt, id_): - return self.storage_conn.delete_invoice_state(ctxt, id_) - - def create_invoice(self, ctxt, merchant_id, values): - return self.storage_conn.create_invoice_state( - ctxt, merchant_id, values) - - def list_invoices(self, ctxt, **kw): - return self.storage_conn.list_invoices(ctxt, **kw) - - def get_invoice(self, ctxt, id_): - return self.storage_conn.get_invoice(ctxt, id_) - - def update_invoice(self, ctxt, id_, values): - return self.storage_conn.update_invoice(ctxt, id_, values) - - def delete_invoice(self, ctxt, id_): - return 
self.storage_conn.delete_invoice(ctxt, id_) - - def create_invoice_line(self, ctxt, invoice_id, values): - return self.storage_conn.create_invoice_line_state( - ctxt, invoice_id, values) - - def list_invoice_lines(self, ctxt, **kw): - return self.storage_conn.list_invoice_lines(ctxt, **kw) - - def get_invoice_line(self, ctxt, id_): - return self.storage_conn.get_invoice_line(ctxt, id_) - - def update_invoice_line(self, ctxt, id_, values): - return self.storage_conn.update_invoice_line(ctxt, id_, values) - - def delete_invoice_line(self, ctxt, id_): - return self.storage_conn.delete_invoice_line(ctxt, id_) - - -def launch(): - bs_service.prepare_service(sys.argv) - launcher = os_service.launch(Service(), - cfg.CONF['service:biller'].workers) - launcher.wait() diff --git a/billingstack/biller/storage/__init__.py b/billingstack/biller/storage/__init__.py deleted file mode 100644 index f9024d0..0000000 --- a/billingstack/biller/storage/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from billingstack.storage import base - - -class StorageEngine(base.StorageEngine): - """Base class for the biller storage""" - __plugin_ns__ = 'billingstack.biller.storage' - - -class Connection(base.Connection): - """Define the base API for biller storage""" diff --git a/billingstack/biller/storage/impl_sqlalchemy.py b/billingstack/biller/storage/impl_sqlalchemy.py deleted file mode 100644 index aeef60e..0000000 --- a/billingstack/biller/storage/impl_sqlalchemy.py +++ /dev/null @@ -1,246 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -A Usage plugin using sqlalchemy... 
-""" - -from oslo.config import cfg -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import Column, ForeignKey -from sqlalchemy import DateTime, Float, Unicode -from sqlalchemy.orm import relationship - -from billingstack.openstack.common import log as logging -from billingstack.sqlalchemy.types import UUID -from billingstack.sqlalchemy import api, model_base, session - -from billingstack.biller.storage import Connection, StorageEngine -from billingstack.central import rpcapi as central_api - -# DB SCHEMA -BASE = declarative_base(cls=model_base.ModelBase) - -LOG = logging.getLogger(__name__) - - -cfg.CONF.register_group(cfg.OptGroup( - name='biller:sqlalchemy', title='Config for biller sqlalchemy plugin')) - - -cfg.CONF.register_opts(session.SQLOPTS, group='biller:sqlalchemy') - - -class InvoiceState(BASE): - """ - A State representing the currented state a Invoice is in - - Example: - Completed, Failed - """ - name = Column(Unicode(60), nullable=False, primary_key=True) - title = Column(Unicode(100), nullable=False) - description = Column(Unicode(255)) - - -class Invoice(BASE, model_base.BaseMixin): - """ - An invoice - """ - identifier = Column(Unicode(255), nullable=False) - due = Column(DateTime, ) - - sub_total = Column(Float) - tax_percentage = Column(Float) - tax_total = Column(Float) - total = Column(Float) - - customer_id = Column(UUID, nullable=False) - - line_items = relationship('InvoiceLine', backref='invoice_lines') - - state = relationship('InvoiceState', backref='invoices') - state_id = Column(Unicode(60), ForeignKey('invoice_state.name'), - nullable=False) - - # Keep track of the currency and merchant - currency_name = Column(Unicode(10), nullable=False) - merchant_id = Column(UUID, nullable=False) - - -class InvoiceLine(BASE, model_base.BaseMixin): - """ - A Line item in which makes up the Invoice - """ - description = Column(Unicode(255)) - price = Column(Float) - quantity = Column(Float) - sub_total = Column(Float) - - 
invoice_id = Column(UUID, ForeignKey('invoice.id', ondelete='CASCADE', - onupdate='CASCADE'), nullable=False) - - -class SQLAlchemyEngine(StorageEngine): - __plugin_name__ = 'sqlalchemy' - - def get_connection(self): - return Connection() - - -class Connection(Connection, api.HelpersMixin): - def __init__(self): - self.setup('biller:sqlalchemy') - - def base(self): - return BASE - - # Invoice States - def create_invoice_state(self, ctxt, values): - """ - Add a supported invoice_state to the database - """ - row = InvoiceState(**values) - self._save(row) - return dict(row) - - def list_invoice_states(self, ctxt, **kw): - rows = self._list(InvoiceState, **kw) - return map(dict, rows) - - def get_invoice_state(self, ctxt, id_): - row = self._get_id_or_name(InvoiceState, id_) - return dict(row) - - def update_invoice_state(self, ctxt, id_, values): - row = self._update(InvoiceState, id_, values, by_name=True) - return dict(row) - - def delete_invoice_state(self, ctxt, id_): - self._delete(InvoiceState, id_, by_name=True) - - # Invoices - def _invoice(self, row): - invoice = dict(row) - return invoice - - def create_invoice(self, ctxt, merchant_id, values): - """ - Add a new Invoice - - :param merchant_id: The Merchant - :param values: Values describing the new Invoice - """ - merchant = central_api.get_merchant(merchant_id) - - invoice = Invoice(**values) - invoice.merchant = merchant - - self._save(invoice) - return self._invoice(invoice) - - def list_invoices(self, ctxt, **kw): - """ - List Invoices - """ - rows = self._list(Invoice, **kw) - return map(self._invoice, rows) - - def get_invoice(self, ctxt, id_): - """ - Get a Invoice - - :param id_: The Invoice ID - """ - row = self._get(Invoice, id_) - return self.invoice(row) - - def update_invoice(self, ctxt, id_, values): - """ - Update a Invoice - - :param id_: The Invoice ID - :param values: Values to update with - """ - row = self._get(Invoice, id_) - row.update(values) - - self._save(row) - return 
self._invoice(row) - - def delete_invoice(self, ctxt, id_): - """ - Delete a Invoice - - :param id_: Invoice ID - """ - self._delete(Invoice, id_) - - # Invoices Items - def _invoice_line(self, row): - line = dict(row) - return line - - def create_invoice_items(self, ctxt, invoice_id, values): - """ - Add a new Invoice - - :param invoice_id: The Invoice - :param values: Values describing the new Invoice Line - """ - invoice = self._get(Invoice, invoice_id) - - line = InvoiceLine(**values) - line.invoice = invoice - - self._save(line) - return self._invoice_line(line) - - def list_invoice_lines(self, ctxt, **kw): - """ - List Invoice Lines - """ - rows = self._list(InvoiceLine, **kw) - return map(self._invoice_line, rows) - - def get_invoice_line(self, ctxt, id_): - """ - Get a Invoice Line - - :param id_: The Invoice Line ID - """ - row = self._get(InvoiceLine, id_) - return self._invoice_line(row) - - def update_invoice_line(self, ctxt, id_, values): - """ - Update a Invoice Line - - :param id_: The Invoice ID - :param values: Values to update with - """ - row = self._get(InvoiceLine, id_) - row.update(values) - - self._save(row) - return self._invoice_line(row) - - def delete_invoice_line(self, ctxt, id_): - """ - Delete a Invoice Line - - :param id_: Invoice Line ID - """ - self._delete(InvoiceLine, id_) diff --git a/billingstack/central/__init__.py b/billingstack/central/__init__.py deleted file mode 100644 index b84add9..0000000 --- a/billingstack/central/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -cfg.CONF.register_group(cfg.OptGroup( - name='service:central', title="Configuration for Central Service" -)) - - -cfg.CONF.register_opts([ - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('storage-driver', default='sqlalchemy', - help='The storage driver to use'), -], group='service:central') diff --git a/billingstack/central/flows/__init__.py b/billingstack/central/flows/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/central/flows/merchant.py b/billingstack/central/flows/merchant.py deleted file mode 100644 index 29ab1e9..0000000 --- a/billingstack/central/flows/merchant.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from taskflow.patterns import linear_flow - -from billingstack import tasks -from billingstack.openstack.common import log - -ACTION = 'merchant:create' - -LOG = log.getLogger(__name__) - - -class EntryCreateTask(tasks.RootTask): - def __init__(self, storage, **kw): - super(EntryCreateTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, values): - return self.storage.create_merchant(ctxt, values) - - -def create_flow(storage): - flow = linear_flow.Flow(ACTION) - - entry_task = EntryCreateTask(storage, provides='merchant', prefix=ACTION) - flow.add(entry_task) - - return flow diff --git a/billingstack/central/rpcapi.py b/billingstack/central/rpcapi.py deleted file mode 100644 index cbca8be..0000000 --- a/billingstack/central/rpcapi.py +++ /dev/null @@ -1,211 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -from billingstack.openstack.common.rpc import proxy - -rpcapi_opts = [ - cfg.StrOpt('central_topic', default='central', - help='the topic central nodes listen on') -] - -cfg.CONF.register_opts(rpcapi_opts) - - -class CentralAPI(proxy.RpcProxy): - BASE_RPC_VERSION = '1.0' - - def __init__(self): - super(CentralAPI, self).__init__( - topic=cfg.CONF.central_topic, - default_version=self.BASE_RPC_VERSION) - - # Currency - def create_currency(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_currency', values=values)) - - def list_currencies(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_currencies', - criterion=criterion)) - - def get_currency(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_currency', - id_=id_)) - - def update_currency(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_currency', - id_=id_, values=values)) - - def delete_currency(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_currency', - id_=id_)) - - # Language - def create_language(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_language', values=values)) - - def list_languages(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_languages', - criterion=criterion)) - - def get_language(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_language', id_=id_)) - - def update_language(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_language', - id_=id_, values=values)) - - def delete_language(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_language', id_=id_)) - - # Contact Info - def create_contact_info(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('create_contact_info', id_=id_, - values=values)) - - def get_contact_info(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_contact_info', id_)) - - def update_contact_info(self, ctxt, id_, values): - 
return self.call(ctxt, self.make_msg('update_contact_info', id_=id_, - values=values)) - - def delete_contact_info(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_contact_info', id_=id_)) - - # Merchant - def create_merchant(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_merchant', values=values)) - - def list_merchants(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_merchants', - criterion=criterion)) - - def get_merchant(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_merchant', id_=id_)) - - def update_merchant(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_merchant', - id_=id_, values=values)) - - def delete_merchant(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_merchant', - id_=id_)) - - # Customer - def create_customer(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_customer', - merchant_id=merchant_id, values=values)) - - def list_customers(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_customers', - criterion=criterion)) - - def get_customer(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_customer', id_=id_)) - - def update_customer(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_customer', - id_=id_, values=values)) - - def delete_customer(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_customer', id_=id_)) - - # Plans - def create_plan(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_plan', - merchant_id=merchant_id, values=values)) - - def list_plans(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_plans', - criterion=criterion)) - - def get_plan(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_plan', id_=id_)) - - def update_plan(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_plan', id_=id_, - values=values)) - - def 
delete_plan(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_plan', id_=id_)) - - def get_plan_by_subscription(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_plan_by_subscription', - id_=id_)) - - # PlanItems - def create_plan_item(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_plan_item', - values=values)) - - def list_plan_items(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_plan_items', - criterion=criterion)) - - def get_plan_item(self, ctxt, plan_id, product_id): - return self.call(ctxt, self.make_msg('get_plan_item', - plan_id=plan_id, product_id=product_id)) - - def update_plan_item(self, ctxt, plan_id, product_id, values): - return self.call(ctxt, self.make_msg('update_plan_item', - plan_id=plan_id, product_id=product_id, - values=values)) - - def delete_plan_item(self, ctxt, plan_id, product_id): - return self.call(ctxt, self.make_msg('delete_plan_item', - plan_id=plan_id, product_id=product_id)) - - # Products - def create_product(self, ctxt, merchant_id, values): - return self.call(ctxt, self.make_msg('create_product', - merchant_id=merchant_id, values=values)) - - def list_products(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_products', - criterion=criterion)) - - def get_product(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_product', id_=id_)) - - def update_product(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_product', id_=id_, - values=values)) - - def delete_product(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_product', id_=id_)) - - # Subscriptions - def create_subscription(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_subscription', - values=values)) - - def list_subscriptions(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_subscriptions', - criterion=criterion)) - - def get_subscription(self, ctxt, id_): - return self.call(ctxt, 
self.make_msg('get_subscription', id_=id_)) - - def update_subscription(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_subscription', id_=id_, - values=values)) - - def delete_subscription(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_subscription', id_=id_)) - - -central_api = CentralAPI() diff --git a/billingstack/central/service.py b/billingstack/central/service.py deleted file mode 100644 index 54a757c..0000000 --- a/billingstack/central/service.py +++ /dev/null @@ -1,215 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import sys - -from oslo.config import cfg -from taskflow.engines import run as run_flow - - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.openstack.common import service as os_service -from billingstack.central.flows import merchant -from billingstack.storage.utils import get_connection -from billingstack import service as bs_service - - -cfg.CONF.import_opt('central_topic', 'billingstack.central.rpcapi') -cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -LOG = logging.getLogger(__name__) - - -class Service(rpc_service.Service): - def __init__(self, *args, **kwargs): - kwargs.update( - host=cfg.CONF.host, - topic=cfg.CONF.central_topic, - ) - - super(Service, self).__init__(*args, **kwargs) - - def start(self): - self.storage_conn = get_connection('central') - super(Service, self).start() - - def wait(self): - super(Service, self).wait() - self.conn.consumer_thread.wait() - - # Currency - def create_currency(self, ctxt, values): - return self.storage_conn.create_currency(ctxt, values) - - def list_currencies(self, ctxt, **kw): - return self.storage_conn.list_currencies(ctxt, **kw) - - def get_currency(self, ctxt, id_): - return self.storage_conn.get_currency(ctxt, id_) - - def update_currency(self, ctxt, id_, values): - return self.storage_conn.update_currency(ctxt, id_, values) - - def delete_currency(self, ctxt, id_): - return self.storage_conn.delete_currency(ctxt, id_) - - # Language - def create_language(self, ctxt, values): - return self.storage_conn.create_language(ctxt, values) - - def list_languages(self, ctxt, **kw): - return self.storage_conn.list_languages(ctxt, **kw) - - def get_language(self, ctxt, id_): - return self.storage_conn.get_language(ctxt, id_) - - def update_language(self, ctxt, id_, values): - return self.storage_conn.update_language(ctxt, id_, values) - - def 
delete_language(self, ctxt, id_): - return self.storage_conn.delete_language(ctxt, id_) - - # Contact Info - def create_contact_info(self, ctxt, obj, values, cls=None, - rel_attr='contact_info'): - return self.storage_conn.create_contact_info(ctxt, values) - - def get_contact_info(self, ctxt, id_): - return self.storage_conn.get_contact_info(ctxt, id_) - - def update_contact_info(self, ctxt, id_, values): - return self.storage_conn.update_contact_info(ctxt, values) - - def delete_contact_info(self, ctxt, id_): - return self.storage_conn.delete_contact_info(ctxt, id_) - - # PGP - def list_pg_providers(self, ctxt, **kw): - return self.storage_conn.list_pg_providers(ctxt, **kw) - - def get_pg_provider(self, ctxt, pgp_id): - return self.storage_conn.get_pg_provider(ctxt, pgp_id) - - # Merchant - def create_merchant(self, ctxt, values): - flow = merchant.create_flow(self.storage_conn) - result = run_flow(flow, engine_conf="parallel", - store={'values': values, 'ctxt': ctxt}) - return result['merchant'] - - def list_merchants(self, ctxt, **kw): - return self.storage_conn.list_merchants(ctxt, **kw) - - def get_merchant(self, ctxt, id_): - return self.storage_conn.get_merchant(ctxt, id_) - - def update_merchant(self, ctxt, id_, values): - return self.storage_conn.update_merchant(ctxt, id_, values) - - def delete_merchant(self, ctxt, id_): - return self.storage_conn.delete_merchant(ctxt, id_) - - # Customer - def create_customer(self, ctxt, merchant_id, values): - return self.storage_conn.create_customer(ctxt, merchant_id, values) - - def list_customers(self, ctxt, **kw): - return self.storage_conn.list_customers(ctxt, **kw) - - def get_customer(self, ctxt, id_): - return self.storage_conn.get_customer(ctxt, id_) - - def update_customer(self, ctxt, id_, values): - return self.storage_conn.update_customer(ctxt, id_, values) - - def delete_customer(self, ctxt, id_): - return self.storage_conn.delete_customer(ctxt, id_) - - # Plans - def create_plan(self, ctxt, merchant_id, 
values): - return self.storage_conn.create_plan(ctxt, merchant_id, values) - - def list_plans(self, ctxt, **kw): - return self.storage_conn.list_plans(ctxt, **kw) - - def get_plan(self, ctxt, id_): - return self.storage_conn.get_plan(ctxt, id_) - - def update_plan(self, ctxt, id_, values): - return self.storage_conn.update_plan(ctxt, id_, values) - - def delete_plan(self, ctxt, id_): - return self.storage_conn.delete_plan(ctxt, id_) - - def get_plan_by_subscription(self, ctxt, id_): - return self.storage_conn.get_plan_by_subscription(ctxt, id_) - - # PlanItems - def create_plan_item(self, ctxt, values): - return self.storage_conn.create_plan_item(ctxt, values) - - def list_plan_items(self, ctxt, **kw): - return self.storage_conn.list_plan_items(ctxt, **kw) - - def get_plan_item(self, ctxt, plan_id, product_id): - return self.storage_conn.get_plan_item(ctxt, plan_id, product_id) - - def update_plan_item(self, ctxt, plan_id, product_id, values): - return self.storage_conn.update_plan_item( - ctxt, plan_id, product_id, values) - - def delete_plan_item(self, ctxt, plan_id, product_id): - return self.storage_conn.delete_plan_item(ctxt, plan_id, product_id) - - # Products - def create_product(self, ctxt, merchant_id, values): - return self.storage_conn.create_product(ctxt, merchant_id, values) - - def list_products(self, ctxt, **kw): - return self.storage_conn.list_products(ctxt, **kw) - - def get_product(self, ctxt, id_): - return self.storage_conn.get_product(ctxt, id_) - - def update_product(self, ctxt, id_, values): - return self.storage_conn.update_product(ctxt, id_, values) - - def delete_product(self, ctxt, id_): - return self.storage_conn.delete_product(ctxt, id_) - - # Subscriptions - def create_subscription(self, ctxt, values): - return self.storage_conn.create_subscription(ctxt, values) - - def list_subscriptions(self, ctxt, **kw): - return self.storage_conn.list_subscriptions(ctxt, **kw) - - def get_subscription(self, ctxt, id_): - return 
self.storage_conn.get_subscription(ctxt, id_) - - def update_subscription(self, ctxt, id_, values): - return self.storage_conn.update_subscription(ctxt, id_, values) - - def delete_subscription(self, ctxt, id_): - return self.storage_conn.delete_subscription(ctxt, id_) - - -def launch(): - bs_service.prepare_service(sys.argv) - launcher = os_service.launch(Service(), - cfg.CONF['service:central'].workers) - launcher.wait() diff --git a/billingstack/central/storage/__init__.py b/billingstack/central/storage/__init__.py deleted file mode 100644 index 1ebda20..0000000 --- a/billingstack/central/storage/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -from billingstack.openstack.common import log as logging -from billingstack.storage import base - - -LOG = logging.getLogger(__name__) - - -class StorageEngine(base.StorageEngine): - __plugin_type__ = 'central' - __plugin_ns__ = 'billingstack.central.storage' - - -class Connection(base.Connection): - pass diff --git a/billingstack/central/storage/impl_sqlalchemy/__init__.py b/billingstack/central/storage/impl_sqlalchemy/__init__.py deleted file mode 100644 index 60b6434..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/__init__.py +++ /dev/null @@ -1,502 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from sqlalchemy.orm import exc -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack import exceptions -from billingstack import utils as common_utils -from billingstack.sqlalchemy import utils as db_utils, api -from billingstack.sqlalchemy.session import SQLOPTS -from billingstack.central.storage import Connection, StorageEngine -from billingstack.central.storage.impl_sqlalchemy import models - - -LOG = logging.getLogger(__name__) - -cfg.CONF.register_group(cfg.OptGroup( - name='central:sqlalchemy', title="Configuration for SQLAlchemy Storage" -)) - -cfg.CONF.register_opts(SQLOPTS, group='central:sqlalchemy') - - -class SQLAlchemyEngine(StorageEngine): - __plugin_name__ = 'sqlalchemy' - - def get_connection(self): - return Connection(self.name) - - -class Connection(Connection, api.HelpersMixin): - """ - SQLAlchemy connection - """ - def __init__(self, config_group): - self.setup(config_group) - - def base(self): - return models.BASE - - def set_properties(self, obj, properties, cls=None, rel_attr='properties', - purge=False): - """ - Set's a dict with key values on a relation on the row - - :param obj: Either a row object or a id to use in connection with cls - :param properties: Key and Value dict with props to set. 1 row item. - :param cls: The class to use if obj isn't a row to query. 
- :param rel_attr: The relation attribute name to get the class to use - :param purge: Purge entries that doesn't exist in existing but in DB - """ - row = self._get_row(obj, cls=cls) - - existing = self._kv_rows(row[rel_attr]) - - for key, value in properties.items(): - values = {'name': key, 'value': value} - - if key not in existing: - rel_row = self._make_rel_row(row, rel_attr, values) - row[rel_attr].append(rel_row) - else: - existing[key].update(values) - - if purge: - for key in existing: - if not key in properties: - row[rel_attr].remove(existing[key]) - - # Currency - def create_currency(self, ctxt, values): - """ - Add a supported currency to the database - """ - data = common_utils.get_currency(values['name']) - row = models.Currency(**data) - self._save(row) - return dict(row) - - def list_currencies(self, ctxt, **kw): - rows = self._list(models.Currency, **kw) - return map(dict, rows) - - def get_currency(self, ctxt, id_): - row = self._get_id_or_name(models.Currency, id_) - return dict(row) - - def update_currency(self, ctxt, id_, values): - row = self._update(models.Currency, id_, values, by_name=True) - return dict(row) - - def delete_currency(self, ctxt, id_): - self._delete(models.Currency, id_, by_name=True) - - # Language - def create_language(self, ctxt, values): - """ - Add a supported language to the database - """ - data = common_utils.get_language(values['name']) - row = models.Language(**data) - self._save(row) - return dict(row) - - def list_languages(self, ctxt, **kw): - rows = self._list(models.Language, **kw) - return map(dict, rows) - - def get_language(self, ctxt, id_): - row = self._get_id_or_name(models.Language, id_) - return dict(row) - - def update_language(self, ctxt, id_, values): - row = self._update(models.Language, id_, values, by_name=True) - return dict(row) - - def delete_language(self, ctxt, id_): - self._delete(models.Language, id_, by_name=True) - - # ContactInfo - def create_contact_info(self, ctxt, obj, values, 
cls=None, - rel_attr='contact_info'): - """ - :param entity: The object to add the contact_info to - :param values: The values to add - """ - row = self._get_row(obj, cls=cls) - - rel_row = self._make_rel_row(obj, rel_attr, values) - - local, remote = db_utils.get_prop_names(row) - - if rel_attr in remote: - if isinstance(row[rel_attr], list): - row[rel_attr].append(rel_row) - else: - row[rel_attr] = rel_row - else: - msg = 'Attempted to set non-relation %s' % rel_attr - raise exceptions.BadRequest(msg) - - if cls: - self._save(rel_row) - return dict(rel_row) - else: - return rel_row - - def get_contact_info(self, ctxt, id_): - self._get(models.ContactInfo, id_) - - def update_contact_info(self, ctxt, id_, values): - return self._update(models.ContactInfo, id_, values) - - def delete_contact_info(self, ctxt, id_): - self._delete(models.ContactInfo, id_) - - # Merchant - def create_merchant(self, ctxt, values): - row = models.Merchant(**values) - - self._save(row) - return dict(row) - - def list_merchants(self, ctxt, **kw): - rows = self._list(models.Merchant, **kw) - return map(dict, rows) - - def get_merchant(self, ctxt, id_): - row = self._get(models.Merchant, id_) - return dict(row) - - def update_merchant(self, ctxt, id_, values): - row = self._update(models.Merchant, id_, values) - return dict(row) - - def delete_merchant(self, ctxt, id_): - self._delete(models.Merchant, id_) - - # Customer - def _customer(self, row): - data = dict(row) - - data['contact_info'] = [dict(i) for i in row.contact_info] - data['default_info'] = dict(row.default_info) if row.default_info\ - else {} - return data - - def create_customer(self, ctxt, merchant_id, values): - merchant = self._get(models.Merchant, merchant_id) - - contact_info = values.pop('contact_info', None) - customer = models.Customer(**values) - merchant.customers.append(customer) - - if contact_info: - info_row = self.create_contact_info(ctxt, customer, contact_info) - customer.default_info = info_row - - 
self._save(customer) - return self._customer(customer) - - def list_customers(self, ctxt, **kw): - rows = self._list(models.Customer, **kw) - return map(dict, rows) - - def get_customer(self, ctxt, id_): - row = self._get(models.Customer, id_) - return self._customer(row) - - def update_customer(self, ctxt, id_, values): - row = self._update(models.Customer, id_, values) - return self._customer(row) - - def delete_customer(self, ctxt, id_): - return self._delete(models.Customer, id_) - - def _entity(self, row): - """ - Helper to serialize a entity like a Product or a Plan - - :param row: The Row. - """ - entity = dict(row) - if hasattr(row, 'properties'): - entity['properties'] = self._kv_rows( - row.properties, func=lambda i: i['value']) - if hasattr(row, 'pricing'): - entity['pricing'] = row.pricing or [] - return entity - - # Plan - def _plan(self, row): - plan = self._entity(row) - plan['items'] = map(self._plan_item, row.plan_items) if row.plan_items\ - else [] - return plan - - def create_plan(self, ctxt, merchant_id, values): - """ - Add a new Plan - - :param merchant_id: The Merchant - :param values: Values describing the new Plan - """ - merchant = self._get(models.Merchant, merchant_id) - - properties = values.pop('properties', {}) - - plan = models.Plan(**values) - - plan.merchant = merchant - self.set_properties(plan, properties) - - self._save(plan) - return self._plan(plan) - - def list_plans(self, ctxt, **kw): - """ - List Plan - - :param merchant_id: The Merchant to list it for - """ - rows = self._list(models.Plan, **kw) - return map(self._plan, rows) - - def get_plan(self, ctxt, id_): - """ - Get a Plan - - :param id_: The Plan ID - """ - row = self._get(models.Plan, id_) - return self._plan(row) - - def update_plan(self, ctxt, id_, values): - """ - Update a Plan - - :param id_: The Plan ID - :param values: Values to update with - """ - properties = values.pop('properties', {}) - - row = self._get(models.Plan, id_) - row.update(values) - - 
self.set_properties(row, properties) - - self._save(row) - return self._plan(row) - - def delete_plan(self, ctxt, id_): - """ - Delete a Plan - - :param id_: Plan ID - """ - self._delete(models.Plan, id_) - - def get_plan_by_subscription(self, ctxt, subscription_id): - q = self.session.query(models.Plan).join(models.Subscription)\ - .filter(models.Subscription.id == subscription_id) - try: - row = q.one() - except exc.NoResultFound: - msg = 'Couldn\'t find any Plan for subscription %s' % \ - subscription_id - raise exceptions.NotFound(msg) - return self._plan(row) - - # PlanItemw - def _plan_item(self, row): - entity = self._entity(row) - entity['name'] = row.product.name - entity['title'] = row.title or row.product.title - entity['description'] = row.description or row.product.description - return entity - - def create_plan_item(self, ctxt, values): - row = models.PlanItem(**values) - self._save(row) - return self._entity(row) - - def list_plan_items(self, ctxt, **kw): - return self._list(models.PlanItem, **kw) - - def get_plan_item(self, ctxt, plan_id, product_id, criterion={}): - criterion.update({'plan_id': plan_id, 'product_id': product_id}) - row = self._get(models.PlanItem, criterion=criterion) - return self._entity(row) - - def update_plan_item(self, ctxt, plan_id, product_id, values): - criterion = {'plan_id': plan_id, 'product_id': product_id} - row = self._get(models.PlanItem, criterion=criterion) - row.update(values) - self._save(row) - return self._entity(row) - - def delete_plan_item(self, ctxt, plan_id, product_id): - """ - Remove a Product from a Plan by deleting the PlanItem. - - :param plan_id: The Plan's ID. - :param product_id: The Product's ID. 
- """ - query = self.session.query(models.PlanItem).\ - filter_by(plan_id=plan_id, product_id=product_id) - - count = query.delete() - if count == 0: - msg = 'Couldn\'t match plan_id %s or product_id %s' % ( - plan_id, product_id) - raise exceptions.NotFound(msg) - - # Products - def _product(self, row): - product = self._entity(row) - return product - - def create_product(self, ctxt, merchant_id, values): - """ - Add a new Product - - :param merchant_id: The Merchant - :param values: Values describing the new Product - """ - values = values.copy() - - merchant = self._get(models.Merchant, merchant_id) - - properties = values.pop('properties', {}) - - product = models.Product(**values) - product.merchant = merchant - - self.set_properties(product, properties) - - self._save(product) - return self._product(product) - - def list_products(self, ctxt, **kw): - """ - List Products - - :param merchant_id: The Merchant to list it for - """ - rows = self._list(models.Product, **kw) - return map(self._product, rows) - - def get_product(self, ctxt, id_): - """ - Get a Product - - :param id_: The Product ID - """ - row = self._get(models.Product, id_) - return self._product(row) - - def update_product(self, ctxt, id_, values): - """ - Update a Product - - :param id_: The Product ID - :param values: Values to update with - """ - values = values.copy() - properties = values.pop('properties', {}) - - row = self._get(models.Product, id_) - row.update(values) - - self.set_properties(row, properties) - - self._save(row) - return self._product(row) - - def delete_product(self, ctxt, id_): - """ - Delete a Product - - :param id_: Product ID - """ - self._delete(models.Product, id_) - - # Subscriptions - def _subscription(self, row): - subscription = dict(row) - return subscription - - def create_subscription(self, ctxt, values): - """ - Add a new Subscription - - :param merchant_id: The Merchant - :param values: Values describing the new Subscription - """ - subscription = 
models.Subscription(**values) - - self._save(subscription) - return self._subscription(subscription) - - def list_subscriptions(self, ctxt, criterion=None, **kw): - """ - List Subscriptions - - :param merchant_id: The Merchant to list it for - """ - query = self.session.query(models.Subscription) - - # NOTE: Filter needs to be joined for merchant_id - query = db_utils.filter_merchant_by_join( - query, models.Customer, criterion) - - rows = self._list( - query=query, - cls=models.Subscription, - criterion=criterion, - **kw) - - return map(self._subscription, rows) - - def get_subscription(self, ctxt, id_): - """ - Get a Subscription - - :param id_: The Subscription ID - """ - row = self._get(models.Subscription, id_) - return self._subscription(row) - - def update_subscription(self, ctxt, id_, values): - """ - Update a Subscription - - :param id_: The Subscription ID - :param values: Values to update with - """ - row = self._get(models.Subscription, id_) - row.update(values) - - self._save(row) - return self._subscription(row) - - def delete_subscription(self, ctxt, id_): - """ - Delete a Subscription - - :param id_: Subscription ID - """ - self._delete(models.Subscription, id_) diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/README.md b/billingstack/central/storage/impl_sqlalchemy/migration/README.md deleted file mode 100644 index 2867029..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/README.md +++ /dev/null @@ -1,94 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 -# -# Copyright 2012 New Dream Network, LLC (DreamHost) -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# @author Mark McClain (DreamHost) - -The migrations in the alembic/versions contain the changes needed to migrate -from older billingstack releases to newer versions. A migration occurs by executing -a script that details the changes needed to upgrade/downgrade the database. The -migration scripts are ordered so that multiple scripts can run sequentially to -update the database. The scripts are executed by billingstack's migration wrapper -which uses the Alembic library to manage the migration. billingstack supports -migration from Folsom or later. - - -If you are a deployer or developer and want to migrate from Folsom to Grizzly -or later you must first add version tracking to the database: - -$ billingstack-db-manage -config-file /path/to/quantum.conf \ - --config-file /path/to/plugin/config.ini stamp folsom - -You can then upgrade to the latest database version via: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ - --config-file /path/to/plugin/config.ini upgrade head - -To check the current database version: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ - --config-file /path/to/plugin/config.ini current - -To create a script to run the migration offline: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ - --config-file /path/to/plugin/config.ini upgrade head --sql - -To run the offline migration between specific migration versions: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini upgrade \ -: --sql - -Upgrade the database incrementally: -$ 
billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini upgrade --delta <# of revs> - -Downgrade the database by a certain number of revisions: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini downgrade --delta <# of revs> - - -DEVELOPERS: -A database migration script is required when you submit a change to billingstack -that alters the database model definition. The migration script is a special -python file that includes code to update/downgrade the database to match the -changes in the model definition. Alembic will execute these scripts in order to -provide a linear migration path between revision. The billingstack-db-manage command -can be used to generate migration template for you to complete. The operations -in the template are those supported by the Alembic migration library. - -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini revision \ --m "description of revision" \ ---autogenerate - -This generates a prepopulated template with the changes needed to match the -database state with the models. You should inspect the autogenerated template -to ensure that the proper models have been altered. - -In rare circumstances, you may want to start with an empty migration template -and manually author the changes necessary for an upgrade/downgrade. You can -create a blank file via: - -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini revision \ --m "description of revision" - -The migration timeline should remain linear so that there is a clear path when -upgrading/downgrading. 
To verify that the timeline does branch, you can run -this command: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini check_migration - -If the migration path does branch, you can find the branch point via: -$ billingstack-db-manage --config-file /path/to/quantum.conf \ ---config-file /path/to/plugin/config.ini history diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/__init__.py b/billingstack/central/storage/impl_sqlalchemy/migration/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini b/billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini deleted file mode 100644 index 3b390b7..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/alembic.ini +++ /dev/null @@ -1,52 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = %(here)s/alembic - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# default to an empty string because the Quantum migration cli will -# extract the correct value and set it programatically before alemic is fully -# invoked. 
-sqlalchemy.url = - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py deleted file mode 100644 index 5469d1b..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/env.py +++ /dev/null @@ -1,91 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 -# -# Copyright 2012 New Dream Network, LLC (DreamHost) -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# @author: Mark McClain, DreamHost -# Copied: Quantum - -from logging.config import fileConfig - -from alembic import context -from sqlalchemy import create_engine, pool - -from billingstack.central.storage.impl_sqlalchemy.models import ModelBase - - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config -billingstack_config = config.billingstack_config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) - -# set the target for 'autogenerate' support -target_metadata = ModelBase.metadata - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - context.configure(url=billingstack_config['central:sqlalchemy'] - .database_connection) - - with context.begin_transaction(): - context.run_migrations(options=build_options()) - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - engine = create_engine( - billingstack_config['central:sqlalchemy'].database_connection, - poolclass=pool.NullPool) - - connection = engine.connect() - context.configure( - connection=connection, - target_metadata=target_metadata - ) - - try: - with context.begin_transaction(): - context.run_migrations(options=build_options()) - finally: - connection.close() - - -def build_options(): - return {} - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako deleted file mode 100644 index cbb4a7e..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/script.py.mako +++ /dev/null @@ -1,40 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 -# -# Copyright ${create_date.year} OpenStack LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# - -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - - -# revision identifiers, used by Alembic. 
-revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(options=None): - ${upgrades if upgrades else "pass"} - - -def downgrade(config=None): - ${downgrades if downgrades else "pass"} diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README b/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README deleted file mode 100644 index 4686c76..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/alembic_migrations/versions/README +++ /dev/null @@ -1,3 +0,0 @@ -This directory contains the migration scripts for the billingstack project. Please -see the README in billinstack/db/migration on how to use and generate new -migrations. diff --git a/billingstack/central/storage/impl_sqlalchemy/migration/cli.py b/billingstack/central/storage/impl_sqlalchemy/migration/cli.py deleted file mode 100644 index 24008e1..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/migration/cli.py +++ /dev/null @@ -1,125 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 -# -# Copyright 2012 New Dream Network, LLC (DreamHost) -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# @author: Mark McClain, DreamHost -# Copied: Quantum -import os - -from alembic import command as alembic_command -from alembic import config as alembic_config -from alembic import util as alembic_util - -from oslo.config import cfg -from billingstack.openstack.common.gettextutils import _ - - -_db_opts = [ - cfg.StrOpt('database_connection', - default='', - help=_('URL to database')), -] - -CONF = cfg.ConfigOpts() -CONF.register_opts(_db_opts, 'central:sqlalchemy') - - -def do_alembic_command(config, cmd, *args, **kwargs): - try: - getattr(alembic_command, cmd)(config, *args, **kwargs) - except alembic_util.CommandError, e: - alembic_util.err(str(e)) - - -def do_check_migration(config, cmd): - do_alembic_command(config, 'branches') - - -def do_upgrade_downgrade(config, cmd): - if not CONF.command.revision and not CONF.command.delta: - raise SystemExit(_('You must provide a revision or relative delta')) - - revision = CONF.command.revision - - if CONF.command.delta: - sign = '+' if CONF.command.name == 'upgrade' else '-' - revision = sign + str(CONF.command.delta) - else: - revision = CONF.command.revision - - do_alembic_command(config, cmd, revision, sql=CONF.command.sql) - - -def do_stamp(config, cmd): - do_alembic_command(config, cmd, - CONF.command.revision, - sql=CONF.command.sql) - - -def do_revision(config, cmd): - do_alembic_command(config, cmd, - message=CONF.command.message, - autogenerate=CONF.command.autogenerate, - sql=CONF.command.sql) - - -def add_command_parsers(subparsers): - for name in ['current', 'history', 'branches']: - parser = subparsers.add_parser(name) - parser.set_defaults(func=do_alembic_command) - - parser = subparsers.add_parser('check_migration') - parser.set_defaults(func=do_check_migration) - - for name in ['upgrade', 'downgrade']: - parser = subparsers.add_parser(name) - parser.add_argument('--delta', type=int) - parser.add_argument('--sql', action='store_true') - parser.add_argument('revision', nargs='?') - 
parser.set_defaults(func=do_upgrade_downgrade) - - parser = subparsers.add_parser('stamp') - parser.add_argument('--sql', action='store_true') - parser.add_argument('revision') - parser.set_defaults(func=do_stamp) - - parser = subparsers.add_parser('revision') - parser.add_argument('-m', '--message') - parser.add_argument('--autogenerate', action='store_true') - parser.add_argument('--sql', action='store_true') - parser.set_defaults(func=do_revision) - - -command_opt = cfg.SubCommandOpt('command', - title='Command', - help=_('Available commands'), - handler=add_command_parsers) - -CONF.register_cli_opt(command_opt) - - -def main(): - config = alembic_config.Config( - os.path.join(os.path.dirname(__file__), 'alembic.ini') - ) - config.set_main_option( - 'script_location', - 'billingstack.central.storage' - '.impl_sqlalchemy.migration:alembic_migrations') - # attach the Quantum conf to the Alembic conf - config.billingstack_config = CONF - - CONF() - CONF.command.func(config, CONF.command.name) diff --git a/billingstack/central/storage/impl_sqlalchemy/models.py b/billingstack/central/storage/impl_sqlalchemy/models.py deleted file mode 100644 index 72f578f..0000000 --- a/billingstack/central/storage/impl_sqlalchemy/models.py +++ /dev/null @@ -1,228 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from sqlalchemy import Column, ForeignKey, UniqueConstraint -from sqlalchemy import Integer, Unicode -from sqlalchemy.orm import relationship -from sqlalchemy.ext.declarative import declarative_base, declared_attr - -from billingstack import utils -from billingstack.openstack.common import log as logging -from billingstack.sqlalchemy.types import JSON, UUID -from billingstack.sqlalchemy.model_base import ( - ModelBase, BaseMixin, PropertyMixin) - -LOG = logging.getLogger(__name__) - - -BASE = declarative_base(cls=ModelBase) - - -class Currency(BASE): - """ - Allowed currency - """ - name = Column(Unicode(10), nullable=False, primary_key=True) - title = Column(Unicode(100), nullable=False) - - -class Language(BASE): - """ - A Language - """ - name = Column(Unicode(10), nullable=False, primary_key=True) - title = Column(Unicode(100), nullable=False) - - -class ContactInfo(BASE, BaseMixin): - """ - Contact Information about an entity like a User, Customer etc... - """ - - @declared_attr - def __mapper_args__(cls): - name = unicode(utils.capital_to_underscore(cls.__name__)) - return {"polymorphic_on": "info_type", "polymorphic_identity": name} - - info_type = Column(Unicode(20), nullable=False) - - first_name = Column(Unicode(100)) - last_name = Column(Unicode(100)) - company = Column(Unicode(100)) - address1 = Column(Unicode(255)) - address2 = Column(Unicode(255)) - address3 = Column(Unicode(255)) - locality = Column(Unicode(60)) - region = Column(Unicode(60)) - country_name = Column(Unicode(100)) - postal_code = Column(Unicode(40)) - - phone = Column(Unicode(100)) - email = Column(Unicode(100)) - website = Column(Unicode(100)) - - -class CustomerInfo(ContactInfo): - id = Column(UUID, ForeignKey("contact_info.id", - onupdate='CASCADE', ondelete='CASCADE'), - primary_key=True) - - customer_id = Column(UUID, ForeignKey('customer.id'), nullable=False) - - -class Merchant(BASE, BaseMixin): - """ - A Merchant is like a Account in Recurly - """ - name = Column(Unicode(60), 
nullable=False) - title = Column(Unicode(60)) - - customers = relationship('Customer', backref='merchant') - - plans = relationship('Plan', backref='merchant') - products = relationship('Product', backref='merchant') - - currency = relationship('Currency', uselist=False, backref='merchants') - currency_name = Column(Unicode(10), ForeignKey('currency.name'), - nullable=False) - - language = relationship('Language', uselist=False, backref='merchants') - language_name = Column(Unicode(10), ForeignKey('language.name'), - nullable=False) - - -class Customer(BASE, BaseMixin): - """ - A Customer is linked to a Merchant and can have Users related to it - """ - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(60)) - - merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), - nullable=False) - - contact_info = relationship( - 'CustomerInfo', - backref='customer', - primaryjoin='Customer.id == CustomerInfo.customer_id', - lazy='joined') - - default_info = relationship( - 'CustomerInfo', - primaryjoin='Customer.default_info_id == CustomerInfo.id', - uselist=False, - post_update=True) - default_info_id = Column( - UUID, - ForeignKey('customer_info.id', use_alter=True, - onupdate='CASCADE', name='default_info')) - - currency = relationship('Currency', uselist=False, backref='customers') - currency_name = Column(Unicode(10), ForeignKey('currency.name')) - - language = relationship('Language', uselist=False, backref='customers') - language_name = Column(Unicode(10), ForeignKey('language.name')) - - -class Plan(BASE, BaseMixin): - """ - A Product collection like a "Virtual Web Cluster" with 10 servers - """ - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(100)) - description = Column(Unicode(255)) - #provider = Column(Unicode(255), nullable=False) - - plan_items = relationship('PlanItem', backref='plan') - - merchant_id = Column(UUID, ForeignKey('merchant.id', - ondelete='CASCADE'), nullable=False) - - -class 
PlanProperty(BASE, PropertyMixin): - __table_args__ = (UniqueConstraint('name', 'plan_id', name='plan'),) - - plan = relationship('Plan', backref='properties', lazy='joined') - plan_id = Column( - UUID, - ForeignKey('plan.id', - ondelete='CASCADE', - onupdate='CASCADE')) - - -class PlanItem(BASE, BaseMixin): - __table_args__ = (UniqueConstraint('plan_id', 'product_id', name='item'),) - - title = Column(Unicode(100)) - description = Column(Unicode(255)) - - pricing = Column(JSON) - - plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), - onupdate='CASCADE', primary_key=True) - - product = relationship('Product', backref='plan_items', uselist=False) - product_id = Column(UUID, ForeignKey('product.id', onupdate='CASCADE'), - primary_key=True) - - -class Product(BASE, BaseMixin): - """ - A sellable Product, like vCPU hours, bandwidth units - """ - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(100)) - description = Column(Unicode(255)) - - pricing = Column(JSON) - - merchant_id = Column(UUID, ForeignKey('merchant.id', ondelete='CASCADE'), - nullable=False) - - -class ProductProperty(BASE, PropertyMixin): - """ - A Metadata row for something like Product or PlanItem - """ - __table_args__ = (UniqueConstraint('name', 'product_id', name='product'),) - - product = relationship('Product', backref='properties', lazy='joined') - product_id = Column( - UUID, - ForeignKey('product.id', - ondelete='CASCADE', - onupdate='CASCADE')) - - -class Subscription(BASE, BaseMixin): - """ - The thing that ties together stuff that is to be billed - - In other words a Plan which is a collection of Products or a Product. 
- """ - billing_day = Column(Integer) - - resource_id = Column(Unicode(255), nullable=False) - resource_type = Column(Unicode(255), nullable=True) - - plan = relationship('Plan', backref='subscriptions', uselist=False) - plan_id = Column(UUID, ForeignKey('plan.id', ondelete='CASCADE'), - nullable=False) - - customer = relationship('Customer', backref='subscriptions') - customer_id = Column(UUID, ForeignKey('customer.id', ondelete='CASCADE'), - nullable=False) - - payment_method_id = Column(UUID) diff --git a/billingstack/collector/__init__.py b/billingstack/collector/__init__.py deleted file mode 100644 index c3aaa39..0000000 --- a/billingstack/collector/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -cfg.CONF.register_group(cfg.OptGroup( - name='service:collector', title="Configuration for collector Service" -)) - -cfg.CONF.register_opts([ - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('storage-driver', default='sqlalchemy', - help='The storage driver to use'), -], group='service:collector') diff --git a/billingstack/collector/flows/__init__.py b/billingstack/collector/flows/__init__.py deleted file mode 100644 index b2870ed..0000000 --- a/billingstack/collector/flows/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/collector/flows/gateway_configuration.py b/billingstack/collector/flows/gateway_configuration.py deleted file mode 100644 index 0acebd5..0000000 --- a/billingstack/collector/flows/gateway_configuration.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from taskflow.patterns import linear_flow - -from billingstack import exceptions -from billingstack import tasks -from billingstack.collector import states -from billingstack.openstack.common import log -from billingstack.payment_gateway import get_provider - - -ACTION = 'gateway_configuration:create' - -LOG = log.getLogger(__name__) - - -class EntryCreateTask(tasks.RootTask): - def __init__(self, storage, **kw): - super(EntryCreateTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, values): - values['state'] = states.VERIFYING - return self.storage.create_pg_config(ctxt, values) - - -class PrerequirementsTask(tasks.RootTask): - """ - Fetch provider information for use in the next task. - """ - def __init__(self, storage, **kw): - super(PrerequirementsTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, gateway_config): - return self.storage.get_pg_provider( - ctxt, gateway_config['provider_id']) - - -class BackendVerifyTask(tasks.RootTask): - """ - This is the verification task that runs in a threaded flow. - - 1. Load the Provider Plugin via entrypoints - 2. Instantiate the Plugin with the Config - 3. Execute verify_config call - 4. 
Update storage accordingly - """ - def __init__(self, storage, **kw): - super(BackendVerifyTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, gateway_config, gateway_provider): - gateway_provider_cls = get_provider(gateway_provider['name']) - gateway_provider_obj = gateway_provider_cls(gateway_config) - - try: - gateway_provider_obj.verify_config() - except exceptions.ConfigurationError: - self.storage.update_pg_config( - ctxt, gateway_config['id'], {'state': states.INVALID}) - raise - self.storage.update_pg_config( - ctxt, gateway_config['id'], {'state': states.ACTIVE}) - - -def create_flow(storage): - flow = linear_flow.Flow(ACTION + ':initial') - - entry_task = EntryCreateTask( - storage, provides='gateway_config', prefix=ACTION) - flow.add(entry_task) - - backend_flow = linear_flow.Flow(ACTION + ':backend') - prereq_task = PrerequirementsTask( - storage, provides='gateway_provider', prefix=ACTION) - backend_flow.add(prereq_task) - backend_flow.add(BackendVerifyTask(storage, prefix=ACTION)) - - flow.add(backend_flow) - - return flow diff --git a/billingstack/collector/flows/payment_method.py b/billingstack/collector/flows/payment_method.py deleted file mode 100644 index bf011b1..0000000 --- a/billingstack/collector/flows/payment_method.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from taskflow.patterns import linear_flow - -from billingstack import exceptions -from billingstack import tasks -from billingstack.collector import states -from billingstack.openstack.common import log -from billingstack.payment_gateway import get_provider - - -ACTION = 'payment_method:create' - -LOG = log.getLogger(__name__) - - -class EntryCreateTask(tasks.RootTask): - """ - Create the initial entry in the database - """ - def __init__(self, storage, **kw): - super(EntryCreateTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, values): - values['state'] = states.PENDING - return self.storage.create_payment_method(ctxt, values) - - -class PrerequirementsTask(tasks.RootTask): - """ - Task to get the config and the provider from the catalog / database. - """ - def __init__(self, storage, **kw): - super(PrerequirementsTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, values): - data = {} - data['gateway_config'] = self.storage.get_pg_config( - ctxt, values['provider_config_id']) - data['gateway_provider'] = self.storage.get_pg_provider( - ctxt, data['gateway_config']['provider_id']) - return data - - -class BackendCreateTask(tasks.RootTask): - def __init__(self, storage, **kw): - super(BackendCreateTask, self).__init__(**kw) - self.storage = storage - - def execute(self, ctxt, payment_method, gateway_config, gateway_provider): - gateway_provider_cls = get_provider(gateway_provider['name']) - gateway_provider_obj = gateway_provider_cls(gateway_config) - - try: - gateway_provider_obj.create_payment_method( - payment_method['customer_id'], - payment_method) - except exceptions.BadRequest: - self.storage.update_payment_method( - ctxt, payment_method['id'], {'status': states.INVALID}) - raise - - -def create_flow(storage): - """ - The flow for the service to start - """ - flow = linear_flow.Flow(ACTION + ':initial') - - entry_task = EntryCreateTask(storage, provides='payment_method', - prefix=ACTION) - 
flow.add(entry_task) - - backend_flow = linear_flow.Flow(ACTION + ':backend') - prereq_task = PrerequirementsTask( - storage, - provides=set([ - 'gateway_config', - 'gateway_provider']), - prefix=ACTION) - backend_flow.add(prereq_task) - backend_flow.add(BackendCreateTask(storage, prefix=ACTION)) - - flow.add(backend_flow) - - return flow diff --git a/billingstack/collector/rpcapi.py b/billingstack/collector/rpcapi.py deleted file mode 100644 index cb58cd8..0000000 --- a/billingstack/collector/rpcapi.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -from billingstack.openstack.common.rpc import proxy - -rpcapi_opts = [ - cfg.StrOpt('collector_topic', default='collector', - help='the topic collector nodes listen on') -] - -cfg.CONF.register_opts(rpcapi_opts) - - -class CollectorAPI(proxy.RpcProxy): - BASE_RPC_VERSION = '1.0' - - def __init__(self): - super(CollectorAPI, self).__init__( - topic=cfg.CONF.collector_topic, - default_version=self.BASE_RPC_VERSION) - - # PGP - def list_pg_providers(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_providers', - criterion=criterion)) - - def get_pg_provider(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_provider', id_=id_)) - - # PGM - def list_pg_methods(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_methods', - criterion=criterion)) - - def get_pg_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_method', id_=id_)) - - def delete_pg_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_pg_method', id_=id_)) - - # PGC - def create_pg_config(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_pg_config', - values=values)) - - def list_pg_configs(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_pg_configs', - criterion=criterion)) - - def get_pg_config(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_pg_config', id_=id_)) - - def update_pg_config(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_pg_config', id_=id_, - values=values)) - - def delete_pg_config(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_pg_config', id_=id_)) - - # PaymentMethod - def create_payment_method(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_payment_method', - values=values)) - - def list_payment_methods(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_payment_methods', - criterion=criterion)) - - def 
get_payment_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_payment_method', id_=id_)) - - def update_payment_method(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_payment_method', id_=id_, - values=values)) - - def delete_payment_method(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_payment_method', id_=id_)) - - -collector_api = CollectorAPI() diff --git a/billingstack/collector/service.py b/billingstack/collector/service.py deleted file mode 100644 index f35d79c..0000000 --- a/billingstack/collector/service.py +++ /dev/null @@ -1,108 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -A service that does calls towards the PGP web endpoint or so -""" - -import sys - -from oslo.config import cfg -from taskflow.engines import run as run_flow - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.openstack.common import service as os_service -from billingstack.storage.utils import get_connection -from billingstack.central.rpcapi import CentralAPI -from billingstack import service as bs_service -from billingstack.collector.flows import ( - gateway_configuration, payment_method) - - -cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('collector_topic', 'billingstack.collector.rpcapi') -cfg.CONF.import_opt('state_path', 'billingstack.paths') - - -LOG = logging.getLogger(__name__) - - -class Service(rpc_service.Service): - def __init__(self, *args, **kwargs): - kwargs.update( - host=cfg.CONF.host, - topic=cfg.CONF.collector_topic, - ) - - super(Service, self).__init__(*args, **kwargs) - - # Get a storage connection - self.central_api = CentralAPI() - - def start(self): - self.storage_conn = get_connection('collector') - super(Service, self).start() - - def wait(self): - super(Service, self).wait() - self.conn.consumer_thread.wait() - - # PGP - def list_pg_providers(self, ctxt, **kw): - return self.storage_conn.list_pg_providers(ctxt, **kw) - - # PGC - def create_pg_config(self, ctxt, values): - flow = gateway_configuration.create_flow(self.storage_conn) - results = run_flow(flow, store={'values': values, 'ctxt': ctxt}) - return results['gateway_config'] - - def list_pg_configs(self, ctxt, **kw): - return self.storage_conn.list_pg_configs(ctxt, **kw) - - def get_pg_config(self, ctxt, id_): - return self.storage_conn.get_pg_config(ctxt, id_) - - def update_pg_config(self, ctxt, id_, values): - return self.storage_conn.update_pg_config(ctxt, id_, values) - - def delete_pg_config(self, ctxt, id_): - return 
self.storage_conn.delete_pg_config(ctxt, id_) - - # PM - def create_payment_method(self, ctxt, values): - flow = payment_method.create_flow(self.storage_conn) - results = run_flow(flow, store={'values': values, 'ctxt': ctxt}) - return results['payment_method'] - - def list_payment_methods(self, ctxt, **kw): - return self.storage_conn.list_payment_methods(ctxt, **kw) - - def get_payment_method(self, ctxt, id_, **kw): - return self.storage_conn.get_payment_method(ctxt, id_) - - def update_payment_method(self, ctxt, id_, values): - return self.storage_conn.update_payment_method(ctxt, id_, values) - - def delete_payment_method(self, ctxt, id_): - return self.storage_conn.delete_payment_method(ctxt, id_) - - -def launch(): - bs_service.prepare_service(sys.argv) - launcher = os_service.launch(Service(), - cfg.CONF['service:collector'].workers) - launcher.wait() diff --git a/billingstack/collector/states.py b/billingstack/collector/states.py deleted file mode 100644 index d883742..0000000 --- a/billingstack/collector/states.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2013 Hewlett-Packard Development Company, L.P. -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-PENDING = u'PENDING' -VERIFYING = u'VERIFYING' -ACTIVE = u'ACTIVE' -INVALID = u'INVALID' diff --git a/billingstack/collector/storage/__init__.py b/billingstack/collector/storage/__init__.py deleted file mode 100644 index 1fa53f1..0000000 --- a/billingstack/collector/storage/__init__.py +++ /dev/null @@ -1,108 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.storage import base - - -class StorageEngine(base.StorageEngine): - """Base class for the collector storage""" - __plugin_ns__ = 'billingstack.collector.storage' - - -class Connection(base.Connection): - """Define the base API for collector storage""" - def pg_provider_register(self): - """ - Register a Provider and it's Methods - """ - raise NotImplementedError - - def list_pg_providers(self, ctxt, **kw): - """ - List available PG Providers - """ - raise NotImplementedError - - def get_pg_provider(self, ctxt, id_): - """ - Get a PaymentGateway Provider - """ - raise NotImplementedError - - def pg_provider_deregister(self, ctxt, id_): - """ - De-register a PaymentGateway Provider (Plugin) and all it's methods - """ - raise NotImplementedError - - def create_pg_config(self, ctxt, values): - """ - Create a PaymentGateway Configuration - """ - raise NotImplementedError - - def list_pg_configs(self, ctxt, **kw): - """ - List PaymentGateway Configurations - """ - raise NotImplementedError - - def get_pg_config(self, ctxt, id_): - """ - Get 
a PaymentGateway Configuration - """ - raise NotImplementedError - - def update_pg_config(self, ctxt, id_, values): - """ - Update a PaymentGateway Configuration - """ - raise NotImplementedError - - def delete_pg_config(self, ctxt, id_): - """ - Delete a PaymentGateway Configuration - """ - raise NotImplementedError - - def create_payment_method(self, ctxt, values): - """ - Configure a PaymentMethod like a CreditCard - """ - raise NotImplementedError - - def list_payment_methods(self, ctxt, criterion=None, **kw): - """ - List a Customer's PaymentMethods - """ - raise NotImplementedError - - def get_payment_method(self, ctxt, id_, **kw): - """ - Get a Customer's PaymentMethod - """ - raise NotImplementedError - - def update_payment_method(self, ctxt, id_, values): - """ - Update a Customer's PaymentMethod - """ - raise NotImplementedError - - def delete_payment_method(self, ctxt, id_): - """ - Delete a Customer's PaymentMethod - """ - raise NotImplementedError diff --git a/billingstack/collector/storage/impl_sqlalchemy.py b/billingstack/collector/storage/impl_sqlalchemy.py deleted file mode 100644 index 6d06ee7..0000000 --- a/billingstack/collector/storage/impl_sqlalchemy.py +++ /dev/null @@ -1,263 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - - -from sqlalchemy import Column, ForeignKey -from sqlalchemy import Unicode -from sqlalchemy.orm import exc, relationship -from sqlalchemy.ext.declarative import declarative_base - -from billingstack.collector import states -from billingstack.collector.storage import Connection, StorageEngine -from billingstack.openstack.common import log as logging -from billingstack.sqlalchemy.types import JSON, UUID -from billingstack.sqlalchemy import api, model_base, session, utils - - -LOG = logging.getLogger(__name__) - - -BASE = declarative_base(cls=model_base.ModelBase) - - -cfg.CONF.register_group(cfg.OptGroup( - name='collector:sqlalchemy', - title='Config for collector sqlalchemy plugin')) - -cfg.CONF.register_opts(session.SQLOPTS, group='collector:sqlalchemy') - - -class PGProvider(BASE, model_base.BaseMixin): - """ - A Payment Gateway - The thing that processes a Payment Method - - This is registered either by the Admin or by the PaymentGateway plugin - """ - __tablename__ = 'pg_provider' - - name = Column(Unicode(60), nullable=False) - title = Column(Unicode(100)) - description = Column(Unicode(255)) - - properties = Column(JSON) - - methods = relationship( - 'PGMethod', - backref='provider', - lazy='joined') - - def method_map(self): - return self.attrs_map(['provider_methods']) - - -class PGMethod(BASE, model_base.BaseMixin): - """ - This represents a PaymentGatewayProviders method with some information - like name, type etc to describe what is in other settings known as a - "CreditCard" - - Example: - A Visa card: {"type": "creditcard", "visa"} - """ - __tablename__ = 'pg_method' - - name = Column(Unicode(100), nullable=False) - title = Column(Unicode(100)) - description = Column(Unicode(255)) - - type = Column(Unicode(100), nullable=False) - properties = Column(JSON) - - # NOTE: This is so a PGMethod can be "owned" by a Provider, meaning that - # other Providers should not be able to use it. 
- provider_id = Column(UUID, ForeignKey( - 'pg_provider.id', - ondelete='CASCADE', - onupdate='CASCADE')) - - @staticmethod - def make_key(data): - return '%(type)s:%(name)s' % data - - def key(self): - return self.make_key(self) - - -class PGConfig(BASE, model_base.BaseMixin): - """ - A Merchant's configuration of a PaymentGateway like api keys, url and more - """ - __tablename__ = 'pg_config' - - name = Column(Unicode(100), nullable=False) - title = Column(Unicode(100)) - - properties = Column(JSON) - - # Link to the Merchant - merchant_id = Column(UUID, nullable=False) - - provider = relationship('PGProvider', - backref='merchant_configurations') - provider_id = Column(UUID, ForeignKey('pg_provider.id', - onupdate='CASCADE'), - nullable=False) - - state = Column(Unicode(20), default=states.PENDING) - - -class PaymentMethod(BASE, model_base.BaseMixin): - name = Column(Unicode(255), nullable=False) - - identifier = Column(Unicode(255), nullable=False) - expires = Column(Unicode(255)) - - properties = Column(JSON) - - customer_id = Column(UUID, nullable=False) - - provider_config = relationship('PGConfig', backref='payment_methods', - lazy='joined') - provider_config_id = Column(UUID, ForeignKey('pg_config.id', - onupdate='CASCADE'), nullable=False) - - state = Column(Unicode(20), default=states.PENDING) - - -class SQLAlchemyEngine(StorageEngine): - __plugin_name__ = 'sqlalchemy' - - def get_connection(self): - return Connection() - - -class Connection(Connection, api.HelpersMixin): - def __init__(self): - self.setup('collector:sqlalchemy') - - def base(self): - return BASE - - # Payment Gateway Providers - def pg_provider_register(self, ctxt, values): - values = values.copy() - methods = values.pop('methods', []) - - query = self.session.query(PGProvider)\ - .filter_by(name=values['name']) - - try: - provider = query.one() - except exc.NoResultFound: - provider = PGProvider() - - provider.update(values) - - self._set_provider_methods(ctxt, provider, methods) - - 
self._save(provider) - return self._dict(provider, extra=['methods']) - - def list_pg_providers(self, ctxt, **kw): - rows = self._list(PGProvider, **kw) - return [self._dict(r, extra=['methods']) for r in rows] - - def get_pg_provider(self, ctxt, id_, **kw): - row = self._get(PGProvider, id_) - return self._dict(row, extra=['methods']) - - def pg_provider_deregister(self, ctxt, id_): - self._delete(PGProvider, id_) - - def _get_provider_methods(self, provider): - """ - Used internally to form a "Map" of the Providers methods - """ - methods = {} - for m in provider.methods: - methods[m.key()] = m - return methods - - def _set_provider_methods(self, ctxt, provider, config_methods): - """Helper method for setting the Methods for a Provider""" - existing = self._get_provider_methods(provider) - for method in config_methods: - self._set_method(provider, method, existing) - - def _set_method(self, provider, method, existing): - key = PGMethod.make_key(method) - - if key in existing: - existing[key].update(method) - else: - row = PGMethod(**method) - provider.methods.append(row) - - # Payment Gateway Configuration - def create_pg_config(self, ctxt, values): - row = PGConfig(**values) - - self._save(row) - return dict(row) - - def list_pg_configs(self, ctxt, **kw): - rows = self._list(PGConfig, **kw) - return map(dict, rows) - - def get_pg_config(self, ctxt, id_, **kw): - row = self._get(PGConfig, id_, **kw) - return dict(row) - - def update_pg_config(self, ctxt, id_, values): - row = self._update(PGConfig, id_, values) - return dict(row) - - def delete_pg_config(self, ctxt, id_): - self._delete(PGConfig, id_) - - # PaymentMethod - def create_payment_method(self, ctxt, values): - row = PaymentMethod(**values) - - self._save(row) - return self._dict(row) - - def list_payment_methods(self, ctxt, criterion=None, **kw): - query = self.session.query(PaymentMethod) - - # NOTE: Filter needs to be joined for merchant_id - query = utils.filter_merchant_by_join( - query, PGConfig, 
criterion) - - rows = self._list( - cls=PaymentMethod, - query=query, - criterion=criterion, - **kw) - - return [self._dict(row) for row in rows] - - def get_payment_method(self, ctxt, id_, **kw): - row = self._get_id_or_name(PaymentMethod, id_) - return self._dict(row) - - def update_payment_method(self, ctxt, id_, values): - row = self._update(PaymentMethod, id_, values) - return self._dict(row) - - def delete_payment_method(self, ctxt, id_): - self._delete(PaymentMethod, id_) diff --git a/billingstack/conf.py b/billingstack/conf.py deleted file mode 100644 index 0e56443..0000000 --- a/billingstack/conf.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import os -from oslo.config import cfg - -from billingstack.openstack.common import rpc - -cfg.CONF.register_opts([ - cfg.StrOpt('pybasedir', - default=os.path.abspath(os.path.join(os.path.dirname(__file__), - '../')), - help='Directory where the nova python module is installed'), - cfg.StrOpt('state-path', default='$pybasedir', - help='Top-level directory for maintaining state') -]) - - -rpc.set_defaults(control_exchange='billingstack') diff --git a/billingstack/exceptions.py b/billingstack/exceptions.py deleted file mode 100644 index 3ca9d50..0000000 --- a/billingstack/exceptions.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright 2012 Managed I.T. 
-# -# Author: Kiall Mac Innes -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import re - - -class Base(Exception): - error_code = 500 - message_tmpl = None - - def __init__(self, message='', *args, **kw): - self.message = message % kw if self.message_tmpl else message - - self.errors = kw.pop('errors', None) - super(Base, self).__init__(self.message) - - @property - def error_type(self): - name = "_".join(l.lower() for l in re.findall('[A-Z][^A-Z]*', - self.__class__.__name__)) - name = re.sub('_+remote$', '', name) - return name - - def __str__(self): - return self.message - - def get_message(self): - """ - Return the exception message or None - """ - if unicode(self): - return unicode(self) - else: - return None - - -class NotImplemented(Base, NotImplementedError): - pass - - -class ConfigurationError(Base): - pass - - -class BadRequest(Base): - error_code = 400 - - -class InvalidObject(BadRequest): - pass - - -class InvalidSortKey(BadRequest): - pass - - -class InvalidQueryField(BadRequest): - pass - - -class InvalidOperator(BadRequest): - pass - - -class Forbidden(Base): - pass - - -class Duplicate(Base): - error_code = 409 - - -class NotFound(Base): - error_code = 404 diff --git a/billingstack/manage/__init__.py b/billingstack/manage/__init__.py deleted file mode 100644 index 92d5c66..0000000 --- a/billingstack/manage/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2012 Managed I.T. 
-# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -from oslo.config import cfg -from cliff.app import App -from cliff.commandmanager import CommandManager -from billingstack.version import version_info as version - - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - - -class Shell(App): - def __init__(self): - super(Shell, self).__init__( - description='BillingStack Management CLI', - version=version.version_string(), - command_manager=CommandManager('billingstack.manage') - ) diff --git a/billingstack/manage/base.py b/billingstack/manage/base.py deleted file mode 100644 index e28e566..0000000 --- a/billingstack/manage/base.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -from cliff.command import Command as CliffCommand -from cliff.lister import Lister -from cliff.show import ShowOne -from billingstack import utils - - -class Command(CliffCommand): - - def run(self, parsed_args): - #self.context = billingstackContext.get_admin_context() - - return super(Command, self).run(parsed_args) - - def execute(self, parsed_args): - """ - Execute something, this is since we overload self.take_action() - in order to format the data - - This method __NEEDS__ to be overloaded! - - :param parsed_args: The parsed args that are given by take_action() - """ - raise NotImplementedError - - def post_execute(self, data): - """ - Format the results locally if needed, by default we just return data - - :param data: Whatever is returned by self.execute() - """ - return data - - def setup(self, parsed_args): - pass - - def take_action(self, parsed_args): - # TODO: Common Exception Handling Here - self.setup(parsed_args) - results = self.execute(parsed_args) - return self.post_execute(results) - - -class ListCommand(Command, Lister): - def post_execute(self, results): - if len(results) > 0: - columns = utils.get_columns(results) - data = [utils.get_item_properties(i, columns) for i in results] - return columns, data - else: - return [], () - - -class GetCommand(Command, ShowOne): - def post_execute(self, results): - return results.keys(), results.values() - - -class CreateCommand(Command, ShowOne): - def post_execute(self, results): - return results.keys(), results.values() - - -class UpdateCommand(Command, ShowOne): - def post_execute(self, results): - return results.keys(), results.values() - - -class DeleteCommand(Command): - pass diff --git a/billingstack/manage/database.py b/billingstack/manage/database.py deleted file mode 100644 index c2147cb..0000000 --- a/billingstack/manage/database.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 
(the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -from billingstack.openstack.common import log -from billingstack.manage.base import Command -from billingstack.storage.utils import get_connection - - -LOG = log.getLogger(__name__) - - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - - -class DatabaseCommand(Command): - """ - A Command that uses a storage connection to do some stuff - """ - def get_connection(self, service): - return get_connection(service) diff --git a/billingstack/manage/provider.py b/billingstack/manage/provider.py deleted file mode 100644 index d23e1c3..0000000 --- a/billingstack/manage/provider.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from billingstack.openstack.common.context import get_admin_context -from billingstack.payment_gateway import register_providers -from billingstack.manage.base import ListCommand -from billingstack.manage.database import DatabaseCommand - - -class ProvidersRegister(DatabaseCommand): - """ - Register Payment Gateway Providers - """ - def execute(self, parsed_args): - context = get_admin_context() - register_providers(context) - - -class ProvidersList(DatabaseCommand, ListCommand): - def execute(self, parsed_args): - context = get_admin_context() - conn = self.get_connection('collector') - - data = conn.list_pg_providers(context) - - for p in data: - keys = ['type', 'name'] - methods = [":".join([m[k] for k in keys]) for m in p['methods']] - p['methods'] = ", ".join(methods) - return data diff --git a/billingstack/netconf.py b/billingstack/netconf.py deleted file mode 100644 index 21233f6..0000000 --- a/billingstack/netconf.py +++ /dev/null @@ -1,59 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import socket - -from oslo.config import cfg - -CONF = cfg.CONF - - -def _get_my_ip(): - """ - Returns the actual ip of the local machine. 
- - This code figures out what source address would be used if some traffic - were to be sent out to some well known address on the Internet. In this - case, a Google DNS server is used, but the specific address does not - matter much. No traffic is actually sent. - """ - try: - csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - csock.connect(('8.8.8.8', 80)) - (addr, port) = csock.getsockname() - csock.close() - return addr - except socket.error: - return "127.0.0.1" - - -netconf_opts = [ - cfg.StrOpt('my_ip', - default=_get_my_ip(), - help='ip address of this host'), - cfg.StrOpt('host', - default=socket.getfqdn(), - help='Name of this node. This can be an opaque identifier. ' - 'It is not necessarily a hostname, FQDN, or IP address. ' - 'However, the node name must be valid within ' - 'an AMQP key, and if using ZeroMQ, a valid ' - 'hostname, FQDN, or IP address') -] - -CONF.register_opts(netconf_opts) diff --git a/billingstack/openstack/__init__.py b/billingstack/openstack/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/openstack/common/__init__.py b/billingstack/openstack/common/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/openstack/common/context.py b/billingstack/openstack/common/context.py deleted file mode 100644 index d074b02..0000000 --- a/billingstack/openstack/common/context.py +++ /dev/null @@ -1,86 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Simple class that stores security context information in the web request. - -Projects should subclass this class if they wish to enhance the request -context or provide additional information in their specific WSGI pipeline. -""" - -import itertools - -from billingstack.openstack.common import uuidutils - - -def generate_request_id(): - return 'req-%s' % uuidutils.generate_uuid() - - -class RequestContext(object): - - """Helper class to represent useful information about a request context. - - Stores information about the security context under which the user - accesses the system, as well as additional request information. - """ - - def __init__(self, auth_token=None, user=None, tenant=None, is_admin=False, - read_only=False, show_deleted=False, request_id=None, - instance_uuid=None): - self.auth_token = auth_token - self.user = user - self.tenant = tenant - self.is_admin = is_admin - self.read_only = read_only - self.show_deleted = show_deleted - self.instance_uuid = instance_uuid - if not request_id: - request_id = generate_request_id() - self.request_id = request_id - - def to_dict(self): - return {'user': self.user, - 'tenant': self.tenant, - 'is_admin': self.is_admin, - 'read_only': self.read_only, - 'show_deleted': self.show_deleted, - 'auth_token': self.auth_token, - 'request_id': self.request_id, - 'instance_uuid': self.instance_uuid} - - -def get_admin_context(show_deleted=False): - context = RequestContext(None, - tenant=None, - is_admin=True, - show_deleted=show_deleted) - return context - - -def get_context_from_function_and_args(function, args, kwargs): - """Find an arg of type RequestContext and return it. - - This is useful in a couple of decorators where we don't - know much about the function we're wrapping. 
- """ - - for arg in itertools.chain(kwargs.values(), args): - if isinstance(arg, RequestContext): - return arg - - return None diff --git a/billingstack/openstack/common/crypto/__init__.py b/billingstack/openstack/common/crypto/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/openstack/common/crypto/utils.py b/billingstack/openstack/common/crypto/utils.py deleted file mode 100644 index 08e2f4c..0000000 --- a/billingstack/openstack/common/crypto/utils.py +++ /dev/null @@ -1,179 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import base64 - -from Crypto.Hash import HMAC -from Crypto import Random - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils - - -class CryptoutilsException(Exception): - """Generic Exception for Crypto utilities.""" - - message = _("An unknown error occurred in crypto utils.") - - -class CipherBlockLengthTooBig(CryptoutilsException): - """The block size is too big.""" - - def __init__(self, requested, permitted): - msg = _("Block size of %(given)d is too big, max = %(maximum)d") - message = msg % {'given': requested, 'maximum': permitted} - super(CryptoutilsException, self).__init__(message) - - -class HKDFOutputLengthTooLong(CryptoutilsException): - """The amount of Key Material asked is too much.""" - - def __init__(self, requested, permitted): - msg = _("Length of %(given)d is too long, max = %(maximum)d") - message = msg % {'given': requested, 'maximum': permitted} - super(CryptoutilsException, self).__init__(message) - - -class HKDF(object): - """An HMAC-based Key Derivation Function implementation (RFC5869) - - This class creates an object that allows to use HKDF to derive keys. - """ - - def __init__(self, hashtype='SHA256'): - self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype) - self.max_okm_length = 255 * self.hashfn.digest_size - - def extract(self, ikm, salt=None): - """An extract function that can be used to derive a robust key given - weak Input Key Material (IKM) which could be a password. - Returns a pseudorandom key (of HashLen octets) - - :param ikm: input keying material (ex a password) - :param salt: optional salt value (a non-secret random value) - """ - if salt is None: - salt = '\x00' * self.hashfn.digest_size - - return HMAC.new(salt, ikm, self.hashfn).digest() - - def expand(self, prk, info, length): - """An expand function that will return arbitrary length output that can - be used as keys. - Returns a buffer usable as key material. 
- - :param prk: a pseudorandom key of at least HashLen octets - :param info: optional string (can be a zero-length string) - :param length: length of output keying material (<= 255 * HashLen) - """ - if length > self.max_okm_length: - raise HKDFOutputLengthTooLong(length, self.max_okm_length) - - N = (length + self.hashfn.digest_size - 1) / self.hashfn.digest_size - - okm = "" - tmp = "" - for block in range(1, N + 1): - tmp = HMAC.new(prk, tmp + info + chr(block), self.hashfn).digest() - okm += tmp - - return okm[:length] - - -MAX_CB_SIZE = 256 - - -class SymmetricCrypto(object): - """Symmetric Key Crypto object. - - This class creates a Symmetric Key Crypto object that can be used - to encrypt, decrypt, or sign arbitrary data. - - :param enctype: Encryption Cipher name (default: AES) - :param hashtype: Hash/HMAC type name (default: SHA256) - """ - - def __init__(self, enctype='AES', hashtype='SHA256'): - self.cipher = importutils.import_module('Crypto.Cipher.' + enctype) - self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype) - - def new_key(self, size): - return Random.new().read(size) - - def encrypt(self, key, msg, b64encode=True): - """Encrypt the provided msg and returns the cyphertext optionally - base64 encoded. - - Uses AES-128-CBC with a Random IV by default. - - The plaintext is padded to reach blocksize length. - The last byte of the block is the length of the padding. - The length of the padding does not include the length byte itself. - - :param key: The Encryption key. - :param msg: the plain text. - - :returns encblock: a block of encrypted data. - """ - iv = Random.new().read(self.cipher.block_size) - cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv) - - # CBC mode requires a fixed block size. Append padding and length of - # padding. 
- if self.cipher.block_size > MAX_CB_SIZE: - raise CipherBlockLengthTooBig(self.cipher.block_size, MAX_CB_SIZE) - r = len(msg) % self.cipher.block_size - padlen = self.cipher.block_size - r - 1 - msg += '\x00' * padlen - msg += chr(padlen) - - enc = iv + cipher.encrypt(msg) - if b64encode: - enc = base64.b64encode(enc) - return enc - - def decrypt(self, key, msg, b64decode=True): - """Decrypts the provided ciphertext, optionally base 64 encoded, and - returns the plaintext message, after padding is removed. - - Uses AES-128-CBC with an IV by default. - - :param key: The Encryption key. - :param msg: the ciphetext, the first block is the IV - """ - if b64decode: - msg = base64.b64decode(msg) - iv = msg[:self.cipher.block_size] - cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv) - - padded = cipher.decrypt(msg[self.cipher.block_size:]) - l = ord(padded[-1]) + 1 - plain = padded[:-l] - return plain - - def sign(self, key, msg, b64encode=True): - """Signs a message string and returns a base64 encoded signature. - - Uses HMAC-SHA-256 by default. - - :param key: The Signing key. - :param msg: the message to sign. - """ - h = HMAC.new(key, msg, self.hashfn) - out = h.digest() - if b64encode: - out = base64.b64encode(out) - return out diff --git a/billingstack/openstack/common/db/__init__.py b/billingstack/openstack/common/db/__init__.py deleted file mode 100644 index 1b9b60d..0000000 --- a/billingstack/openstack/common/db/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 Cloudscaling Group, Inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/openstack/common/db/api.py b/billingstack/openstack/common/db/api.py deleted file mode 100644 index 9505ea8..0000000 --- a/billingstack/openstack/common/db/api.py +++ /dev/null @@ -1,106 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2013 Rackspace Hosting -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Multiple DB API backend support. - -Supported configuration options: - -The following two parameters are in the 'database' group: -`backend`: DB backend name or full module path to DB backend module. -`use_tpool`: Enable thread pooling of DB API calls. - -A DB backend module should implement a method named 'get_backend' which -takes no arguments. The method can return any object that implements DB -API methods. - -*NOTE*: There are bugs in eventlet when using tpool combined with -threading locks. The python logging module happens to use such locks. To -work around this issue, be sure to specify thread=False with -eventlet.monkey_patch(). 
- -A bug for eventlet has been filed here: - -https://bitbucket.org/eventlet/eventlet/issue/137/ -""" -import functools - -from oslo.config import cfg - -from billingstack.openstack.common import importutils -from billingstack.openstack.common import lockutils - - -db_opts = [ - cfg.StrOpt('backend', - default='sqlalchemy', - deprecated_name='db_backend', - deprecated_group='DEFAULT', - help='The backend to use for db'), - cfg.BoolOpt('use_tpool', - default=False, - deprecated_name='dbapi_use_tpool', - deprecated_group='DEFAULT', - help='Enable the experimental use of thread pooling for ' - 'all DB API calls') -] - -CONF = cfg.CONF -CONF.register_opts(db_opts, 'database') - - -class DBAPI(object): - def __init__(self, backend_mapping=None): - if backend_mapping is None: - backend_mapping = {} - self.__backend = None - self.__backend_mapping = backend_mapping - - @lockutils.synchronized('dbapi_backend', 'billingstack-') - def __get_backend(self): - """Get the actual backend. May be a module or an instance of - a class. Doesn't matter to us. We do this synchronized as it's - possible multiple greenthreads started very quickly trying to do - DB calls and eventlet can switch threads before self.__backend gets - assigned. - """ - if self.__backend: - # Another thread assigned it - return self.__backend - backend_name = CONF.database.backend - self.__use_tpool = CONF.database.use_tpool - if self.__use_tpool: - from eventlet import tpool - self.__tpool = tpool - # Import the untranslated name if we don't have a - # mapping. 
- backend_path = self.__backend_mapping.get(backend_name, - backend_name) - backend_mod = importutils.import_module(backend_path) - self.__backend = backend_mod.get_backend() - return self.__backend - - def __getattr__(self, key): - backend = self.__backend or self.__get_backend() - attr = getattr(backend, key) - if not self.__use_tpool or not hasattr(attr, '__call__'): - return attr - - def tpool_wrapper(*args, **kwargs): - return self.__tpool.execute(attr, *args, **kwargs) - - functools.update_wrapper(tpool_wrapper, attr) - return tpool_wrapper diff --git a/billingstack/openstack/common/db/exception.py b/billingstack/openstack/common/db/exception.py deleted file mode 100644 index 01a847a..0000000 --- a/billingstack/openstack/common/db/exception.py +++ /dev/null @@ -1,51 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""DB related custom exceptions.""" - -from billingstack.openstack.common.gettextutils import _ # noqa - - -class DBError(Exception): - """Wraps an implementation specific exception.""" - def __init__(self, inner_exception=None): - self.inner_exception = inner_exception - super(DBError, self).__init__(str(inner_exception)) - - -class DBDuplicateEntry(DBError): - """Wraps an implementation specific exception.""" - def __init__(self, columns=[], inner_exception=None): - self.columns = columns - super(DBDuplicateEntry, self).__init__(inner_exception) - - -class DBDeadlock(DBError): - def __init__(self, inner_exception=None): - super(DBDeadlock, self).__init__(inner_exception) - - -class DBInvalidUnicodeParameter(Exception): - message = _("Invalid Parameter: " - "Unicode is not supported by the current database.") - - -class DbMigrationError(DBError): - """Wraps migration specific exception.""" - def __init__(self, message=None): - super(DbMigrationError, self).__init__(str(message)) diff --git a/billingstack/openstack/common/db/sqlalchemy/__init__.py b/billingstack/openstack/common/db/sqlalchemy/__init__.py deleted file mode 100644 index 1b9b60d..0000000 --- a/billingstack/openstack/common/db/sqlalchemy/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 Cloudscaling Group, Inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
diff --git a/billingstack/openstack/common/db/sqlalchemy/models.py b/billingstack/openstack/common/db/sqlalchemy/models.py deleted file mode 100644 index a188a7a..0000000 --- a/billingstack/openstack/common/db/sqlalchemy/models.py +++ /dev/null @@ -1,103 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2011 X.commerce, a business unit of eBay Inc. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Piston Cloud Computing, Inc. -# Copyright 2012 Cloudscaling Group, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -SQLAlchemy models. 
-""" - -from sqlalchemy import Column, Integer -from sqlalchemy import DateTime -from sqlalchemy.orm import object_mapper - -from billingstack.openstack.common.db.sqlalchemy.session import get_session -from billingstack.openstack.common import timeutils - - -class ModelBase(object): - """Base class for models.""" - __table_initialized__ = False - created_at = Column(DateTime, default=timeutils.utcnow) - updated_at = Column(DateTime, onupdate=timeutils.utcnow) - metadata = None - - def save(self, session=None): - """Save this object.""" - if not session: - session = get_session() - # NOTE(boris-42): This part of code should be look like: - # sesssion.add(self) - # session.flush() - # But there is a bug in sqlalchemy and eventlet that - # raises NoneType exception if there is no running - # transaction and rollback is called. As long as - # sqlalchemy has this bug we have to create transaction - # explicity. - with session.begin(subtransactions=True): - session.add(self) - session.flush() - - def __setitem__(self, key, value): - setattr(self, key, value) - - def __getitem__(self, key): - return getattr(self, key) - - def get(self, key, default=None): - return getattr(self, key, default) - - def __iter__(self): - columns = dict(object_mapper(self).columns).keys() - # NOTE(russellb): Allow models to specify other keys that can be looked - # up, beyond the actual db columns. An example would be the 'name' - # property for an Instance. - if hasattr(self, '_extra_keys'): - columns.extend(self._extra_keys()) - self._i = iter(columns) - return self - - def next(self): - n = self._i.next() - return n, getattr(self, n) - - def update(self, values): - """Make the model object behave like a dict.""" - for k, v in values.iteritems(): - setattr(self, k, v) - - def iteritems(self): - """Make the model object behave like a dict. 
- - Includes attributes from joins.""" - local = dict(self) - joined = dict([(k, v) for k, v in self.__dict__.iteritems() - if not k[0] == '_']) - local.update(joined) - return local.iteritems() - - -class SoftDeleteMixin(object): - deleted_at = Column(DateTime) - deleted = Column(Integer, default=0) - - def soft_delete(self, session=None): - """Mark this object as deleted.""" - self.deleted = self.id - self.deleted_at = timeutils.utcnow() - self.save(session=session) diff --git a/billingstack/openstack/common/db/sqlalchemy/utils.py b/billingstack/openstack/common/db/sqlalchemy/utils.py deleted file mode 100644 index c8ab93e..0000000 --- a/billingstack/openstack/common/db/sqlalchemy/utils.py +++ /dev/null @@ -1,132 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2010-2011 OpenStack LLC. -# Copyright 2012 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Implementation of paginate query.""" - -import sqlalchemy - -from openstack.common.gettextutils import _ -from openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -class InvalidSortKey(Exception): - message = _("Sort key supplied was not valid.") - - -# copy from glance/db/sqlalchemy/api.py -def paginate_query(query, model, limit, sort_keys, marker=None, - sort_dir=None, sort_dirs=None): - """Returns a query with sorting / pagination criteria added. - - Pagination works by requiring a unique sort_key, specified by sort_keys. - (If sort_keys is not unique, then we risk looping through values.) - We use the last row in the previous page as the 'marker' for pagination. - So we must return values that follow the passed marker in the order. - With a single-valued sort_key, this would be easy: sort_key > X. - With a compound-values sort_key, (k1, k2, k3) we must do this to repeat - the lexicographical ordering: - (k1 > X1) or (k1 == X1 && k2 > X2) or (k1 == X1 && k2 == X2 && k3 > X3) - - We also have to cope with different sort_directions. - - Typically, the id of the last row is used as the client-facing pagination - marker, then the actual marker object must be fetched from the db and - passed in to us as marker. - - :param query: the query object to which we should add paging/sorting - :param model: the ORM model class - :param limit: maximum number of items to return - :param sort_keys: array of attributes by which results should be sorted - :param marker: the last item of the previous page; we returns the next - results after this value. - :param sort_dir: direction in which results should be sorted (asc, desc) - :param sort_dirs: per-column array of sort_dirs, corresponding to sort_keys - - :rtype: sqlalchemy.orm.query.Query - :return: The query with sorting/pagination added. 
- """ - - if 'id' not in sort_keys: - # TODO(justinsb): If this ever gives a false-positive, check - # the actual primary key, rather than assuming its id - LOG.warn(_('Id not in sort_keys; is sort_keys unique?')) - - assert(not (sort_dir and sort_dirs)) - - # Default the sort direction to ascending - if sort_dirs is None and sort_dir is None: - sort_dir = 'asc' - - # Ensure a per-column sort direction - if sort_dirs is None: - sort_dirs = [sort_dir for _sort_key in sort_keys] - - assert(len(sort_dirs) == len(sort_keys)) - - # Add sorting - for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs): - sort_dir_func = { - 'asc': sqlalchemy.asc, - 'desc': sqlalchemy.desc, - }[current_sort_dir] - - try: - sort_key_attr = getattr(model, current_sort_key) - except AttributeError: - raise InvalidSortKey() - query = query.order_by(sort_dir_func(sort_key_attr)) - - # Add pagination - if marker is not None: - marker_values = [] - for sort_key in sort_keys: - v = getattr(marker, sort_key) - marker_values.append(v) - - # Build up an array of sort criteria as in the docstring - criteria_list = [] - for i in xrange(0, len(sort_keys)): - crit_attrs = [] - for j in xrange(0, i): - model_attr = getattr(model, sort_keys[j]) - crit_attrs.append((model_attr == marker_values[j])) - - model_attr = getattr(model, sort_keys[i]) - if sort_dirs[i] == 'desc': - crit_attrs.append((model_attr < marker_values[i])) - elif sort_dirs[i] == 'asc': - crit_attrs.append((model_attr > marker_values[i])) - else: - raise ValueError(_("Unknown sort direction, " - "must be 'desc' or 'asc'")) - - criteria = sqlalchemy.sql.and_(*crit_attrs) - criteria_list.append(criteria) - - f = sqlalchemy.sql.or_(*criteria_list) - query = query.filter(f) - - if limit is not None: - query = query.limit(limit) - - return query diff --git a/billingstack/openstack/common/eventlet_backdoor.py b/billingstack/openstack/common/eventlet_backdoor.py deleted file mode 100644 index e7d550a..0000000 --- 
a/billingstack/openstack/common/eventlet_backdoor.py +++ /dev/null @@ -1,146 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2012 OpenStack Foundation. -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import print_function - -import errno -import gc -import os -import pprint -import socket -import sys -import traceback - -import eventlet -import eventlet.backdoor -import greenlet -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging - -help_for_backdoor_port = ( - "Acceptable values are 0, , and :, where 0 results " - "in listening on a random tcp port number; results in listening " - "on the specified port number (and not enabling backdoor if that port " - "is in use); and : results in listening on the smallest " - "unused port number within the specified range of port numbers. The " - "chosen port is displayed in the service's log file.") -eventlet_backdoor_opts = [ - cfg.StrOpt('backdoor_port', - default=None, - help="Enable eventlet backdoor. 
%s" % help_for_backdoor_port) -] - -CONF = cfg.CONF -CONF.register_opts(eventlet_backdoor_opts) -LOG = logging.getLogger(__name__) - - -class EventletBackdoorConfigValueError(Exception): - def __init__(self, port_range, help_msg, ex): - msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. ' - '%(help)s' % - {'range': port_range, 'ex': ex, 'help': help_msg}) - super(EventletBackdoorConfigValueError, self).__init__(msg) - self.port_range = port_range - - -def _dont_use_this(): - print("Don't use this, just disconnect instead") - - -def _find_objects(t): - return filter(lambda o: isinstance(o, t), gc.get_objects()) - - -def _print_greenthreads(): - for i, gt in enumerate(_find_objects(greenlet.greenlet)): - print(i, gt) - traceback.print_stack(gt.gr_frame) - print() - - -def _print_nativethreads(): - for threadId, stack in sys._current_frames().items(): - print(threadId) - traceback.print_stack(stack) - print() - - -def _parse_port_range(port_range): - if ':' not in port_range: - start, end = port_range, port_range - else: - start, end = port_range.split(':', 1) - try: - start, end = int(start), int(end) - if end < start: - raise ValueError - return start, end - except ValueError as ex: - raise EventletBackdoorConfigValueError(port_range, ex, - help_for_backdoor_port) - - -def _listen(host, start_port, end_port, listen_func): - try_port = start_port - while True: - try: - return listen_func((host, try_port)) - except socket.error as exc: - if (exc.errno != errno.EADDRINUSE or - try_port >= end_port): - raise - try_port += 1 - - -def initialize_if_enabled(): - backdoor_locals = { - 'exit': _dont_use_this, # So we don't exit the entire process - 'quit': _dont_use_this, # So we don't exit the entire process - 'fo': _find_objects, - 'pgt': _print_greenthreads, - 'pnt': _print_nativethreads, - } - - if CONF.backdoor_port is None: - return None - - start_port, end_port = _parse_port_range(str(CONF.backdoor_port)) - - # NOTE(johannes): The standard sys.displayhook 
will print the value of - # the last expression and set it to __builtin__._, which overwrites - # the __builtin__._ that gettext sets. Let's switch to using pprint - # since it won't interact poorly with gettext, and it's easier to - # read the output too. - def displayhook(val): - if val is not None: - pprint.pprint(val) - sys.displayhook = displayhook - - sock = _listen('localhost', start_port, end_port, eventlet.listen) - - # In the case of backdoor port being zero, a port number is assigned by - # listen(). In any case, pull the port number out here. - port = sock.getsockname()[1] - LOG.info(_('Eventlet backdoor listening on %(port)s for process %(pid)d') % - {'port': port, 'pid': os.getpid()}) - eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock, - locals=backdoor_locals) - return port diff --git a/billingstack/openstack/common/exception.py b/billingstack/openstack/common/exception.py deleted file mode 100644 index df4c277..0000000 --- a/billingstack/openstack/common/exception.py +++ /dev/null @@ -1,139 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -""" -Exceptions common to OpenStack projects -""" - -import logging - -from billingstack.openstack.common.gettextutils import _ # noqa - -_FATAL_EXCEPTION_FORMAT_ERRORS = False - - -class Error(Exception): - def __init__(self, message=None): - super(Error, self).__init__(message) - - -class ApiError(Error): - def __init__(self, message='Unknown', code='Unknown'): - self.api_message = message - self.code = code - super(ApiError, self).__init__('%s: %s' % (code, message)) - - -class NotFound(Error): - pass - - -class UnknownScheme(Error): - - msg_fmt = "Unknown scheme '%s' found in URI" - - def __init__(self, scheme): - msg = self.msg_fmt % scheme - super(UnknownScheme, self).__init__(msg) - - -class BadStoreUri(Error): - - msg_fmt = "The Store URI %s was malformed. Reason: %s" - - def __init__(self, uri, reason): - msg = self.msg_fmt % (uri, reason) - super(BadStoreUri, self).__init__(msg) - - -class Duplicate(Error): - pass - - -class NotAuthorized(Error): - pass - - -class NotEmpty(Error): - pass - - -class Invalid(Error): - pass - - -class BadInputError(Exception): - """Error resulting from a client sending bad input to a server""" - pass - - -class MissingArgumentError(Error): - pass - - -class DatabaseMigrationError(Error): - pass - - -class ClientConnectionError(Exception): - """Error resulting from a client connecting to a server""" - pass - - -def wrap_exception(f): - def _wrap(*args, **kw): - try: - return f(*args, **kw) - except Exception as e: - if not isinstance(e, Error): - logging.exception(_('Uncaught exception')) - raise Error(str(e)) - raise - _wrap.func_name = f.func_name - return _wrap - - -class OpenstackException(Exception): - """Base Exception class. - - To correctly use this class, inherit from it and define - a 'msg_fmt' property. That message will get printf'd - with the keyword arguments provided to the constructor. 
- """ - msg_fmt = "An unknown exception occurred" - - def __init__(self, **kwargs): - try: - self._error_string = self.msg_fmt % kwargs - - except Exception: - if _FATAL_EXCEPTION_FORMAT_ERRORS: - raise - else: - # at least get the core message out if something happened - self._error_string = self.msg_fmt - - def __str__(self): - return self._error_string - - -class MalformedRequestBody(OpenstackException): - msg_fmt = "Malformed message body: %(reason)s" - - -class InvalidContentType(OpenstackException): - msg_fmt = "Invalid content type %(content_type)s" diff --git a/billingstack/openstack/common/excutils.py b/billingstack/openstack/common/excutils.py deleted file mode 100644 index 7c4db8a..0000000 --- a/billingstack/openstack/common/excutils.py +++ /dev/null @@ -1,101 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# Copyright 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Exception related utilities. -""" - -import logging -import sys -import time -import traceback - -import six - -from billingstack.openstack.common.gettextutils import _ # noqa - - -class save_and_reraise_exception(object): - """Save current exception, run some code and then re-raise. - - In some cases the exception context can be cleared, resulting in None - being attempted to be re-raised after an exception handler is run. 
This - can happen when eventlet switches greenthreads or when running an - exception handler, code raises and catches an exception. In both - cases the exception context will be cleared. - - To work around this, we save the exception state, run handler code, and - then re-raise the original exception. If another exception occurs, the - saved exception is logged and the new exception is re-raised. - - In some cases the caller may not want to re-raise the exception, and - for those circumstances this context provides a reraise flag that - can be used to suppress the exception. For example: - - except Exception: - with save_and_reraise_exception() as ctxt: - decide_if_need_reraise() - if not should_be_reraised: - ctxt.reraise = False - """ - def __init__(self): - self.reraise = True - - def __enter__(self): - self.type_, self.value, self.tb, = sys.exc_info() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None: - logging.error(_('Original exception being dropped: %s'), - traceback.format_exception(self.type_, - self.value, - self.tb)) - return False - if self.reraise: - six.reraise(self.type_, self.value, self.tb) - - -def forever_retry_uncaught_exceptions(infunc): - def inner_func(*args, **kwargs): - last_log_time = 0 - last_exc_message = None - exc_count = 0 - while True: - try: - return infunc(*args, **kwargs) - except Exception as exc: - this_exc_message = six.u(str(exc)) - if this_exc_message == last_exc_message: - exc_count += 1 - else: - exc_count = 1 - # Do not log any more frequently than once a minute unless - # the exception message changes - cur_time = int(time.time()) - if (cur_time - last_log_time > 60 or - this_exc_message != last_exc_message): - logging.exception( - _('Unexpected exception occurred %d time(s)... ' - 'retrying.') % exc_count) - last_log_time = cur_time - last_exc_message = this_exc_message - exc_count = 0 - # This should be a very rare event. In case it isn't, do - # a sleep. 
- time.sleep(1) - return inner_func diff --git a/billingstack/openstack/common/fileutils.py b/billingstack/openstack/common/fileutils.py deleted file mode 100644 index d452c25..0000000 --- a/billingstack/openstack/common/fileutils.py +++ /dev/null @@ -1,139 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import contextlib -import errno -import os -import tempfile - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging - -LOG = logging.getLogger(__name__) - -_FILE_CACHE = {} - - -def ensure_tree(path): - """Create a directory (and any ancestor directories required) - - :param path: Directory to create - """ - try: - os.makedirs(path) - except OSError as exc: - if exc.errno == errno.EEXIST: - if not os.path.isdir(path): - raise - else: - raise - - -def read_cached_file(filename, force_reload=False): - """Read from a file if it has been modified. - - :param force_reload: Whether to reload the file. - :returns: A tuple with a boolean specifying if the data is fresh - or not. 
- """ - global _FILE_CACHE - - if force_reload and filename in _FILE_CACHE: - del _FILE_CACHE[filename] - - reloaded = False - mtime = os.path.getmtime(filename) - cache_info = _FILE_CACHE.setdefault(filename, {}) - - if not cache_info or mtime > cache_info.get('mtime', 0): - LOG.debug(_("Reloading cached file %s") % filename) - with open(filename) as fap: - cache_info['data'] = fap.read() - cache_info['mtime'] = mtime - reloaded = True - return (reloaded, cache_info['data']) - - -def delete_if_exists(path, remove=os.unlink): - """Delete a file, but ignore file not found error. - - :param path: File to delete - :param remove: Optional function to remove passed path - """ - - try: - remove(path) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - -@contextlib.contextmanager -def remove_path_on_error(path, remove=delete_if_exists): - """Protect code that wants to operate on PATH atomically. - Any exception will cause PATH to be removed. - - :param path: File to work with - :param remove: Optional function to remove passed path - """ - - try: - yield - except Exception: - with excutils.save_and_reraise_exception(): - remove(path) - - -def file_open(*args, **kwargs): - """Open file - - see built-in file() documentation for more details - - Note: The reason this is kept in a separate module is to easily - be able to provide a stub module that doesn't alter system - state at all (for unit tests) - """ - return file(*args, **kwargs) - - -def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): - """Create temporary file or use existing file. - - This util is needed for creating temporary file with - specified content, suffix and prefix. If path is not None, - it will be used for writing content. If the path doesn't - exist it'll be created. - - :param content: content for temporary file. 
- :param path: same as parameter 'dir' for mkstemp - :param suffix: same as parameter 'suffix' for mkstemp - :param prefix: same as parameter 'prefix' for mkstemp - - For example: it can be used in database tests for creating - configuration files. - """ - if path: - ensure_tree(path) - - (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix) - try: - os.write(fd, content) - finally: - os.close(fd) - return path diff --git a/billingstack/openstack/common/gettextutils.py b/billingstack/openstack/common/gettextutils.py deleted file mode 100644 index 7bd7183..0000000 --- a/billingstack/openstack/common/gettextutils.py +++ /dev/null @@ -1,373 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 Red Hat, Inc. -# Copyright 2013 IBM Corp. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -gettext for openstack-common modules. 
- -Usual usage in an openstack.common module: - - from billingstack.openstack.common.gettextutils import _ -""" - -import copy -import gettext -import logging -import os -import re -try: - import UserString as _userString -except ImportError: - import collections as _userString - -from babel import localedata -import six - -_localedir = os.environ.get('billingstack'.upper() + '_LOCALEDIR') -_t = gettext.translation('billingstack', localedir=_localedir, fallback=True) - -_AVAILABLE_LANGUAGES = {} -USE_LAZY = False - - -def enable_lazy(): - """Convenience function for configuring _() to use lazy gettext - - Call this at the start of execution to enable the gettextutils._ - function to use lazy gettext functionality. This is useful if - your project is importing _ directly instead of using the - gettextutils.install() way of importing the _ function. - """ - global USE_LAZY - USE_LAZY = True - - -def _(msg): - if USE_LAZY: - return Message(msg, 'billingstack') - else: - if six.PY3: - return _t.gettext(msg) - return _t.ugettext(msg) - - -def install(domain, lazy=False): - """Install a _() function using the given translation domain. - - Given a translation domain, install a _() function using gettext's - install() function. - - The main difference from gettext.install() is that we allow - overriding the default localedir (e.g. /usr/share/locale) using - a translation-domain-specific environment variable (e.g. - NOVA_LOCALEDIR). - - :param domain: the translation domain - :param lazy: indicates whether or not to install the lazy _() function. - The lazy _() introduces a way to do deferred translation - of messages by installing a _ that builds Message objects, - instead of strings, which can then be lazily translated into - any available locale. - """ - if lazy: - # NOTE(mrodden): Lazy gettext functionality. - # - # The following introduces a deferred way to do translations on - # messages in OpenStack. 
We override the standard _() function - # and % (format string) operation to build Message objects that can - # later be translated when we have more information. - # - # Also included below is an example LocaleHandler that translates - # Messages to an associated locale, effectively allowing many logs, - # each with their own locale. - - def _lazy_gettext(msg): - """Create and return a Message object. - - Lazy gettext function for a given domain, it is a factory method - for a project/module to get a lazy gettext function for its own - translation domain (i.e. nova, glance, cinder, etc.) - - Message encapsulates a string so that we can translate - it later when needed. - """ - return Message(msg, domain) - - from six import moves - moves.builtins.__dict__['_'] = _lazy_gettext - else: - localedir = '%s_LOCALEDIR' % domain.upper() - if six.PY3: - gettext.install(domain, - localedir=os.environ.get(localedir)) - else: - gettext.install(domain, - localedir=os.environ.get(localedir), - unicode=True) - - -class Message(_userString.UserString, object): - """Class used to encapsulate translatable messages.""" - def __init__(self, msg, domain): - # _msg is the gettext msgid and should never change - self._msg = msg - self._left_extra_msg = '' - self._right_extra_msg = '' - self._locale = None - self.params = None - self.domain = domain - - @property - def data(self): - # NOTE(mrodden): this should always resolve to a unicode string - # that best represents the state of the message currently - - localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR') - if self.locale: - lang = gettext.translation(self.domain, - localedir=localedir, - languages=[self.locale], - fallback=True) - else: - # use system locale for translations - lang = gettext.translation(self.domain, - localedir=localedir, - fallback=True) - - if six.PY3: - ugettext = lang.gettext - else: - ugettext = lang.ugettext - - full_msg = (self._left_extra_msg + - ugettext(self._msg) + - self._right_extra_msg) - - 
if self.params is not None: - full_msg = full_msg % self.params - - return six.text_type(full_msg) - - @property - def locale(self): - return self._locale - - @locale.setter - def locale(self, value): - self._locale = value - if not self.params: - return - - # This Message object may have been constructed with one or more - # Message objects as substitution parameters, given as a single - # Message, or a tuple or Map containing some, so when setting the - # locale for this Message we need to set it for those Messages too. - if isinstance(self.params, Message): - self.params.locale = value - return - if isinstance(self.params, tuple): - for param in self.params: - if isinstance(param, Message): - param.locale = value - return - if isinstance(self.params, dict): - for param in self.params.values(): - if isinstance(param, Message): - param.locale = value - - def _save_dictionary_parameter(self, dict_param): - full_msg = self.data - # look for %(blah) fields in string; - # ignore %% and deal with the - # case where % is first character on the line - keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg) - - # if we don't find any %(blah) blocks but have a %s - if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg): - # apparently the full dictionary is the parameter - params = copy.deepcopy(dict_param) - else: - params = {} - for key in keys: - try: - params[key] = copy.deepcopy(dict_param[key]) - except TypeError: - # cast uncopyable thing to unicode string - params[key] = six.text_type(dict_param[key]) - - return params - - def _save_parameters(self, other): - # we check for None later to see if - # we actually have parameters to inject, - # so encapsulate if our parameter is actually None - if other is None: - self.params = (other, ) - elif isinstance(other, dict): - self.params = self._save_dictionary_parameter(other) - else: - # fallback to casting to unicode, - # this will handle the problematic python code-like - # objects that cannot be deep-copied - try: 
- self.params = copy.deepcopy(other) - except TypeError: - self.params = six.text_type(other) - - return self - - # overrides to be more string-like - def __unicode__(self): - return self.data - - def __str__(self): - if six.PY3: - return self.__unicode__() - return self.data.encode('utf-8') - - def __getstate__(self): - to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg', - 'domain', 'params', '_locale'] - new_dict = self.__dict__.fromkeys(to_copy) - for attr in to_copy: - new_dict[attr] = copy.deepcopy(self.__dict__[attr]) - - return new_dict - - def __setstate__(self, state): - for (k, v) in state.items(): - setattr(self, k, v) - - # operator overloads - def __add__(self, other): - copied = copy.deepcopy(self) - copied._right_extra_msg += other.__str__() - return copied - - def __radd__(self, other): - copied = copy.deepcopy(self) - copied._left_extra_msg += other.__str__() - return copied - - def __mod__(self, other): - # do a format string to catch and raise - # any possible KeyErrors from missing parameters - self.data % other - copied = copy.deepcopy(self) - return copied._save_parameters(other) - - def __mul__(self, other): - return self.data * other - - def __rmul__(self, other): - return other * self.data - - def __getitem__(self, key): - return self.data[key] - - def __getslice__(self, start, end): - return self.data.__getslice__(start, end) - - def __getattribute__(self, name): - # NOTE(mrodden): handle lossy operations that we can't deal with yet - # These override the UserString implementation, since UserString - # uses our __class__ attribute to try and build a new message - # after running the inner data string through the operation. - # At that point, we have lost the gettext message id and can just - # safely resolve to a string instead. 
- ops = ['capitalize', 'center', 'decode', 'encode', - 'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip', - 'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill'] - if name in ops: - return getattr(self.data, name) - else: - return _userString.UserString.__getattribute__(self, name) - - -def get_available_languages(domain): - """Lists the available languages for the given translation domain. - - :param domain: the domain to get languages for - """ - if domain in _AVAILABLE_LANGUAGES: - return copy.copy(_AVAILABLE_LANGUAGES[domain]) - - localedir = '%s_LOCALEDIR' % domain.upper() - find = lambda x: gettext.find(domain, - localedir=os.environ.get(localedir), - languages=[x]) - - # NOTE(mrodden): en_US should always be available (and first in case - # order matters) since our in-line message strings are en_US - language_list = ['en_US'] - # NOTE(luisg): Babel <1.0 used a function called list(), which was - # renamed to locale_identifiers() in >=1.0, the requirements master list - # requires >=0.9.6, uncapped, so defensively work with both. We can remove - # this check when the master list updates to >=1.0, and update all projects - list_identifiers = (getattr(localedata, 'list', None) or - getattr(localedata, 'locale_identifiers')) - locale_identifiers = list_identifiers() - for i in locale_identifiers: - if find(i) is not None: - language_list.append(i) - _AVAILABLE_LANGUAGES[domain] = language_list - return copy.copy(language_list) - - -def get_localized_message(message, user_locale): - """Gets a localized version of the given message in the given locale. - - If the message is not a Message object the message is returned as-is. - If the locale is None the message is translated to the default locale. 
- - :returns: the translated message in unicode, or the original message if - it could not be translated - """ - translated = message - if isinstance(message, Message): - original_locale = message.locale - message.locale = user_locale - translated = six.text_type(message) - message.locale = original_locale - return translated - - -class LocaleHandler(logging.Handler): - """Handler that can have a locale associated to translate Messages. - - A quick example of how to utilize the Message class above. - LocaleHandler takes a locale and a target logging.Handler object - to forward LogRecord objects to after translating the internal Message. - """ - - def __init__(self, locale, target): - """Initialize a LocaleHandler - - :param locale: locale to use for translating messages - :param target: logging.Handler object to forward - LogRecord objects to after translation - """ - logging.Handler.__init__(self) - self.locale = locale - self.target = target - - def emit(self, record): - if isinstance(record.msg, Message): - # set the locale and resolve to a string - record.msg.locale = self.locale - - self.target.emit(record) diff --git a/billingstack/openstack/common/importutils.py b/billingstack/openstack/common/importutils.py deleted file mode 100644 index 7a303f9..0000000 --- a/billingstack/openstack/common/importutils.py +++ /dev/null @@ -1,68 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Import related utilities and helper functions. -""" - -import sys -import traceback - - -def import_class(import_str): - """Returns a class from a string including module and class.""" - mod_str, _sep, class_str = import_str.rpartition('.') - try: - __import__(mod_str) - return getattr(sys.modules[mod_str], class_str) - except (ValueError, AttributeError): - raise ImportError('Class %s cannot be found (%s)' % - (class_str, - traceback.format_exception(*sys.exc_info()))) - - -def import_object(import_str, *args, **kwargs): - """Import a class and return an instance of it.""" - return import_class(import_str)(*args, **kwargs) - - -def import_object_ns(name_space, import_str, *args, **kwargs): - """Tries to import object from default namespace. - - Imports a class and return an instance of it, first by trying - to find the class in a default namespace, then failing back to - a full path if not found in the default namespace. - """ - import_value = "%s.%s" % (name_space, import_str) - try: - return import_class(import_value)(*args, **kwargs) - except ImportError: - return import_class(import_str)(*args, **kwargs) - - -def import_module(import_str): - """Import a module.""" - __import__(import_str) - return sys.modules[import_str] - - -def try_import(import_str, default=None): - """Try to import a module and if it fails return default.""" - try: - return import_module(import_str) - except ImportError: - return default diff --git a/billingstack/openstack/common/iniparser.py b/billingstack/openstack/common/iniparser.py deleted file mode 100644 index 2412844..0000000 --- a/billingstack/openstack/common/iniparser.py +++ /dev/null @@ -1,130 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 OpenStack LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class ParseError(Exception): - def __init__(self, message, lineno, line): - self.msg = message - self.line = line - self.lineno = lineno - - def __str__(self): - return 'at line %d, %s: %r' % (self.lineno, self.msg, self.line) - - -class BaseParser(object): - lineno = 0 - parse_exc = ParseError - - def _assignment(self, key, value): - self.assignment(key, value) - return None, [] - - def _get_section(self, line): - if line[-1] != ']': - return self.error_no_section_end_bracket(line) - if len(line) <= 2: - return self.error_no_section_name(line) - - return line[1:-1] - - def _split_key_value(self, line): - colon = line.find(':') - equal = line.find('=') - if colon < 0 and equal < 0: - return self.error_invalid_assignment(line) - - if colon < 0 or (equal >= 0 and equal < colon): - key, value = line[:equal], line[equal + 1:] - else: - key, value = line[:colon], line[colon + 1:] - - value = value.strip() - if ((value and value[0] == value[-1]) and - (value[0] == "\"" or value[0] == "'")): - value = value[1:-1] - return key.strip(), [value] - - def parse(self, lineiter): - key = None - value = [] - - for line in lineiter: - self.lineno += 1 - - line = line.rstrip() - if not line: - # Blank line, ends multi-line values - if key: - key, value = self._assignment(key, value) - continue - elif line[0] in (' ', '\t'): - # Continuation of previous assignment - if key is None: - self.error_unexpected_continuation(line) - else: - value.append(line.lstrip()) - continue - - if key: - # Flush previous assignment, if any - key, value = self._assignment(key, value) - - 
if line[0] == '[': - # Section start - section = self._get_section(line) - if section: - self.new_section(section) - elif line[0] in '#;': - self.comment(line[1:].lstrip()) - else: - key, value = self._split_key_value(line) - if not key: - return self.error_empty_key(line) - - if key: - # Flush previous assignment, if any - self._assignment(key, value) - - def assignment(self, key, value): - """Called when a full assignment is parsed""" - raise NotImplementedError() - - def new_section(self, section): - """Called when a new section is started""" - raise NotImplementedError() - - def comment(self, comment): - """Called when a comment is parsed""" - pass - - def error_invalid_assignment(self, line): - raise self.parse_exc("No ':' or '=' found in assignment", - self.lineno, line) - - def error_empty_key(self, line): - raise self.parse_exc('Key cannot be empty', self.lineno, line) - - def error_unexpected_continuation(self, line): - raise self.parse_exc('Unexpected continuation line', - self.lineno, line) - - def error_no_section_end_bracket(self, line): - raise self.parse_exc('Invalid section (must end with ])', - self.lineno, line) - - def error_no_section_name(self, line): - raise self.parse_exc('Empty section name', self.lineno, line) diff --git a/billingstack/openstack/common/jsonutils.py b/billingstack/openstack/common/jsonutils.py deleted file mode 100644 index e8ab2d5..0000000 --- a/billingstack/openstack/common/jsonutils.py +++ /dev/null @@ -1,180 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -''' -JSON related utilities. - -This module provides a few things: - - 1) A handy function for getting an object down to something that can be - JSON serialized. See to_primitive(). - - 2) Wrappers around loads() and dumps(). The dumps() wrapper will - automatically use to_primitive() for you if needed. - - 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson - is available. -''' - - -import datetime -import functools -import inspect -import itertools -import json -try: - import xmlrpclib -except ImportError: - # NOTE(jd): xmlrpclib is not shipped with Python 3 - xmlrpclib = None - -import six - -from billingstack.openstack.common import gettextutils -from billingstack.openstack.common import importutils -from billingstack.openstack.common import timeutils - -netaddr = importutils.try_import("netaddr") - -_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod, - inspect.isfunction, inspect.isgeneratorfunction, - inspect.isgenerator, inspect.istraceback, inspect.isframe, - inspect.iscode, inspect.isbuiltin, inspect.isroutine, - inspect.isabstract] - -_simple_types = (six.string_types + six.integer_types - + (type(None), bool, float)) - - -def to_primitive(value, convert_instances=False, convert_datetime=True, - level=0, max_depth=3): - """Convert a complex object into primitives. - - Handy for JSON serialization. We can optionally handle instances, - but since this is a recursive function, we could have cyclical - data structures. 
- - To handle cyclical data structures we could track the actual objects - visited in a set, but not all objects are hashable. Instead we just - track the depth of the object inspections and don't go too deep. - - Therefore, convert_instances=True is lossy ... be aware. - - """ - # handle obvious types first - order of basic types determined by running - # full tests on nova project, resulting in the following counts: - # 572754 - # 460353 - # 379632 - # 274610 - # 199918 - # 114200 - # 51817 - # 26164 - # 6491 - # 283 - # 19 - if isinstance(value, _simple_types): - return value - - if isinstance(value, datetime.datetime): - if convert_datetime: - return timeutils.strtime(value) - else: - return value - - # value of itertools.count doesn't get caught by nasty_type_tests - # and results in infinite loop when list(value) is called. - if type(value) == itertools.count: - return six.text_type(value) - - # FIXME(vish): Workaround for LP bug 852095. Without this workaround, - # tests that raise an exception in a mocked method that - # has a @wrap_exception with a notifier will fail. If - # we up the dependency to 0.5.4 (when it is released) we - # can remove this workaround. - if getattr(value, '__module__', None) == 'mox': - return 'mock' - - if level > max_depth: - return '?' - - # The try block may not be necessary after the class check above, - # but just in case ... 
- try: - recursive = functools.partial(to_primitive, - convert_instances=convert_instances, - convert_datetime=convert_datetime, - level=level, - max_depth=max_depth) - if isinstance(value, dict): - return dict((k, recursive(v)) for k, v in value.iteritems()) - elif isinstance(value, (list, tuple)): - return [recursive(lv) for lv in value] - - # It's not clear why xmlrpclib created their own DateTime type, but - # for our purposes, make it a datetime type which is explicitly - # handled - if xmlrpclib and isinstance(value, xmlrpclib.DateTime): - value = datetime.datetime(*tuple(value.timetuple())[:6]) - - if convert_datetime and isinstance(value, datetime.datetime): - return timeutils.strtime(value) - elif isinstance(value, gettextutils.Message): - return value.data - elif hasattr(value, 'iteritems'): - return recursive(dict(value.iteritems()), level=level + 1) - elif hasattr(value, '__iter__'): - return recursive(list(value)) - elif convert_instances and hasattr(value, '__dict__'): - # Likely an instance of something. Watch for cycles. - # Ignore class member vars. - return recursive(value.__dict__, level=level + 1) - elif netaddr and isinstance(value, netaddr.IPAddress): - return six.text_type(value) - else: - if any(test(value) for test in _nasty_type_tests): - return six.text_type(value) - return value - except TypeError: - # Class objects are tricky since they may define something like - # __iter__ defined but it isn't callable as list(). 
- return six.text_type(value) - - -def dumps(value, default=to_primitive, **kwargs): - return json.dumps(value, default=default, **kwargs) - - -def loads(s): - return json.loads(s) - - -def load(s): - return json.load(s) - - -try: - import anyjson -except ImportError: - pass -else: - anyjson._modules.append((__name__, 'dumps', TypeError, - 'loads', ValueError, 'load')) - anyjson.force_implementation(__name__) diff --git a/billingstack/openstack/common/local.py b/billingstack/openstack/common/local.py deleted file mode 100644 index e82f17d..0000000 --- a/billingstack/openstack/common/local.py +++ /dev/null @@ -1,47 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Local storage of variables using weak references""" - -import threading -import weakref - - -class WeakLocal(threading.local): - def __getattribute__(self, attr): - rval = super(WeakLocal, self).__getattribute__(attr) - if rval: - # NOTE(mikal): this bit is confusing. What is stored is a weak - # reference, not the value itself. We therefore need to lookup - # the weak reference and return the inner value here. 
- rval = rval() - return rval - - def __setattr__(self, attr, value): - value = weakref.ref(value) - return super(WeakLocal, self).__setattr__(attr, value) - - -# NOTE(mikal): the name "store" should be deprecated in the future -store = WeakLocal() - -# A "weak" store uses weak references and allows an object to fall out of scope -# when it falls out of scope in the code that uses the thread local storage. A -# "strong" store will hold a reference to the object so that it never falls out -# of scope. -weak_store = WeakLocal() -strong_store = threading.local() diff --git a/billingstack/openstack/common/lockutils.py b/billingstack/openstack/common/lockutils.py deleted file mode 100644 index bd35ab5..0000000 --- a/billingstack/openstack/common/lockutils.py +++ /dev/null @@ -1,305 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import contextlib -import errno -import functools -import os -import shutil -import subprocess -import sys -import tempfile -import threading -import time -import weakref - -from oslo.config import cfg - -from billingstack.openstack.common import fileutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -util_opts = [ - cfg.BoolOpt('disable_process_locking', default=False, - help='Whether to disable inter-process locks'), - cfg.StrOpt('lock_path', - default=os.environ.get("BILLINGSTACK_LOCK_PATH"), - help=('Directory to use for lock files.')) -] - - -CONF = cfg.CONF -CONF.register_opts(util_opts) - - -def set_defaults(lock_path): - cfg.set_defaults(util_opts, lock_path=lock_path) - - -class _InterProcessLock(object): - """Lock implementation which allows multiple locks, working around - issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does - not require any cleanup. Since the lock is always held on a file - descriptor rather than outside of the process, the lock gets dropped - automatically if the process crashes, even if __exit__ is not executed. - - There are no guarantees regarding usage by multiple green threads in a - single process here. This lock works only between processes. Exclusive - access between local threads should be achieved using the semaphores - in the @synchronized decorator. - - Note these locks are released when the descriptor is closed, so it's not - safe to close the file descriptor while another green thread holds the - lock. Just opening and closing the lock file can break synchronisation, - so lock files must be accessed only using this abstraction. 
- """ - - def __init__(self, name): - self.lockfile = None - self.fname = name - - def __enter__(self): - self.lockfile = open(self.fname, 'w') - - while True: - try: - # Using non-blocking locks since green threads are not - # patched to deal with blocking locking calls. - # Also upon reading the MSDN docs for locking(), it seems - # to have a laughable 10 attempts "blocking" mechanism. - self.trylock() - return self - except IOError as e: - if e.errno in (errno.EACCES, errno.EAGAIN): - # external locks synchronise things like iptables - # updates - give it some time to prevent busy spinning - time.sleep(0.01) - else: - raise - - def __exit__(self, exc_type, exc_val, exc_tb): - try: - self.unlock() - self.lockfile.close() - except IOError: - LOG.exception(_("Could not release the acquired lock `%s`"), - self.fname) - - def trylock(self): - raise NotImplementedError() - - def unlock(self): - raise NotImplementedError() - - -class _WindowsLock(_InterProcessLock): - def trylock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) - - def unlock(self): - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) - - -class _PosixLock(_InterProcessLock): - def trylock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) - - def unlock(self): - fcntl.lockf(self.lockfile, fcntl.LOCK_UN) - - -if os.name == 'nt': - import msvcrt - InterProcessLock = _WindowsLock -else: - import fcntl - InterProcessLock = _PosixLock - -_semaphores = weakref.WeakValueDictionary() -_semaphores_lock = threading.Lock() - - -@contextlib.contextmanager -def lock(name, lock_file_prefix=None, external=False, lock_path=None): - """Context based lock - - This function yields a `threading.Semaphore` instance (if we don't use - eventlet.monkey_patch(), else `semaphore.Semaphore`) unless external is - True, in which case, it'll yield an InterProcessLock instance. 
- - :param lock_file_prefix: The lock_file_prefix argument is used to provide - lock files on disk with a meaningful prefix. - - :param external: The external keyword argument denotes whether this lock - should work across multiple processes. This means that if two different - workers both run a a method decorated with @synchronized('mylock', - external=True), only one of them will execute at a time. - - :param lock_path: The lock_path keyword argument is used to specify a - special location for external lock files to live. If nothing is set, then - CONF.lock_path is used as a default. - """ - with _semaphores_lock: - try: - sem = _semaphores[name] - except KeyError: - sem = threading.Semaphore() - _semaphores[name] = sem - - with sem: - LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name}) - - # NOTE(mikal): I know this looks odd - if not hasattr(local.strong_store, 'locks_held'): - local.strong_store.locks_held = [] - local.strong_store.locks_held.append(name) - - try: - if external and not CONF.disable_process_locking: - LOG.debug(_('Attempting to grab file lock "%(lock)s"'), - {'lock': name}) - - # We need a copy of lock_path because it is non-local - local_lock_path = lock_path or CONF.lock_path - if not local_lock_path: - raise cfg.RequiredOptError('lock_path') - - if not os.path.exists(local_lock_path): - fileutils.ensure_tree(local_lock_path) - LOG.info(_('Created lock path: %s'), local_lock_path) - - def add_prefix(name, prefix): - if not prefix: - return name - sep = '' if prefix.endswith('-') else '-' - return '%s%s%s' % (prefix, sep, name) - - # NOTE(mikal): the lock name cannot contain directory - # separators - lock_file_name = add_prefix(name.replace(os.sep, '_'), - lock_file_prefix) - - lock_file_path = os.path.join(local_lock_path, lock_file_name) - - try: - lock = InterProcessLock(lock_file_path) - with lock as lock: - LOG.debug(_('Got file lock "%(lock)s" at %(path)s'), - {'lock': name, 'path': lock_file_path}) - yield lock - finally: - 
LOG.debug(_('Released file lock "%(lock)s" at %(path)s'), - {'lock': name, 'path': lock_file_path}) - else: - yield sem - - finally: - local.strong_store.locks_held.remove(name) - - -def synchronized(name, lock_file_prefix=None, external=False, lock_path=None): - """Synchronization decorator. - - Decorating a method like so:: - - @synchronized('mylock') - def foo(self, *args): - ... - - ensures that only one thread will execute the foo method at a time. - - Different methods can share the same lock:: - - @synchronized('mylock') - def foo(self, *args): - ... - - @synchronized('mylock') - def bar(self, *args): - ... - - This way only one of either foo or bar can be executing at a time. - """ - - def wrap(f): - @functools.wraps(f) - def inner(*args, **kwargs): - try: - with lock(name, lock_file_prefix, external, lock_path): - LOG.debug(_('Got semaphore / lock "%(function)s"'), - {'function': f.__name__}) - return f(*args, **kwargs) - finally: - LOG.debug(_('Semaphore / lock released "%(function)s"'), - {'function': f.__name__}) - return inner - return wrap - - -def synchronized_with_prefix(lock_file_prefix): - """Partial object generator for the synchronization decorator. - - Redefine @synchronized in each project like so:: - - (in nova/utils.py) - from nova.openstack.common import lockutils - - synchronized = lockutils.synchronized_with_prefix('nova-') - - - (in nova/foo.py) - from nova import utils - - @utils.synchronized('mylock') - def bar(self, *args): - ... - - The lock_file_prefix argument is used to provide lock files on disk with a - meaningful prefix. - """ - - return functools.partial(synchronized, lock_file_prefix=lock_file_prefix) - - -def main(argv): - """Create a dir for locks and pass it to command from arguments - - If you run this: - python -m openstack.common.lockutils python setup.py testr - - a temporary directory will be created for all your locks and passed to all - your tests in an environment variable. 
The temporary dir will be deleted - afterwards and the return value will be preserved. - """ - - lock_dir = tempfile.mkdtemp() - os.environ["BILLINGSTACK_LOCK_PATH"] = lock_dir - try: - ret_val = subprocess.call(argv[1:]) - finally: - shutil.rmtree(lock_dir, ignore_errors=True) - return ret_val - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff --git a/billingstack/openstack/common/log.py b/billingstack/openstack/common/log.py deleted file mode 100644 index 5c0b093..0000000 --- a/billingstack/openstack/common/log.py +++ /dev/null @@ -1,626 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Openstack logging handler. - -This module adds to logging functionality by adding the option to specify -a context object when calling the various log methods. If the context object -is not specified, default formatting is used. Additionally, an instance uuid -may be passed as part of the log message, which is intended to make it easier -for admins to find messages related to a specific instance. - -It also allows setting of formatting information through conf. 
- -""" - -import inspect -import itertools -import logging -import logging.config -import logging.handlers -import os -import re -import sys -import traceback - -from oslo.config import cfg -import six -from six import moves - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import local - - -_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" - -_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password'] - -# NOTE(ldbragst): Let's build a list of regex objects using the list of -# _SANITIZE_KEYS we already have. This way, we only have to add the new key -# to the list of _SANITIZE_KEYS and we can generate regular expressions -# for XML and JSON automatically. -_SANITIZE_PATTERNS = [] -_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])', - r'(<%(key)s>).*?()', - r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])', - r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])'] - -for key in _SANITIZE_KEYS: - for pattern in _FORMAT_PATTERNS: - reg_ex = re.compile(pattern % {'key': key}, re.DOTALL) - _SANITIZE_PATTERNS.append(reg_ex) - - -common_cli_opts = [ - cfg.BoolOpt('debug', - short='d', - default=False, - help='Print debugging output (set logging level to ' - 'DEBUG instead of default WARNING level).'), - cfg.BoolOpt('verbose', - short='v', - default=False, - help='Print more verbose output (set logging level to ' - 'INFO instead of default WARNING level).'), -] - -logging_cli_opts = [ - cfg.StrOpt('log-config-append', - metavar='PATH', - deprecated_name='log-config', - help='The name of logging configuration file. It does not ' - 'disable existing loggers, but just appends specified ' - 'logging configuration to any other existing logging ' - 'options. 
Please see the Python logging module ' - 'documentation for details on logging configuration ' - 'files.'), - cfg.StrOpt('log-format', - default=None, - metavar='FORMAT', - help='DEPRECATED. ' - 'A logging.Formatter log message format string which may ' - 'use any of the available logging.LogRecord attributes. ' - 'This option is deprecated. Please use ' - 'logging_context_format_string and ' - 'logging_default_format_string instead.'), - cfg.StrOpt('log-date-format', - default=_DEFAULT_LOG_DATE_FORMAT, - metavar='DATE_FORMAT', - help='Format string for %%(asctime)s in log records. ' - 'Default: %(default)s'), - cfg.StrOpt('log-file', - metavar='PATH', - deprecated_name='logfile', - help='(Optional) Name of log file to output to. ' - 'If no default is set, logging will go to stdout.'), - cfg.StrOpt('log-dir', - deprecated_name='logdir', - help='(Optional) The base directory used for relative ' - '--log-file paths'), - cfg.BoolOpt('use-syslog', - default=False, - help='Use syslog for logging.'), - cfg.StrOpt('syslog-log-facility', - default='LOG_USER', - help='syslog facility to receive log lines') -] - -generic_log_opts = [ - cfg.BoolOpt('use_stderr', - default=True, - help='Log output to standard error') -] - -log_opts = [ - cfg.StrOpt('logging_context_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [%(request_id)s %(user)s %(tenant)s] ' - '%(instance)s%(message)s', - help='format string to use for log messages with context'), - cfg.StrOpt('logging_default_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [-] %(instance)s%(message)s', - help='format string to use for log messages without context'), - cfg.StrOpt('logging_debug_format_suffix', - default='%(funcName)s %(pathname)s:%(lineno)d', - help='data to append to log format when level is DEBUG'), - cfg.StrOpt('logging_exception_prefix', - default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' - '%(instance)s', - 
help='prefix each line of exception output with this format'), - cfg.ListOpt('default_log_levels', - default=[ - 'amqp=WARN', - 'amqplib=WARN', - 'boto=WARN', - 'keystone=INFO', - 'qpid=WARN', - 'sqlalchemy=WARN', - 'suds=INFO', - 'iso8601=WARN', - ], - help='list of logger=LEVEL pairs'), - cfg.BoolOpt('publish_errors', - default=False, - help='publish error events'), - cfg.BoolOpt('fatal_deprecations', - default=False, - help='make deprecations fatal'), - - # NOTE(mikal): there are two options here because sometimes we are handed - # a full instance (and could include more information), and other times we - # are just handed a UUID for the instance. - cfg.StrOpt('instance_format', - default='[instance: %(uuid)s] ', - help='If an instance is passed with the log message, format ' - 'it like this'), - cfg.StrOpt('instance_uuid_format', - default='[instance: %(uuid)s] ', - help='If an instance UUID is passed with the log message, ' - 'format it like this'), -] - -CONF = cfg.CONF -CONF.register_cli_opts(common_cli_opts) -CONF.register_cli_opts(logging_cli_opts) -CONF.register_opts(generic_log_opts) -CONF.register_opts(log_opts) - -# our new audit level -# NOTE(jkoelker) Since we synthesized an audit level, make the logging -# module aware of it so it acts like other levels. 
-logging.AUDIT = logging.INFO + 1 -logging.addLevelName(logging.AUDIT, 'AUDIT') - - -try: - NullHandler = logging.NullHandler -except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7 - class NullHandler(logging.Handler): - def handle(self, record): - pass - - def emit(self, record): - pass - - def createLock(self): - self.lock = None - - -def _dictify_context(context): - if context is None: - return None - if not isinstance(context, dict) and getattr(context, 'to_dict', None): - context = context.to_dict() - return context - - -def _get_binary_name(): - return os.path.basename(inspect.stack()[-1][1]) - - -def _get_log_file_path(binary=None): - logfile = CONF.log_file - logdir = CONF.log_dir - - if logfile and not logdir: - return logfile - - if logfile and logdir: - return os.path.join(logdir, logfile) - - if logdir: - binary = binary or _get_binary_name() - return '%s.log' % (os.path.join(logdir, binary),) - - return None - - -def mask_password(message, secret="***"): - """Replace password with 'secret' in message. - - :param message: The string which includes security information. - :param secret: value with which to replace passwords, defaults to "***". - :returns: The unicode value of message with the password fields masked. - - For example: - >>> mask_password("'adminPass' : 'aaaaa'") - "'adminPass' : '***'" - >>> mask_password("'admin_pass' : 'aaaaa'") - "'admin_pass' : '***'" - >>> mask_password('"password" : "aaaaa"') - '"password" : "***"' - >>> mask_password("'original_password' : 'aaaaa'") - "'original_password' : '***'" - >>> mask_password("u'original_password' : u'aaaaa'") - "u'original_password' : u'***'" - """ - message = six.text_type(message) - - # NOTE(ldbragst): Check to see if anything in message contains any key - # specified in _SANITIZE_KEYS, if not then just return the message since - # we don't have to mask any passwords. 
- if not any(key in message for key in _SANITIZE_KEYS): - return message - - secret = r'\g<1>' + secret + r'\g<2>' - for pattern in _SANITIZE_PATTERNS: - message = re.sub(pattern, secret, message) - return message - - -class BaseLoggerAdapter(logging.LoggerAdapter): - - def audit(self, msg, *args, **kwargs): - self.log(logging.AUDIT, msg, *args, **kwargs) - - -class LazyAdapter(BaseLoggerAdapter): - def __init__(self, name='unknown', version='unknown'): - self._logger = None - self.extra = {} - self.name = name - self.version = version - - @property - def logger(self): - if not self._logger: - self._logger = getLogger(self.name, self.version) - return self._logger - - -class ContextAdapter(BaseLoggerAdapter): - warn = logging.LoggerAdapter.warning - - def __init__(self, logger, project_name, version_string): - self.logger = logger - self.project = project_name - self.version = version_string - - @property - def handlers(self): - return self.logger.handlers - - def deprecated(self, msg, *args, **kwargs): - stdmsg = _("Deprecated: %s") % msg - if CONF.fatal_deprecations: - self.critical(stdmsg, *args, **kwargs) - raise DeprecatedConfig(msg=stdmsg) - else: - self.warn(stdmsg, *args, **kwargs) - - def process(self, msg, kwargs): - # NOTE(mrodden): catch any Message/other object and - # coerce to unicode before they can get - # to the python logging and possibly - # cause string encoding trouble - if not isinstance(msg, six.string_types): - msg = six.text_type(msg) - - if 'extra' not in kwargs: - kwargs['extra'] = {} - extra = kwargs['extra'] - - context = kwargs.pop('context', None) - if not context: - context = getattr(local.store, 'context', None) - if context: - extra.update(_dictify_context(context)) - - instance = kwargs.pop('instance', None) - instance_uuid = (extra.get('instance_uuid', None) or - kwargs.pop('instance_uuid', None)) - instance_extra = '' - if instance: - instance_extra = CONF.instance_format % instance - elif instance_uuid: - instance_extra = 
(CONF.instance_uuid_format - % {'uuid': instance_uuid}) - extra.update({'instance': instance_extra}) - - extra.update({"project": self.project}) - extra.update({"version": self.version}) - extra['extra'] = extra.copy() - return msg, kwargs - - -class JSONFormatter(logging.Formatter): - def __init__(self, fmt=None, datefmt=None): - # NOTE(jkoelker) we ignore the fmt argument, but its still there - # since logging.config.fileConfig passes it. - self.datefmt = datefmt - - def formatException(self, ei, strip_newlines=True): - lines = traceback.format_exception(*ei) - if strip_newlines: - lines = [itertools.ifilter( - lambda x: x, - line.rstrip().splitlines()) for line in lines] - lines = list(itertools.chain(*lines)) - return lines - - def format(self, record): - message = {'message': record.getMessage(), - 'asctime': self.formatTime(record, self.datefmt), - 'name': record.name, - 'msg': record.msg, - 'args': record.args, - 'levelname': record.levelname, - 'levelno': record.levelno, - 'pathname': record.pathname, - 'filename': record.filename, - 'module': record.module, - 'lineno': record.lineno, - 'funcname': record.funcName, - 'created': record.created, - 'msecs': record.msecs, - 'relative_created': record.relativeCreated, - 'thread': record.thread, - 'thread_name': record.threadName, - 'process_name': record.processName, - 'process': record.process, - 'traceback': None} - - if hasattr(record, 'extra'): - message['extra'] = record.extra - - if record.exc_info: - message['traceback'] = self.formatException(record.exc_info) - - return jsonutils.dumps(message) - - -def _create_logging_excepthook(product_name): - def logging_excepthook(type, value, tb): - extra = {} - if CONF.verbose: - extra['exc_info'] = (type, value, tb) - getLogger(product_name).critical(str(value), **extra) - return logging_excepthook - - -class LogConfigError(Exception): - - message = _('Error loading logging config %(log_config)s: %(err_msg)s') - - def __init__(self, log_config, err_msg): - 
self.log_config = log_config - self.err_msg = err_msg - - def __str__(self): - return self.message % dict(log_config=self.log_config, - err_msg=self.err_msg) - - -def _load_log_config(log_config_append): - try: - logging.config.fileConfig(log_config_append, - disable_existing_loggers=False) - except moves.configparser.Error as exc: - raise LogConfigError(log_config_append, str(exc)) - - -def setup(product_name): - """Setup logging.""" - if CONF.log_config_append: - _load_log_config(CONF.log_config_append) - else: - _setup_logging_from_conf() - sys.excepthook = _create_logging_excepthook(product_name) - - -def set_defaults(logging_context_format_string): - cfg.set_defaults(log_opts, - logging_context_format_string= - logging_context_format_string) - - -def _find_facility_from_conf(): - facility_names = logging.handlers.SysLogHandler.facility_names - facility = getattr(logging.handlers.SysLogHandler, - CONF.syslog_log_facility, - None) - - if facility is None and CONF.syslog_log_facility in facility_names: - facility = facility_names.get(CONF.syslog_log_facility) - - if facility is None: - valid_facilities = facility_names.keys() - consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON', - 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS', - 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP', - 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3', - 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7'] - valid_facilities.extend(consts) - raise TypeError(_('syslog facility must be one of: %s') % - ', '.join("'%s'" % fac - for fac in valid_facilities)) - - return facility - - -def _setup_logging_from_conf(): - log_root = getLogger(None).logger - for handler in log_root.handlers: - log_root.removeHandler(handler) - - if CONF.use_syslog: - facility = _find_facility_from_conf() - syslog = logging.handlers.SysLogHandler(address='/dev/log', - facility=facility) - log_root.addHandler(syslog) - - logpath = _get_log_file_path() - if logpath: - filelog = 
logging.handlers.WatchedFileHandler(logpath) - log_root.addHandler(filelog) - - if CONF.use_stderr: - streamlog = ColorHandler() - log_root.addHandler(streamlog) - - elif not CONF.log_file: - # pass sys.stdout as a positional argument - # python2.6 calls the argument strm, in 2.7 it's stream - streamlog = logging.StreamHandler(sys.stdout) - log_root.addHandler(streamlog) - - if CONF.publish_errors: - handler = importutils.import_object( - "billingstack.openstack.common.log_handler.PublishErrorsHandler", - logging.ERROR) - log_root.addHandler(handler) - - datefmt = CONF.log_date_format - for handler in log_root.handlers: - # NOTE(alaski): CONF.log_format overrides everything currently. This - # should be deprecated in favor of context aware formatting. - if CONF.log_format: - handler.setFormatter(logging.Formatter(fmt=CONF.log_format, - datefmt=datefmt)) - log_root.info('Deprecated: log_format is now deprecated and will ' - 'be removed in the next release') - else: - handler.setFormatter(ContextFormatter(datefmt=datefmt)) - - if CONF.debug: - log_root.setLevel(logging.DEBUG) - elif CONF.verbose: - log_root.setLevel(logging.INFO) - else: - log_root.setLevel(logging.WARNING) - - for pair in CONF.default_log_levels: - mod, _sep, level_name = pair.partition('=') - level = logging.getLevelName(level_name) - logger = logging.getLogger(mod) - logger.setLevel(level) - -_loggers = {} - - -def getLogger(name='unknown', version='unknown'): - if name not in _loggers: - _loggers[name] = ContextAdapter(logging.getLogger(name), - name, - version) - return _loggers[name] - - -def getLazyLogger(name='unknown', version='unknown'): - """Returns lazy logger. - - Creates a pass-through logger that does not create the real logger - until it is really needed and delegates all calls to the real logger - once it is created. 
- """ - return LazyAdapter(name, version) - - -class WritableLogger(object): - """A thin wrapper that responds to `write` and logs.""" - - def __init__(self, logger, level=logging.INFO): - self.logger = logger - self.level = level - - def write(self, msg): - self.logger.log(self.level, msg) - - -class ContextFormatter(logging.Formatter): - """A context.RequestContext aware formatter configured through flags. - - The flags used to set format strings are: logging_context_format_string - and logging_default_format_string. You can also specify - logging_debug_format_suffix to append extra formatting if the log level is - debug. - - For information about what variables are available for the formatter see: - http://docs.python.org/library/logging.html#formatter - - """ - - def format(self, record): - """Uses contextstring if request_id is set, otherwise default.""" - # NOTE(sdague): default the fancier formating params - # to an empty string so we don't throw an exception if - # they get used - for key in ('instance', 'color'): - if key not in record.__dict__: - record.__dict__[key] = '' - - if record.__dict__.get('request_id', None): - self._fmt = CONF.logging_context_format_string - else: - self._fmt = CONF.logging_default_format_string - - if (record.levelno == logging.DEBUG and - CONF.logging_debug_format_suffix): - self._fmt += " " + CONF.logging_debug_format_suffix - - # Cache this on the record, Logger will respect our formated copy - if record.exc_info: - record.exc_text = self.formatException(record.exc_info, record) - return logging.Formatter.format(self, record) - - def formatException(self, exc_info, record=None): - """Format exception output with CONF.logging_exception_prefix.""" - if not record: - return logging.Formatter.formatException(self, exc_info) - - stringbuffer = moves.StringIO() - traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], - None, stringbuffer) - lines = stringbuffer.getvalue().split('\n') - stringbuffer.close() - - if 
CONF.logging_exception_prefix.find('%(asctime)') != -1: - record.asctime = self.formatTime(record, self.datefmt) - - formatted_lines = [] - for line in lines: - pl = CONF.logging_exception_prefix % record.__dict__ - fl = '%s%s' % (pl, line) - formatted_lines.append(fl) - return '\n'.join(formatted_lines) - - -class ColorHandler(logging.StreamHandler): - LEVEL_COLORS = { - logging.DEBUG: '\033[00;32m', # GREEN - logging.INFO: '\033[00;36m', # CYAN - logging.AUDIT: '\033[01;36m', # BOLD CYAN - logging.WARN: '\033[01;33m', # BOLD YELLOW - logging.ERROR: '\033[01;31m', # BOLD RED - logging.CRITICAL: '\033[01;31m', # BOLD RED - } - - def format(self, record): - record.color = self.LEVEL_COLORS[record.levelno] - return logging.StreamHandler.format(self, record) - - -class DeprecatedConfig(Exception): - message = _("Fatal call to deprecated config: %(msg)s") - - def __init__(self, msg): - super(Exception, self).__init__(self.message % dict(msg=msg)) diff --git a/billingstack/openstack/common/loopingcall.py b/billingstack/openstack/common/loopingcall.py deleted file mode 100644 index a8de8f8..0000000 --- a/billingstack/openstack/common/loopingcall.py +++ /dev/null @@ -1,147 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sys - -from eventlet import event -from eventlet import greenthread - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import timeutils - -LOG = logging.getLogger(__name__) - - -class LoopingCallDone(Exception): - """Exception to break out and stop a LoopingCall. - - The poll-function passed to LoopingCall can raise this exception to - break out of the loop normally. This is somewhat analogous to - StopIteration. - - An optional return-value can be included as the argument to the exception; - this return-value will be returned by LoopingCall.wait() - - """ - - def __init__(self, retvalue=True): - """:param retvalue: Value that LoopingCall.wait() should return.""" - self.retvalue = retvalue - - -class LoopingCallBase(object): - def __init__(self, f=None, *args, **kw): - self.args = args - self.kw = kw - self.f = f - self._running = False - self.done = None - - def stop(self): - self._running = False - - def wait(self): - return self.done.wait() - - -class FixedIntervalLoopingCall(LoopingCallBase): - """A fixed interval looping call.""" - - def start(self, interval, initial_delay=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - start = timeutils.utcnow() - self.f(*self.args, **self.kw) - end = timeutils.utcnow() - if not self._running: - break - delay = interval - timeutils.delta_seconds(start, end) - if delay <= 0: - LOG.warn(_('task run outlasted interval by %s sec') % - -delay) - greenthread.sleep(delay if delay > 0 else 0) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_('in fixed duration looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn_n(_inner) - return self.done - - -# TODO(mikal): this class 
name is deprecated in Havana and should be removed -# in the I release -LoopingCall = FixedIntervalLoopingCall - - -class DynamicLoopingCall(LoopingCallBase): - """A looping call which sleeps until the next known event. - - The function called should return how long to sleep for before being - called again. - """ - - def start(self, initial_delay=None, periodic_interval_max=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - idle = self.f(*self.args, **self.kw) - if not self._running: - break - - if periodic_interval_max is not None: - idle = min(idle, periodic_interval_max) - LOG.debug(_('Dynamic looping call sleeping for %.02f ' - 'seconds'), idle) - greenthread.sleep(idle) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_('in dynamic looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn(_inner) - return self.done diff --git a/billingstack/openstack/common/network_utils.py b/billingstack/openstack/common/network_utils.py deleted file mode 100644 index dbed1ce..0000000 --- a/billingstack/openstack/common/network_utils.py +++ /dev/null @@ -1,81 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -""" -Network-related utilities and helper functions. -""" - -import urlparse - - -def parse_host_port(address, default_port=None): - """Interpret a string as a host:port pair. - - An IPv6 address MUST be escaped if accompanied by a port, - because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334 - means both [2001:db8:85a3::8a2e:370:7334] and - [2001:db8:85a3::8a2e:370]:7334. - - >>> parse_host_port('server01:80') - ('server01', 80) - >>> parse_host_port('server01') - ('server01', None) - >>> parse_host_port('server01', default_port=1234) - ('server01', 1234) - >>> parse_host_port('[::1]:80') - ('::1', 80) - >>> parse_host_port('[::1]') - ('::1', None) - >>> parse_host_port('[::1]', default_port=1234) - ('::1', 1234) - >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234) - ('2001:db8:85a3::8a2e:370:7334', 1234) - - """ - if address[0] == '[': - # Escaped ipv6 - _host, _port = address[1:].split(']') - host = _host - if ':' in _port: - port = _port.split(':')[1] - else: - port = default_port - else: - if address.count(':') == 1: - host, port = address.split(':') - else: - # 0 means ipv4, >1 means ipv6. - # We prohibit unescaped ipv6 addresses with port. - host = address - port = default_port - - return (host, None if port is None else int(port)) - - -def urlsplit(url, scheme='', allow_fragments=True): - """Parse a URL using urlparse.urlsplit(), splitting query and fragments. - This function papers over Python issue9374 when needed. - - The parameters are the same as urlparse.urlsplit. - """ - scheme, netloc, path, query, fragment = urlparse.urlsplit( - url, scheme, allow_fragments) - if allow_fragments and '#' in path: - path, fragment = path.split('#', 1) - if '?' 
in path: - path, query = path.split('?', 1) - return urlparse.SplitResult(scheme, netloc, path, query, fragment) diff --git a/billingstack/openstack/common/notifier/__init__.py b/billingstack/openstack/common/notifier/__init__.py deleted file mode 100644 index 45c3b46..0000000 --- a/billingstack/openstack/common/notifier/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/openstack/common/notifier/api.py b/billingstack/openstack/common/notifier/api.py deleted file mode 100644 index 894f1cb..0000000 --- a/billingstack/openstack/common/notifier/api.py +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import socket -import uuid - -from oslo.config import cfg - -from billingstack.openstack.common import context -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import timeutils - - -LOG = logging.getLogger(__name__) - -notifier_opts = [ - cfg.MultiStrOpt('notification_driver', - default=[], - help='Driver or drivers to handle sending notifications'), - cfg.StrOpt('default_notification_level', - default='INFO', - help='Default notification level for outgoing notifications'), - cfg.StrOpt('default_publisher_id', - default=None, - help='Default publisher_id for outgoing notifications'), -] - -CONF = cfg.CONF -CONF.register_opts(notifier_opts) - -WARN = 'WARN' -INFO = 'INFO' -ERROR = 'ERROR' -CRITICAL = 'CRITICAL' -DEBUG = 'DEBUG' - -log_levels = (DEBUG, WARN, INFO, ERROR, CRITICAL) - - -class BadPriorityException(Exception): - pass - - -def notify_decorator(name, fn): - """Decorator for notify which is used from utils.monkey_patch(). 
- - :param name: name of the function - :param function: - object of the function - :returns: function -- decorated function - - """ - def wrapped_func(*args, **kwarg): - body = {} - body['args'] = [] - body['kwarg'] = {} - for arg in args: - body['args'].append(arg) - for key in kwarg: - body['kwarg'][key] = kwarg[key] - - ctxt = context.get_context_from_function_and_args(fn, args, kwarg) - notify(ctxt, - CONF.default_publisher_id or socket.gethostname(), - name, - CONF.default_notification_level, - body) - return fn(*args, **kwarg) - return wrapped_func - - -def publisher_id(service, host=None): - if not host: - try: - host = CONF.host - except AttributeError: - host = CONF.default_publisher_id or socket.gethostname() - return "%s.%s" % (service, host) - - -def notify(context, publisher_id, event_type, priority, payload): - """Sends a notification using the specified driver - - :param publisher_id: the source worker_type.host of the message - :param event_type: the literal type of event (ex. Instance Creation) - :param priority: patterned after the enumeration of Python logging - levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL) - :param payload: A python dictionary of attributes - - Outgoing message format includes the above parameters, and appends the - following: - - message_id - a UUID representing the id for this notification - - timestamp - the GMT timestamp the notification was sent at - - The composite message will be constructed as a dictionary of the above - attributes, which will then be sent via the transport mechanism defined - by the driver. - - Message example:: - - {'message_id': str(uuid.uuid4()), - 'publisher_id': 'compute.host1', - 'timestamp': timeutils.utcnow(), - 'priority': 'WARN', - 'event_type': 'compute.create_instance', - 'payload': {'instance_id': 12, ... }} - - """ - if priority not in log_levels: - raise BadPriorityException( - _('%s not in valid priorities') % priority) - - # Ensure everything is JSON serializable. 
- payload = jsonutils.to_primitive(payload, convert_instances=True) - - msg = dict(message_id=str(uuid.uuid4()), - publisher_id=publisher_id, - event_type=event_type, - priority=priority, - payload=payload, - timestamp=str(timeutils.utcnow())) - - for driver in _get_drivers(): - try: - driver.notify(context, msg) - except Exception as e: - LOG.exception(_("Problem '%(e)s' attempting to " - "send to notification system. " - "Payload=%(payload)s") - % dict(e=e, payload=payload)) - - -_drivers = None - - -def _get_drivers(): - """Instantiate, cache, and return drivers based on the CONF.""" - global _drivers - if _drivers is None: - _drivers = {} - for notification_driver in CONF.notification_driver: - try: - driver = importutils.import_module(notification_driver) - _drivers[notification_driver] = driver - except ImportError: - LOG.exception(_("Failed to load notifier %s. " - "These notifications will not be sent.") % - notification_driver) - return _drivers.values() - - -def _reset_drivers(): - """Used by unit tests to reset the drivers.""" - global _drivers - _drivers = None diff --git a/billingstack/openstack/common/notifier/log_notifier.py b/billingstack/openstack/common/notifier/log_notifier.py deleted file mode 100644 index 4ce03e2..0000000 --- a/billingstack/openstack/common/notifier/log_notifier.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo.config import cfg - -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging - - -CONF = cfg.CONF - - -def notify(_context, message): - """Notifies the recipient of the desired event given the model. - - Log notifications using OpenStack's default logging system. - """ - - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - logger = logging.getLogger( - 'billingstack.openstack.common.notification.%s' % - message['event_type']) - getattr(logger, priority)(jsonutils.dumps(message)) diff --git a/billingstack/openstack/common/notifier/no_op_notifier.py b/billingstack/openstack/common/notifier/no_op_notifier.py deleted file mode 100644 index 13d946e..0000000 --- a/billingstack/openstack/common/notifier/no_op_notifier.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -def notify(_context, message): - """Notifies the recipient of the desired event given the model.""" - pass diff --git a/billingstack/openstack/common/notifier/rabbit_notifier.py b/billingstack/openstack/common/notifier/rabbit_notifier.py deleted file mode 100644 index 99bdd7b..0000000 --- a/billingstack/openstack/common/notifier/rabbit_notifier.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from billingstack.openstack.common import cfg -from billingstack.openstack.common import context as req_context -from billingstack.openstack.common.gettextutils import _ -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc - -LOG = logging.getLogger(__name__) - -notification_topic_opt = cfg.ListOpt( - 'notification_topics', default=['notifications', ], - help='AMQP topic used for openstack notifications') - -CONF = cfg.CONF -CONF.register_opt(notification_topic_opt) - - -def notify(context, message): - """Sends a notification to the RabbitMQ""" - if not context: - context = req_context.get_admin_context() - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - for topic in CONF.notification_topics: - topic = '%s.%s' % (topic, priority) - try: - rpc.notify(context, topic, message) - except Exception, e: - LOG.exception(_("Could not send notification to %(topic)s. " - "Payload=%(message)s"), locals()) diff --git a/billingstack/openstack/common/notifier/rpc_notifier.py b/billingstack/openstack/common/notifier/rpc_notifier.py deleted file mode 100644 index 31e6d93..0000000 --- a/billingstack/openstack/common/notifier/rpc_notifier.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo.config import cfg - -from billingstack.openstack.common import context as req_context -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc - -LOG = logging.getLogger(__name__) - -notification_topic_opt = cfg.ListOpt( - 'notification_topics', default=['notifications', ], - help='AMQP topic used for OpenStack notifications') - -CONF = cfg.CONF -CONF.register_opt(notification_topic_opt) - - -def notify(context, message): - """Sends a notification via RPC.""" - if not context: - context = req_context.get_admin_context() - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - for topic in CONF.notification_topics: - topic = '%s.%s' % (topic, priority) - try: - rpc.notify(context, topic, message) - except Exception: - LOG.exception(_("Could not send notification to %(topic)s. " - "Payload=%(message)s"), - {"topic": topic, "message": message}) diff --git a/billingstack/openstack/common/notifier/rpc_notifier2.py b/billingstack/openstack/common/notifier/rpc_notifier2.py deleted file mode 100644 index 3474073..0000000 --- a/billingstack/openstack/common/notifier/rpc_notifier2.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -'''messaging based notification driver, with message envelopes''' - -from oslo.config import cfg - -from billingstack.openstack.common import context as req_context -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc - -LOG = logging.getLogger(__name__) - -notification_topic_opt = cfg.ListOpt( - 'topics', default=['notifications', ], - help='AMQP topic(s) used for OpenStack notifications') - -opt_group = cfg.OptGroup(name='rpc_notifier2', - title='Options for rpc_notifier2') - -CONF = cfg.CONF -CONF.register_group(opt_group) -CONF.register_opt(notification_topic_opt, opt_group) - - -def notify(context, message): - """Sends a notification via RPC.""" - if not context: - context = req_context.get_admin_context() - priority = message.get('priority', - CONF.default_notification_level) - priority = priority.lower() - for topic in CONF.rpc_notifier2.topics: - topic = '%s.%s' % (topic, priority) - try: - rpc.notify(context, topic, message, envelope=True) - except Exception: - LOG.exception(_("Could not send notification to %(topic)s. 
" - "Payload=%(message)s"), - {"topic": topic, "message": message}) diff --git a/billingstack/openstack/common/notifier/test_notifier.py b/billingstack/openstack/common/notifier/test_notifier.py deleted file mode 100644 index 96c1746..0000000 --- a/billingstack/openstack/common/notifier/test_notifier.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -NOTIFICATIONS = [] - - -def notify(_context, message): - """Test notifier, stores notifications in memory for unittests.""" - NOTIFICATIONS.append(message) diff --git a/billingstack/openstack/common/processutils.py b/billingstack/openstack/common/processutils.py deleted file mode 100644 index fdcb3d1..0000000 --- a/billingstack/openstack/common/processutils.py +++ /dev/null @@ -1,250 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -System-level utilities and helper functions. -""" - -import logging as stdlib_logging -import os -import random -import shlex -import signal - -from eventlet.green import subprocess -from eventlet import greenthread - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -class InvalidArgumentError(Exception): - def __init__(self, message=None): - super(InvalidArgumentError, self).__init__(message) - - -class UnknownArgumentError(Exception): - def __init__(self, message=None): - super(UnknownArgumentError, self).__init__(message) - - -class ProcessExecutionError(Exception): - def __init__(self, stdout=None, stderr=None, exit_code=None, cmd=None, - description=None): - self.exit_code = exit_code - self.stderr = stderr - self.stdout = stdout - self.cmd = cmd - self.description = description - - if description is None: - description = "Unexpected error while running command." - if exit_code is None: - exit_code = '-' - message = ("%s\nCommand: %s\nExit code: %s\nStdout: %r\nStderr: %r" - % (description, cmd, exit_code, stdout, stderr)) - super(ProcessExecutionError, self).__init__(message) - - -class NoRootWrapSpecified(Exception): - def __init__(self, message=None): - super(NoRootWrapSpecified, self).__init__(message) - - -def _subprocess_setup(): - # Python installs a SIGPIPE handler by default. This is usually not what - # non-Python subprocesses expect. - signal.signal(signal.SIGPIPE, signal.SIG_DFL) - - -def execute(*cmd, **kwargs): - """Helper method to shell out and execute a command through subprocess. - - Allows optional retry. - - :param cmd: Passed to subprocess.Popen. - :type cmd: string - :param process_input: Send to opened process. 
- :type proces_input: string - :param check_exit_code: Single bool, int, or list of allowed exit - codes. Defaults to [0]. Raise - :class:`ProcessExecutionError` unless - program exits with one of these code. - :type check_exit_code: boolean, int, or [int] - :param delay_on_retry: True | False. Defaults to True. If set to True, - wait a short amount of time before retrying. - :type delay_on_retry: boolean - :param attempts: How many times to retry cmd. - :type attempts: int - :param run_as_root: True | False. Defaults to False. If set to True, - the command is prefixed by the command specified - in the root_helper kwarg. - :type run_as_root: boolean - :param root_helper: command to prefix to commands called with - run_as_root=True - :type root_helper: string - :param shell: whether or not there should be a shell used to - execute this command. Defaults to false. - :type shell: boolean - :param loglevel: log level for execute commands. - :type loglevel: int. (Should be stdlib_logging.DEBUG or - stdlib_logging.INFO) - :returns: (stdout, stderr) from process execution - :raises: :class:`UnknownArgumentError` on - receiving unknown arguments - :raises: :class:`ProcessExecutionError` - """ - - process_input = kwargs.pop('process_input', None) - check_exit_code = kwargs.pop('check_exit_code', [0]) - ignore_exit_code = False - delay_on_retry = kwargs.pop('delay_on_retry', True) - attempts = kwargs.pop('attempts', 1) - run_as_root = kwargs.pop('run_as_root', False) - root_helper = kwargs.pop('root_helper', '') - shell = kwargs.pop('shell', False) - loglevel = kwargs.pop('loglevel', stdlib_logging.DEBUG) - - if isinstance(check_exit_code, bool): - ignore_exit_code = not check_exit_code - check_exit_code = [0] - elif isinstance(check_exit_code, int): - check_exit_code = [check_exit_code] - - if kwargs: - raise UnknownArgumentError(_('Got unknown keyword args ' - 'to utils.execute: %r') % kwargs) - - if run_as_root and hasattr(os, 'geteuid') and os.geteuid() != 0: - if not 
root_helper: - raise NoRootWrapSpecified( - message=('Command requested root, but did not specify a root ' - 'helper.')) - cmd = shlex.split(root_helper) + list(cmd) - - cmd = map(str, cmd) - - while attempts > 0: - attempts -= 1 - try: - LOG.log(loglevel, _('Running cmd (subprocess): %s'), ' '.join(cmd)) - _PIPE = subprocess.PIPE # pylint: disable=E1101 - - if os.name == 'nt': - preexec_fn = None - close_fds = False - else: - preexec_fn = _subprocess_setup - close_fds = True - - obj = subprocess.Popen(cmd, - stdin=_PIPE, - stdout=_PIPE, - stderr=_PIPE, - close_fds=close_fds, - preexec_fn=preexec_fn, - shell=shell) - result = None - if process_input is not None: - result = obj.communicate(process_input) - else: - result = obj.communicate() - obj.stdin.close() # pylint: disable=E1101 - _returncode = obj.returncode # pylint: disable=E1101 - LOG.log(loglevel, _('Result was %s') % _returncode) - if not ignore_exit_code and _returncode not in check_exit_code: - (stdout, stderr) = result - raise ProcessExecutionError(exit_code=_returncode, - stdout=stdout, - stderr=stderr, - cmd=' '.join(cmd)) - return result - except ProcessExecutionError: - if not attempts: - raise - else: - LOG.log(loglevel, _('%r failed. Retrying.'), cmd) - if delay_on_retry: - greenthread.sleep(random.randint(20, 200) / 100.0) - finally: - # NOTE(termie): this appears to be necessary to let the subprocess - # call clean something up in between calls, without - # it two execute calls in a row hangs the second one - greenthread.sleep(0) - - -def trycmd(*args, **kwargs): - """A wrapper around execute() to more easily handle warnings and errors. - - Returns an (out, err) tuple of strings containing the output of - the command's stdout and stderr. If 'err' is not empty then the - command can be considered to have failed. - - :discard_warnings True | False. Defaults to False. 
If set to True, - then for succeeding commands, stderr is cleared - - """ - discard_warnings = kwargs.pop('discard_warnings', False) - - try: - out, err = execute(*args, **kwargs) - failed = False - except ProcessExecutionError as exn: - out, err = '', str(exn) - failed = True - - if not failed and discard_warnings and err: - # Handle commands that output to stderr but otherwise succeed - err = '' - - return out, err - - -def ssh_execute(ssh, cmd, process_input=None, - addl_env=None, check_exit_code=True): - LOG.debug(_('Running cmd (SSH): %s'), cmd) - if addl_env: - raise InvalidArgumentError(_('Environment not supported over SSH')) - - if process_input: - # This is (probably) fixable if we need it... - raise InvalidArgumentError(_('process_input not supported over SSH')) - - stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd) - channel = stdout_stream.channel - - # NOTE(justinsb): This seems suspicious... - # ...other SSH clients have buffering issues with this approach - stdout = stdout_stream.read() - stderr = stderr_stream.read() - stdin_stream.close() - - exit_status = channel.recv_exit_status() - - # exit_status == -1 if no exit code was returned - if exit_status != -1: - LOG.debug(_('Result was %s') % exit_status) - if check_exit_code and exit_status != 0: - raise ProcessExecutionError(exit_code=exit_status, - stdout=stdout, - stderr=stderr, - cmd=cmd) - - return (stdout, stderr) diff --git a/billingstack/openstack/common/rpc/__init__.py b/billingstack/openstack/common/rpc/__init__.py deleted file mode 100644 index 6d972aa..0000000 --- a/billingstack/openstack/common/rpc/__init__.py +++ /dev/null @@ -1,306 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2011 Red Hat, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -A remote procedure call (rpc) abstraction. - -For some wrappers that add message versioning to rpc, see: - rpc.dispatcher - rpc.proxy -""" - -import inspect - -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -rpc_opts = [ - cfg.StrOpt('rpc_backend', - default='%s.impl_kombu' % __package__, - help="The messaging module to use, defaults to kombu."), - cfg.IntOpt('rpc_thread_pool_size', - default=64, - help='Size of RPC thread pool'), - cfg.IntOpt('rpc_conn_pool_size', - default=30, - help='Size of RPC connection pool'), - cfg.IntOpt('rpc_response_timeout', - default=60, - help='Seconds to wait for a response from call or multicall'), - cfg.IntOpt('rpc_cast_timeout', - default=30, - help='Seconds to wait before a cast expires (TTL). 
' - 'Only supported by impl_zmq.'), - cfg.ListOpt('allowed_rpc_exception_modules', - default=['nova.exception', - 'cinder.exception', - 'exceptions', - ], - help='Modules of exceptions that are permitted to be recreated' - ' upon receiving exception data from an rpc call.'), - cfg.BoolOpt('fake_rabbit', - default=False, - help='If passed, use a fake RabbitMQ provider'), - cfg.StrOpt('control_exchange', - default='openstack', - help='AMQP exchange to connect to if using RabbitMQ or Qpid'), -] - -CONF = cfg.CONF -CONF.register_opts(rpc_opts) - - -def set_defaults(control_exchange): - cfg.set_defaults(rpc_opts, - control_exchange=control_exchange) - - -def create_connection(new=True): - """Create a connection to the message bus used for rpc. - - For some example usage of creating a connection and some consumers on that - connection, see nova.service. - - :param new: Whether or not to create a new connection. A new connection - will be created by default. If new is False, the - implementation is free to return an existing connection from a - pool. - - :returns: An instance of openstack.common.rpc.common.Connection - """ - return _get_impl().create_connection(CONF, new=new) - - -def _check_for_lock(): - if not CONF.debug: - return None - - if ((hasattr(local.strong_store, 'locks_held') - and local.strong_store.locks_held)): - stack = ' :: '.join([frame[3] for frame in inspect.stack()]) - LOG.warn(_('A RPC is being made while holding a lock. The locks ' - 'currently held are %(locks)s. This is probably a bug. ' - 'Please report it. Include the following: [%(stack)s].'), - {'locks': local.strong_store.locks_held, - 'stack': stack}) - return True - - return False - - -def call(context, topic, msg, timeout=None, check_for_lock=False): - """Invoke a remote method that returns something. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the rpc message to. 
This correlates to the - topic argument of - openstack.common.rpc.common.Connection.create_consumer() - and only applies when the consumer was created with - fanout=False. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - :param timeout: int, number of seconds to use for a response timeout. - If set, this overrides the rpc_response_timeout option. - :param check_for_lock: if True, a warning is emitted if a RPC call is made - with a lock held. - - :returns: A dict from the remote method. - - :raises: openstack.common.rpc.common.Timeout if a complete response - is not received before the timeout is reached. - """ - if check_for_lock: - _check_for_lock() - return _get_impl().call(CONF, context, topic, msg, timeout) - - -def cast(context, topic, msg): - """Invoke a remote method that does not return anything. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the rpc message to. This correlates to the - topic argument of - openstack.common.rpc.common.Connection.create_consumer() - and only applies when the consumer was created with - fanout=False. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - - :returns: None - """ - return _get_impl().cast(CONF, context, topic, msg) - - -def fanout_cast(context, topic, msg): - """Broadcast a remote method invocation with no return. - - This method will get invoked on all consumers that were set up with this - topic name and fanout=True. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the rpc message to. This correlates to the - topic argument of - openstack.common.rpc.common.Connection.create_consumer() - and only applies when the consumer was created with - fanout=True. 
- :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - - :returns: None - """ - return _get_impl().fanout_cast(CONF, context, topic, msg) - - -def multicall(context, topic, msg, timeout=None, check_for_lock=False): - """Invoke a remote method and get back an iterator. - - In this case, the remote method will be returning multiple values in - separate messages, so the return values can be processed as the come in via - an iterator. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the rpc message to. This correlates to the - topic argument of - openstack.common.rpc.common.Connection.create_consumer() - and only applies when the consumer was created with - fanout=False. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - :param timeout: int, number of seconds to use for a response timeout. - If set, this overrides the rpc_response_timeout option. - :param check_for_lock: if True, a warning is emitted if a RPC call is made - with a lock held. - - :returns: An iterator. The iterator will yield a tuple (N, X) where N is - an index that starts at 0 and increases by one for each value - returned and X is the Nth value that was returned by the remote - method. - - :raises: openstack.common.rpc.common.Timeout if a complete response - is not received before the timeout is reached. - """ - if check_for_lock: - _check_for_lock() - return _get_impl().multicall(CONF, context, topic, msg, timeout) - - -def notify(context, topic, msg, envelope=False): - """Send notification event. - - :param context: Information that identifies the user that has made this - request. - :param topic: The topic to send the notification to. - :param msg: This is a dict of content of event. - :param envelope: Set to True to enable message envelope for notifications. 
- - :returns: None - """ - return _get_impl().notify(cfg.CONF, context, topic, msg, envelope) - - -def cleanup(): - """Clean up resources in use by implementation. - - Clean up any resources that have been allocated by the RPC implementation. - This is typically open connections to a messaging service. This function - would get called before an application using this API exits to allow - connections to get torn down cleanly. - - :returns: None - """ - return _get_impl().cleanup() - - -def cast_to_server(context, server_params, topic, msg): - """Invoke a remote method that does not return anything. - - :param context: Information that identifies the user that has made this - request. - :param server_params: Connection information - :param topic: The topic to send the notification to. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - - :returns: None - """ - return _get_impl().cast_to_server(CONF, context, server_params, topic, - msg) - - -def fanout_cast_to_server(context, server_params, topic, msg): - """Broadcast to a remote method invocation with no return. - - :param context: Information that identifies the user that has made this - request. - :param server_params: Connection information - :param topic: The topic to send the notification to. - :param msg: This is a dict in the form { "method" : "method_to_invoke", - "args" : dict_of_kwargs } - - :returns: None - """ - return _get_impl().fanout_cast_to_server(CONF, context, server_params, - topic, msg) - - -def queue_get_for(context, topic, host): - """Get a queue name for a given topic + host. - - This function only works if this naming convention is followed on the - consumer side, as well. For example, in nova, every instance of the - nova-foo service calls create_consumer() for two topics: - - foo - foo. - - Messages sent to the 'foo' topic are distributed to exactly one instance of - the nova-foo service. The services are chosen in a round-robin fashion. 
- Messages sent to the 'foo.' topic are sent to the nova-foo service on - . - """ - return '%s.%s' % (topic, host) if host else topic - - -_RPCIMPL = None - - -def _get_impl(): - """Delay import of rpc_backend until configuration is loaded.""" - global _RPCIMPL - if _RPCIMPL is None: - try: - _RPCIMPL = importutils.import_module(CONF.rpc_backend) - except ImportError: - # For backwards compatibility with older nova config. - impl = CONF.rpc_backend.replace('nova.rpc', - 'nova.openstack.common.rpc') - _RPCIMPL = importutils.import_module(impl) - return _RPCIMPL diff --git a/billingstack/openstack/common/rpc/amqp.py b/billingstack/openstack/common/rpc/amqp.py deleted file mode 100644 index 6206d36..0000000 --- a/billingstack/openstack/common/rpc/amqp.py +++ /dev/null @@ -1,636 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2011 - 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Shared code between AMQP based openstack.common.rpc implementations. - -The code in this module is shared between the rpc implementations based on -AMQP. Specifically, this includes impl_kombu and impl_qpid. impl_carrot also -uses AMQP, but is deprecated and predates this code. 
-""" - -import collections -import inspect -import sys -import uuid - -from eventlet import greenpool -from eventlet import pools -from eventlet import queue -from eventlet import semaphore -from oslo.config import cfg - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import common as rpc_common - - -amqp_opts = [ - cfg.BoolOpt('amqp_durable_queues', - default=False, - deprecated_name='rabbit_durable_queues', - deprecated_group='DEFAULT', - help='Use durable queues in amqp.'), - cfg.BoolOpt('amqp_auto_delete', - default=False, - help='Auto-delete queues in amqp.'), -] - -cfg.CONF.register_opts(amqp_opts) - -UNIQUE_ID = '_unique_id' -LOG = logging.getLogger(__name__) - - -class Pool(pools.Pool): - """Class that implements a Pool of Connections.""" - def __init__(self, conf, connection_cls, *args, **kwargs): - self.connection_cls = connection_cls - self.conf = conf - kwargs.setdefault("max_size", self.conf.rpc_conn_pool_size) - kwargs.setdefault("order_as_stack", True) - super(Pool, self).__init__(*args, **kwargs) - self.reply_proxy = None - - # TODO(comstud): Timeout connections not used in a while - def create(self): - LOG.debug(_('Pool creating new connection')) - return self.connection_cls(self.conf) - - def empty(self): - while self.free_items: - self.get().close() - # Force a new connection pool to be created. - # Note that this was added due to failing unit test cases. The issue - # is the above "while loop" gets all the cached connections from the - # pool and closes them, but never returns them to the pool, a pool - # leak. The unit tests hang waiting for an item to be returned to the - # pool. The unit tests get here via the tearDown() method. 
In the run - # time code, it gets here via cleanup() and only appears in service.py - # just before doing a sys.exit(), so cleanup() only happens once and - # the leakage is not a problem. - self.connection_cls.pool = None - - -_pool_create_sem = semaphore.Semaphore() - - -def get_connection_pool(conf, connection_cls): - with _pool_create_sem: - # Make sure only one thread tries to create the connection pool. - if not connection_cls.pool: - connection_cls.pool = Pool(conf, connection_cls) - return connection_cls.pool - - -class ConnectionContext(rpc_common.Connection): - """The class that is actually returned to the create_connection() caller. - - This is essentially a wrapper around Connection that supports 'with'. - It can also return a new Connection, or one from a pool. - - The function will also catch when an instance of this class is to be - deleted. With that we can return Connections to the pool on exceptions - and so forth without making the caller be responsible for catching them. - If possible the function makes sure to return a connection to the pool. - """ - - def __init__(self, conf, connection_pool, pooled=True, server_params=None): - """Create a new connection, or get one from the pool.""" - self.connection = None - self.conf = conf - self.connection_pool = connection_pool - if pooled: - self.connection = connection_pool.get() - else: - self.connection = connection_pool.connection_cls( - conf, - server_params=server_params) - self.pooled = pooled - - def __enter__(self): - """When with ConnectionContext() is used, return self.""" - return self - - def _done(self): - """If the connection came from a pool, clean it up and put it back. - If it did not come from a pool, close it. 
- """ - if self.connection: - if self.pooled: - # Reset the connection so it's ready for the next caller - # to grab from the pool - self.connection.reset() - self.connection_pool.put(self.connection) - else: - try: - self.connection.close() - except Exception: - pass - self.connection = None - - def __exit__(self, exc_type, exc_value, tb): - """End of 'with' statement. We're done here.""" - self._done() - - def __del__(self): - """Caller is done with this connection. Make sure we cleaned up.""" - self._done() - - def close(self): - """Caller is done with this connection.""" - self._done() - - def create_consumer(self, topic, proxy, fanout=False): - self.connection.create_consumer(topic, proxy, fanout) - - def create_worker(self, topic, proxy, pool_name): - self.connection.create_worker(topic, proxy, pool_name) - - def join_consumer_pool(self, callback, pool_name, topic, exchange_name, - ack_on_error=True): - self.connection.join_consumer_pool(callback, - pool_name, - topic, - exchange_name, - ack_on_error) - - def consume_in_thread(self): - self.connection.consume_in_thread() - - def __getattr__(self, key): - """Proxy all other calls to the Connection instance.""" - if self.connection: - return getattr(self.connection, key) - else: - raise rpc_common.InvalidRPCConnectionReuse() - - -class ReplyProxy(ConnectionContext): - """Connection class for RPC replies / callbacks.""" - def __init__(self, conf, connection_pool): - self._call_waiters = {} - self._num_call_waiters = 0 - self._num_call_waiters_wrn_threshold = 10 - self._reply_q = 'reply_' + uuid.uuid4().hex - super(ReplyProxy, self).__init__(conf, connection_pool, pooled=False) - self.declare_direct_consumer(self._reply_q, self._process_data) - self.consume_in_thread() - - def _process_data(self, message_data): - msg_id = message_data.pop('_msg_id', None) - waiter = self._call_waiters.get(msg_id) - if not waiter: - LOG.warn(_('No calling threads waiting for msg_id : %(msg_id)s' - ', message : %(data)s'), 
{'msg_id': msg_id, - 'data': message_data}) - LOG.warn(_('_call_waiters: %s') % str(self._call_waiters)) - else: - waiter.put(message_data) - - def add_call_waiter(self, waiter, msg_id): - self._num_call_waiters += 1 - if self._num_call_waiters > self._num_call_waiters_wrn_threshold: - LOG.warn(_('Number of call waiters is greater than warning ' - 'threshold: %d. There could be a MulticallProxyWaiter ' - 'leak.') % self._num_call_waiters_wrn_threshold) - self._num_call_waiters_wrn_threshold *= 2 - self._call_waiters[msg_id] = waiter - - def del_call_waiter(self, msg_id): - self._num_call_waiters -= 1 - del self._call_waiters[msg_id] - - def get_reply_q(self): - return self._reply_q - - -def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None, - failure=None, ending=False, log_failure=True): - """Sends a reply or an error on the channel signified by msg_id. - - Failure should be a sys.exc_info() tuple. - - """ - with ConnectionContext(conf, connection_pool) as conn: - if failure: - failure = rpc_common.serialize_remote_exception(failure, - log_failure) - - msg = {'result': reply, 'failure': failure} - if ending: - msg['ending'] = True - _add_unique_id(msg) - # If a reply_q exists, add the msg_id to the reply and pass the - # reply_q to direct_send() to use it as the response queue. - # Otherwise use the msg_id for backward compatibility. 
- if reply_q: - msg['_msg_id'] = msg_id - conn.direct_send(reply_q, rpc_common.serialize_msg(msg)) - else: - conn.direct_send(msg_id, rpc_common.serialize_msg(msg)) - - -class RpcContext(rpc_common.CommonRpcContext): - """Context that supports replying to a rpc.call.""" - def __init__(self, **kwargs): - self.msg_id = kwargs.pop('msg_id', None) - self.reply_q = kwargs.pop('reply_q', None) - self.conf = kwargs.pop('conf') - super(RpcContext, self).__init__(**kwargs) - - def deepcopy(self): - values = self.to_dict() - values['conf'] = self.conf - values['msg_id'] = self.msg_id - values['reply_q'] = self.reply_q - return self.__class__(**values) - - def reply(self, reply=None, failure=None, ending=False, - connection_pool=None, log_failure=True): - if self.msg_id: - msg_reply(self.conf, self.msg_id, self.reply_q, connection_pool, - reply, failure, ending, log_failure) - if ending: - self.msg_id = None - - -def unpack_context(conf, msg): - """Unpack context from msg.""" - context_dict = {} - for key in list(msg.keys()): - # NOTE(vish): Some versions of python don't like unicode keys - # in kwargs. - key = str(key) - if key.startswith('_context_'): - value = msg.pop(key) - context_dict[key[9:]] = value - context_dict['msg_id'] = msg.pop('_msg_id', None) - context_dict['reply_q'] = msg.pop('_reply_q', None) - context_dict['conf'] = conf - ctx = RpcContext.from_dict(context_dict) - rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict()) - return ctx - - -def pack_context(msg, context): - """Pack context into msg. - - Values for message keys need to be less than 255 chars, so we pull - context out into a bunch of separate keys. If we want to support - more arguments in rabbit messages, we may want to do the same - for args at some point. 
- - """ - if isinstance(context, dict): - context_d = dict([('_context_%s' % key, value) - for (key, value) in context.iteritems()]) - else: - context_d = dict([('_context_%s' % key, value) - for (key, value) in context.to_dict().iteritems()]) - - msg.update(context_d) - - -class _MsgIdCache(object): - """This class checks any duplicate messages.""" - - # NOTE: This value is considered can be a configuration item, but - # it is not necessary to change its value in most cases, - # so let this value as static for now. - DUP_MSG_CHECK_SIZE = 16 - - def __init__(self, **kwargs): - self.prev_msgids = collections.deque([], - maxlen=self.DUP_MSG_CHECK_SIZE) - - def check_duplicate_message(self, message_data): - """AMQP consumers may read same message twice when exceptions occur - before ack is returned. This method prevents doing it. - """ - if UNIQUE_ID in message_data: - msg_id = message_data[UNIQUE_ID] - if msg_id not in self.prev_msgids: - self.prev_msgids.append(msg_id) - else: - raise rpc_common.DuplicateMessageError(msg_id=msg_id) - - -def _add_unique_id(msg): - """Add unique_id for checking duplicate messages.""" - unique_id = uuid.uuid4().hex - msg.update({UNIQUE_ID: unique_id}) - LOG.debug(_('UNIQUE_ID is %s.') % (unique_id)) - - -class _ThreadPoolWithWait(object): - """Base class for a delayed invocation manager. - - Used by the Connection class to start up green threads - to handle incoming messages. - """ - - def __init__(self, conf, connection_pool): - self.pool = greenpool.GreenPool(conf.rpc_thread_pool_size) - self.connection_pool = connection_pool - self.conf = conf - - def wait(self): - """Wait for all callback threads to exit.""" - self.pool.waitall() - - -class CallbackWrapper(_ThreadPoolWithWait): - """Wraps a straight callback. - - Allows it to be invoked in a green thread. - """ - - def __init__(self, conf, callback, connection_pool, - wait_for_consumers=False): - """Initiates CallbackWrapper object. 
- - :param conf: cfg.CONF instance - :param callback: a callable (probably a function) - :param connection_pool: connection pool as returned by - get_connection_pool() - :param wait_for_consumers: wait for all green threads to - complete and raise the last - caught exception, if any. - - """ - super(CallbackWrapper, self).__init__( - conf=conf, - connection_pool=connection_pool, - ) - self.callback = callback - self.wait_for_consumers = wait_for_consumers - self.exc_info = None - - def _wrap(self, message_data, **kwargs): - """Wrap the callback invocation to catch exceptions. - """ - try: - self.callback(message_data, **kwargs) - except Exception: - self.exc_info = sys.exc_info() - - def __call__(self, message_data): - self.exc_info = None - self.pool.spawn_n(self._wrap, message_data) - - if self.wait_for_consumers: - self.pool.waitall() - if self.exc_info: - raise self.exc_info[1], None, self.exc_info[2] - - -class ProxyCallback(_ThreadPoolWithWait): - """Calls methods on a proxy object based on method and args.""" - - def __init__(self, conf, proxy, connection_pool): - super(ProxyCallback, self).__init__( - conf=conf, - connection_pool=connection_pool, - ) - self.proxy = proxy - self.msg_id_cache = _MsgIdCache() - - def __call__(self, message_data): - """Consumer callback to call a method on a proxy object. - - Parses the message for validity and fires off a thread to call the - proxy object method. 
- - Message data should be a dictionary with two keys: - method: string representing the method to call - args: dictionary of arg: value - - Example: {'method': 'echo', 'args': {'value': 42}} - - """ - # It is important to clear the context here, because at this point - # the previous context is stored in local.store.context - if hasattr(local.store, 'context'): - del local.store.context - rpc_common._safe_log(LOG.debug, _('received %s'), message_data) - self.msg_id_cache.check_duplicate_message(message_data) - ctxt = unpack_context(self.conf, message_data) - method = message_data.get('method') - args = message_data.get('args', {}) - version = message_data.get('version') - namespace = message_data.get('namespace') - if not method: - LOG.warn(_('no method for message: %s') % message_data) - ctxt.reply(_('No method for message: %s') % message_data, - connection_pool=self.connection_pool) - return - self.pool.spawn_n(self._process_data, ctxt, version, method, - namespace, args) - - def _process_data(self, ctxt, version, method, namespace, args): - """Process a message in a new thread. - - If the proxy object we have has a dispatch method - (see rpc.dispatcher.RpcDispatcher), pass it the version, - method, and args and let it dispatch as appropriate. If not, use - the old behavior of magically calling the specified method on the - proxy we have here. - """ - ctxt.update_store() - try: - rval = self.proxy.dispatch(ctxt, version, method, namespace, - **args) - # Check if the result was a generator - if inspect.isgenerator(rval): - for x in rval: - ctxt.reply(x, None, connection_pool=self.connection_pool) - else: - ctxt.reply(rval, None, connection_pool=self.connection_pool) - # This final None tells multicall that it is done. 
- ctxt.reply(ending=True, connection_pool=self.connection_pool) - except rpc_common.ClientException as e: - LOG.debug(_('Expected exception during message handling (%s)') % - e._exc_info[1]) - ctxt.reply(None, e._exc_info, - connection_pool=self.connection_pool, - log_failure=False) - except Exception: - # sys.exc_info() is deleted by LOG.exception(). - exc_info = sys.exc_info() - LOG.error(_('Exception during message handling'), - exc_info=exc_info) - ctxt.reply(None, exc_info, connection_pool=self.connection_pool) - - -class MulticallProxyWaiter(object): - def __init__(self, conf, msg_id, timeout, connection_pool): - self._msg_id = msg_id - self._timeout = timeout or conf.rpc_response_timeout - self._reply_proxy = connection_pool.reply_proxy - self._done = False - self._got_ending = False - self._conf = conf - self._dataqueue = queue.LightQueue() - # Add this caller to the reply proxy's call_waiters - self._reply_proxy.add_call_waiter(self, self._msg_id) - self.msg_id_cache = _MsgIdCache() - - def put(self, data): - self._dataqueue.put(data) - - def done(self): - if self._done: - return - self._done = True - # Remove this caller from reply proxy's call_waiters - self._reply_proxy.del_call_waiter(self._msg_id) - - def _process_data(self, data): - result = None - self.msg_id_cache.check_duplicate_message(data) - if data['failure']: - failure = data['failure'] - result = rpc_common.deserialize_remote_exception(self._conf, - failure) - elif data.get('ending', False): - self._got_ending = True - else: - result = data['result'] - return result - - def __iter__(self): - """Return a result until we get a reply with an 'ending' flag.""" - if self._done: - raise StopIteration - while True: - try: - data = self._dataqueue.get(timeout=self._timeout) - result = self._process_data(data) - except queue.Empty: - self.done() - raise rpc_common.Timeout() - except Exception: - with excutils.save_and_reraise_exception(): - self.done() - if self._got_ending: - self.done() - raise 
StopIteration - if isinstance(result, Exception): - self.done() - raise result - yield result - - -def create_connection(conf, new, connection_pool): - """Create a connection.""" - return ConnectionContext(conf, connection_pool, pooled=not new) - - -_reply_proxy_create_sem = semaphore.Semaphore() - - -def multicall(conf, context, topic, msg, timeout, connection_pool): - """Make a call that returns multiple times.""" - LOG.debug(_('Making synchronous call on %s ...'), topic) - msg_id = uuid.uuid4().hex - msg.update({'_msg_id': msg_id}) - LOG.debug(_('MSG_ID is %s') % (msg_id)) - _add_unique_id(msg) - pack_context(msg, context) - - with _reply_proxy_create_sem: - if not connection_pool.reply_proxy: - connection_pool.reply_proxy = ReplyProxy(conf, connection_pool) - msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()}) - wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool) - with ConnectionContext(conf, connection_pool) as conn: - conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout) - return wait_msg - - -def call(conf, context, topic, msg, timeout, connection_pool): - """Sends a message on a topic and wait for a response.""" - rv = multicall(conf, context, topic, msg, timeout, connection_pool) - # NOTE(vish): return the last result from the multicall - rv = list(rv) - if not rv: - return - return rv[-1] - - -def cast(conf, context, topic, msg, connection_pool): - """Sends a message on a topic without waiting for a response.""" - LOG.debug(_('Making asynchronous cast on %s...'), topic) - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, connection_pool) as conn: - conn.topic_send(topic, rpc_common.serialize_msg(msg)) - - -def fanout_cast(conf, context, topic, msg, connection_pool): - """Sends a message on a fanout exchange without waiting for a response.""" - LOG.debug(_('Making asynchronous fanout cast...')) - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, 
connection_pool) as conn: - conn.fanout_send(topic, rpc_common.serialize_msg(msg)) - - -def cast_to_server(conf, context, server_params, topic, msg, connection_pool): - """Sends a message on a topic to a specific server.""" - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, connection_pool, pooled=False, - server_params=server_params) as conn: - conn.topic_send(topic, rpc_common.serialize_msg(msg)) - - -def fanout_cast_to_server(conf, context, server_params, topic, msg, - connection_pool): - """Sends a message on a fanout exchange to a specific server.""" - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, connection_pool, pooled=False, - server_params=server_params) as conn: - conn.fanout_send(topic, rpc_common.serialize_msg(msg)) - - -def notify(conf, context, topic, msg, connection_pool, envelope): - """Sends a notification event on a topic.""" - LOG.debug(_('Sending %(event_type)s on %(topic)s'), - dict(event_type=msg.get('event_type'), - topic=topic)) - _add_unique_id(msg) - pack_context(msg, context) - with ConnectionContext(conf, connection_pool) as conn: - if envelope: - msg = rpc_common.serialize_msg(msg) - conn.notify_send(topic, msg) - - -def cleanup(connection_pool): - if connection_pool: - connection_pool.empty() - - -def get_control_exchange(conf): - return conf.control_exchange diff --git a/billingstack/openstack/common/rpc/common.py b/billingstack/openstack/common/rpc/common.py deleted file mode 100644 index b328715..0000000 --- a/billingstack/openstack/common/rpc/common.py +++ /dev/null @@ -1,506 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2011 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import sys -import traceback - -from oslo.config import cfg -import six - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import local -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import versionutils - - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -'''RPC Envelope Version. - -This version number applies to the top level structure of messages sent out. -It does *not* apply to the message payload, which must be versioned -independently. For example, when using rpc APIs, a version number is applied -for changes to the API being exposed over rpc. This version number is handled -in the rpc proxy and dispatcher modules. - -This version number applies to the message envelope that is used in the -serialization done inside the rpc layer. See serialize_msg() and -deserialize_msg(). - -The current message format (version 2.0) is very simple. It is: - - { - 'oslo.version': , - 'oslo.message': - } - -Message format version '1.0' is just considered to be the messages we sent -without a message envelope. - -So, the current message envelope just includes the envelope version. It may -eventually contain additional information, such as a signature for the message -payload. - -We will JSON encode the application message payload. 
The message envelope, -which includes the JSON encoded application message body, will be passed down -to the messaging libraries as a dict. -''' -_RPC_ENVELOPE_VERSION = '2.0' - -_VERSION_KEY = 'oslo.version' -_MESSAGE_KEY = 'oslo.message' - -_REMOTE_POSTFIX = '_Remote' - - -class RPCException(Exception): - msg_fmt = _("An unknown RPC related exception occurred.") - - def __init__(self, message=None, **kwargs): - self.kwargs = kwargs - - if not message: - try: - message = self.msg_fmt % kwargs - - except Exception: - # kwargs doesn't match a variable in the message - # log the issue and the kwargs - LOG.exception(_('Exception in string format operation')) - for name, value in kwargs.iteritems(): - LOG.error("%s: %s" % (name, value)) - # at least get the core message out if something happened - message = self.msg_fmt - - super(RPCException, self).__init__(message) - - -class RemoteError(RPCException): - """Signifies that a remote class has raised an exception. - - Contains a string representation of the type of the original exception, - the value of the original exception, and the traceback. These are - sent to the parent as a joined string so printing the exception - contains all of the relevant info. - - """ - msg_fmt = _("Remote error: %(exc_type)s %(value)s\n%(traceback)s.") - - def __init__(self, exc_type=None, value=None, traceback=None): - self.exc_type = exc_type - self.value = value - self.traceback = traceback - super(RemoteError, self).__init__(exc_type=exc_type, - value=value, - traceback=traceback) - - -class Timeout(RPCException): - """Signifies that a timeout has occurred. - - This exception is raised if the rpc_response_timeout is reached while - waiting for a response from the remote side. - """ - msg_fmt = _('Timeout while waiting on RPC response - ' - 'topic: "%(topic)s", RPC method: "%(method)s" ' - 'info: "%(info)s"') - - def __init__(self, info=None, topic=None, method=None): - """Initiates Timeout object. 
- - :param info: Extra info to convey to the user - :param topic: The topic that the rpc call was sent to - :param rpc_method_name: The name of the rpc method being - called - """ - self.info = info - self.topic = topic - self.method = method - super(Timeout, self).__init__( - None, - info=info or _(''), - topic=topic or _(''), - method=method or _('')) - - -class DuplicateMessageError(RPCException): - msg_fmt = _("Found duplicate message(%(msg_id)s). Skipping it.") - - -class InvalidRPCConnectionReuse(RPCException): - msg_fmt = _("Invalid reuse of an RPC connection.") - - -class UnsupportedRpcVersion(RPCException): - msg_fmt = _("Specified RPC version, %(version)s, not supported by " - "this endpoint.") - - -class UnsupportedRpcEnvelopeVersion(RPCException): - msg_fmt = _("Specified RPC envelope version, %(version)s, " - "not supported by this endpoint.") - - -class RpcVersionCapError(RPCException): - msg_fmt = _("Specified RPC version cap, %(version_cap)s, is too low") - - -class Connection(object): - """A connection, returned by rpc.create_connection(). - - This class represents a connection to the message bus used for rpc. - An instance of this class should never be created by users of the rpc API. - Use rpc.create_connection() instead. - """ - def close(self): - """Close the connection. - - This method must be called when the connection will no longer be used. - It will ensure that any resources associated with the connection, such - as a network connection, and cleaned up. - """ - raise NotImplementedError() - - def create_consumer(self, topic, proxy, fanout=False): - """Create a consumer on this connection. - - A consumer is associated with a message queue on the backend message - bus. The consumer will read messages from the queue, unpack them, and - dispatch them to the proxy object. The contents of the message pulled - off of the queue will determine which method gets called on the proxy - object. 
- - :param topic: This is a name associated with what to consume from. - Multiple instances of a service may consume from the same - topic. For example, all instances of nova-compute consume - from a queue called "compute". In that case, the - messages will get distributed amongst the consumers in a - round-robin fashion if fanout=False. If fanout=True, - every consumer associated with this topic will get a - copy of every message. - :param proxy: The object that will handle all incoming messages. - :param fanout: Whether or not this is a fanout topic. See the - documentation for the topic parameter for some - additional comments on this. - """ - raise NotImplementedError() - - def create_worker(self, topic, proxy, pool_name): - """Create a worker on this connection. - - A worker is like a regular consumer of messages directed to a - topic, except that it is part of a set of such consumers (the - "pool") which may run in parallel. Every pool of workers will - receive a given message, but only one worker in the pool will - be asked to process it. Load is distributed across the members - of the pool in round-robin fashion. - - :param topic: This is a name associated with what to consume from. - Multiple instances of a service may consume from the same - topic. - :param proxy: The object that will handle all incoming messages. - :param pool_name: String containing the name of the pool of workers - """ - raise NotImplementedError() - - def join_consumer_pool(self, callback, pool_name, topic, exchange_name): - """Register as a member of a group of consumers. - - Uses given topic from the specified exchange. - Exactly one member of a given pool will receive each message. - - A message will be delivered to multiple pools, if more than - one is created. - - :param callback: Callable to be invoked for each message. - :type callback: callable accepting one argument - :param pool_name: The name of the consumer pool. 
- :type pool_name: str - :param topic: The routing topic for desired messages. - :type topic: str - :param exchange_name: The name of the message exchange where - the client should attach. Defaults to - the configured exchange. - :type exchange_name: str - """ - raise NotImplementedError() - - def consume_in_thread(self): - """Spawn a thread to handle incoming messages. - - Spawn a thread that will be responsible for handling all incoming - messages for consumers that were set up on this connection. - - Message dispatching inside of this is expected to be implemented in a - non-blocking manner. An example implementation would be having this - thread pull messages in for all of the consumers, but utilize a thread - pool for dispatching the messages to the proxy objects. - """ - raise NotImplementedError() - - -def _safe_log(log_func, msg, msg_data): - """Sanitizes the msg_data field before logging.""" - SANITIZE = ['_context_auth_token', 'auth_token', 'new_pass'] - - def _fix_passwords(d): - """Sanitizes the password fields in the dictionary.""" - for k in d.iterkeys(): - if k.lower().find('password') != -1: - d[k] = '' - elif k.lower() in SANITIZE: - d[k] = '' - elif isinstance(d[k], dict): - _fix_passwords(d[k]) - return d - - return log_func(msg, _fix_passwords(copy.deepcopy(msg_data))) - - -def serialize_remote_exception(failure_info, log_failure=True): - """Prepares exception data to be sent over rpc. - - Failure_info should be a sys.exc_info() tuple. - - """ - tb = traceback.format_exception(*failure_info) - failure = failure_info[1] - if log_failure: - LOG.error(_("Returning exception %s to caller"), - six.text_type(failure)) - LOG.error(tb) - - kwargs = {} - if hasattr(failure, 'kwargs'): - kwargs = failure.kwargs - - # NOTE(matiu): With cells, it's possible to re-raise remote, remote - # exceptions. Lets turn it back into the original exception type. 
- cls_name = str(failure.__class__.__name__) - mod_name = str(failure.__class__.__module__) - if (cls_name.endswith(_REMOTE_POSTFIX) and - mod_name.endswith(_REMOTE_POSTFIX)): - cls_name = cls_name[:-len(_REMOTE_POSTFIX)] - mod_name = mod_name[:-len(_REMOTE_POSTFIX)] - - data = { - 'class': cls_name, - 'module': mod_name, - 'message': six.text_type(failure), - 'tb': tb, - 'args': failure.args, - 'kwargs': kwargs - } - - json_data = jsonutils.dumps(data) - - return json_data - - -def deserialize_remote_exception(conf, data): - failure = jsonutils.loads(str(data)) - - trace = failure.get('tb', []) - message = failure.get('message', "") + "\n" + "\n".join(trace) - name = failure.get('class') - module = failure.get('module') - - # NOTE(ameade): We DO NOT want to allow just any module to be imported, in - # order to prevent arbitrary code execution. - if module not in conf.allowed_rpc_exception_modules: - return RemoteError(name, failure.get('message'), trace) - - try: - mod = importutils.import_module(module) - klass = getattr(mod, name) - if not issubclass(klass, Exception): - raise TypeError("Can only deserialize Exceptions") - - failure = klass(*failure.get('args', []), **failure.get('kwargs', {})) - except (AttributeError, TypeError, ImportError): - return RemoteError(name, failure.get('message'), trace) - - ex_type = type(failure) - str_override = lambda self: message - new_ex_type = type(ex_type.__name__ + _REMOTE_POSTFIX, (ex_type,), - {'__str__': str_override, '__unicode__': str_override}) - new_ex_type.__module__ = '%s%s' % (module, _REMOTE_POSTFIX) - try: - # NOTE(ameade): Dynamically create a new exception type and swap it in - # as the new type for the exception. This only works on user defined - # Exceptions and not core python exceptions. This is important because - # we cannot necessarily change an exception message so we must override - # the __str__ method. 
- failure.__class__ = new_ex_type - except TypeError: - # NOTE(ameade): If a core exception then just add the traceback to the - # first exception argument. - failure.args = (message,) + failure.args[1:] - return failure - - -class CommonRpcContext(object): - def __init__(self, **kwargs): - self.values = kwargs - - def __getattr__(self, key): - try: - return self.values[key] - except KeyError: - raise AttributeError(key) - - def to_dict(self): - return copy.deepcopy(self.values) - - @classmethod - def from_dict(cls, values): - return cls(**values) - - def deepcopy(self): - return self.from_dict(self.to_dict()) - - def update_store(self): - local.store.context = self - - def elevated(self, read_deleted=None, overwrite=False): - """Return a version of this context with admin flag set.""" - # TODO(russellb) This method is a bit of a nova-ism. It makes - # some assumptions about the data in the request context sent - # across rpc, while the rest of this class does not. We could get - # rid of this if we changed the nova code that uses this to - # convert the RpcContext back to its native RequestContext doing - # something like nova.context.RequestContext.from_dict(ctxt.to_dict()) - - context = self.deepcopy() - context.values['is_admin'] = True - - context.values.setdefault('roles', []) - - if 'admin' not in context.values['roles']: - context.values['roles'].append('admin') - - if read_deleted is not None: - context.values['read_deleted'] = read_deleted - - return context - - -class ClientException(Exception): - """Encapsulates actual exception expected to be hit by a RPC proxy object. - - Merely instantiating it records the current exception information, which - will be passed back to the RPC client without exceptional logging. 
- """ - def __init__(self): - self._exc_info = sys.exc_info() - - -def catch_client_exception(exceptions, func, *args, **kwargs): - try: - return func(*args, **kwargs) - except Exception as e: - if type(e) in exceptions: - raise ClientException() - else: - raise - - -def client_exceptions(*exceptions): - """Decorator for manager methods that raise expected exceptions. - - Marking a Manager method with this decorator allows the declaration - of expected exceptions that the RPC layer should not consider fatal, - and not log as if they were generated in a real error scenario. Note - that this will cause listed exceptions to be wrapped in a - ClientException, which is used internally by the RPC layer. - """ - def outer(func): - def inner(*args, **kwargs): - return catch_client_exception(exceptions, func, *args, **kwargs) - return inner - return outer - - -# TODO(sirp): we should deprecate this in favor of -# using `versionutils.is_compatible` directly -def version_is_compatible(imp_version, version): - """Determine whether versions are compatible. - - :param imp_version: The version implemented - :param version: The version requested by an incoming message. - """ - return versionutils.is_compatible(version, imp_version) - - -def serialize_msg(raw_msg): - # NOTE(russellb) See the docstring for _RPC_ENVELOPE_VERSION for more - # information about this format. - msg = {_VERSION_KEY: _RPC_ENVELOPE_VERSION, - _MESSAGE_KEY: jsonutils.dumps(raw_msg)} - - return msg - - -def deserialize_msg(msg): - # NOTE(russellb): Hang on to your hats, this road is about to - # get a little bumpy. - # - # Robustness Principle: - # "Be strict in what you send, liberal in what you accept." - # - # At this point we have to do a bit of guessing about what it - # is we just received. Here is the set of possibilities: - # - # 1) We received a dict. This could be 2 things: - # - # a) Inspect it to see if it looks like a standard message envelope. - # If so, great! 
- # - # b) If it doesn't look like a standard message envelope, it could either - # be a notification, or a message from before we added a message - # envelope (referred to as version 1.0). - # Just return the message as-is. - # - # 2) It's any other non-dict type. Just return it and hope for the best. - # This case covers return values from rpc.call() from before message - # envelopes were used. (messages to call a method were always a dict) - - if not isinstance(msg, dict): - # See #2 above. - return msg - - base_envelope_keys = (_VERSION_KEY, _MESSAGE_KEY) - if not all(map(lambda key: key in msg, base_envelope_keys)): - # See #1.b above. - return msg - - # At this point we think we have the message envelope - # format we were expecting. (#1.a above) - - if not version_is_compatible(_RPC_ENVELOPE_VERSION, msg[_VERSION_KEY]): - raise UnsupportedRpcEnvelopeVersion(version=msg[_VERSION_KEY]) - - raw_msg = jsonutils.loads(msg[_MESSAGE_KEY]) - - return raw_msg diff --git a/billingstack/openstack/common/rpc/dispatcher.py b/billingstack/openstack/common/rpc/dispatcher.py deleted file mode 100644 index 05ce1d0..0000000 --- a/billingstack/openstack/common/rpc/dispatcher.py +++ /dev/null @@ -1,178 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Code for rpc message dispatching. - -Messages that come in have a version number associated with them. 
RPC API -version numbers are in the form: - - Major.Minor - -For a given message with version X.Y, the receiver must be marked as able to -handle messages of version A.B, where: - - A = X - - B >= Y - -The Major version number would be incremented for an almost completely new API. -The Minor version number would be incremented for backwards compatible changes -to an existing API. A backwards compatible change could be something like -adding a new method, adding an argument to an existing method (but not -requiring it), or changing the type for an existing argument (but still -handling the old type as well). - -The conversion over to a versioned API must be done on both the client side and -server side of the API at the same time. However, as the code stands today, -there can be both versioned and unversioned APIs implemented in the same code -base. - -EXAMPLES -======== - -Nova was the first project to use versioned rpc APIs. Consider the compute rpc -API as an example. The client side is in nova/compute/rpcapi.py and the server -side is in nova/compute/manager.py. - - -Example 1) Adding a new method. -------------------------------- - -Adding a new method is a backwards compatible change. It should be added to -nova/compute/manager.py, and RPC_API_VERSION should be bumped from X.Y to -X.Y+1. On the client side, the new method in nova/compute/rpcapi.py should -have a specific version specified to indicate the minimum API version that must -be implemented for the method to be supported. For example:: - - def get_host_uptime(self, ctxt, host): - topic = _compute_topic(self.topic, ctxt, host, None) - return self.call(ctxt, self.make_msg('get_host_uptime'), topic, - version='1.1') - -In this case, version '1.1' is the first version that supported the -get_host_uptime() method. - - -Example 2) Adding a new parameter. ----------------------------------- - -Adding a new parameter to an rpc method can be made backwards compatible. 
The -RPC_API_VERSION on the server side (nova/compute/manager.py) should be bumped. -The implementation of the method must not expect the parameter to be present.:: - - def some_remote_method(self, arg1, arg2, newarg=None): - # The code needs to deal with newarg=None for cases - # where an older client sends a message without it. - pass - -On the client side, the same changes should be made as in example 1. The -minimum version that supports the new parameter should be specified. -""" - -from billingstack.openstack.common.rpc import common as rpc_common -from billingstack.openstack.common.rpc import serializer as rpc_serializer - - -class RpcDispatcher(object): - """Dispatch rpc messages according to the requested API version. - - This class can be used as the top level 'manager' for a service. It - contains a list of underlying managers that have an API_VERSION attribute. - """ - - def __init__(self, callbacks, serializer=None): - """Initialize the rpc dispatcher. - - :param callbacks: List of proxy objects that are an instance - of a class with rpc methods exposed. Each proxy - object should have an RPC_API_VERSION attribute. - :param serializer: The Serializer object that will be used to - deserialize arguments before the method call and - to serialize the result after it returns. - """ - self.callbacks = callbacks - if serializer is None: - serializer = rpc_serializer.NoOpSerializer() - self.serializer = serializer - super(RpcDispatcher, self).__init__() - - def _deserialize_args(self, context, kwargs): - """Helper method called to deserialize args before dispatch. - - This calls our serializer on each argument, returning a new set of - args that have been deserialized. 
- - :param context: The request context - :param kwargs: The arguments to be deserialized - :returns: A new set of deserialized args - """ - new_kwargs = dict() - for argname, arg in kwargs.iteritems(): - new_kwargs[argname] = self.serializer.deserialize_entity(context, - arg) - return new_kwargs - - def dispatch(self, ctxt, version, method, namespace, **kwargs): - """Dispatch a message based on a requested version. - - :param ctxt: The request context - :param version: The requested API version from the incoming message - :param method: The method requested to be called by the incoming - message. - :param namespace: The namespace for the requested method. If None, - the dispatcher will look for a method on a callback - object with no namespace set. - :param kwargs: A dict of keyword arguments to be passed to the method. - - :returns: Whatever is returned by the underlying method that gets - called. - """ - if not version: - version = '1.0' - - had_compatible = False - for proxyobj in self.callbacks: - # Check for namespace compatibility - try: - cb_namespace = proxyobj.RPC_API_NAMESPACE - except AttributeError: - cb_namespace = None - - if namespace != cb_namespace: - continue - - # Check for version compatibility - try: - rpc_api_version = proxyobj.RPC_API_VERSION - except AttributeError: - rpc_api_version = '1.0' - - is_compatible = rpc_common.version_is_compatible(rpc_api_version, - version) - had_compatible = had_compatible or is_compatible - - if not hasattr(proxyobj, method): - continue - if is_compatible: - kwargs = self._deserialize_args(ctxt, kwargs) - result = getattr(proxyobj, method)(ctxt, **kwargs) - return self.serializer.serialize_entity(ctxt, result) - - if had_compatible: - raise AttributeError("No such RPC function '%s'" % method) - else: - raise rpc_common.UnsupportedRpcVersion(version=version) diff --git a/billingstack/openstack/common/rpc/impl_fake.py b/billingstack/openstack/common/rpc/impl_fake.py deleted file mode 100644 index 
e68f67a..0000000 --- a/billingstack/openstack/common/rpc/impl_fake.py +++ /dev/null @@ -1,195 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Fake RPC implementation which calls proxy methods directly with no -queues. Casts will block, but this is very useful for tests. -""" - -import inspect -# NOTE(russellb): We specifically want to use json, not our own jsonutils. -# jsonutils has some extra logic to automatically convert objects to primitive -# types so that they can be serialized. We want to catch all cases where -# non-primitive types make it into this code and treat it as an error. 
-import json -import time - -import eventlet - -from billingstack.openstack.common.rpc import common as rpc_common - -CONSUMERS = {} - - -class RpcContext(rpc_common.CommonRpcContext): - def __init__(self, **kwargs): - super(RpcContext, self).__init__(**kwargs) - self._response = [] - self._done = False - - def deepcopy(self): - values = self.to_dict() - new_inst = self.__class__(**values) - new_inst._response = self._response - new_inst._done = self._done - return new_inst - - def reply(self, reply=None, failure=None, ending=False): - if ending: - self._done = True - if not self._done: - self._response.append((reply, failure)) - - -class Consumer(object): - def __init__(self, topic, proxy): - self.topic = topic - self.proxy = proxy - - def call(self, context, version, method, namespace, args, timeout): - done = eventlet.event.Event() - - def _inner(): - ctxt = RpcContext.from_dict(context.to_dict()) - try: - rval = self.proxy.dispatch(context, version, method, - namespace, **args) - res = [] - # Caller might have called ctxt.reply() manually - for (reply, failure) in ctxt._response: - if failure: - raise failure[0], failure[1], failure[2] - res.append(reply) - # if ending not 'sent'...we might have more data to - # return from the function itself - if not ctxt._done: - if inspect.isgenerator(rval): - for val in rval: - res.append(val) - else: - res.append(rval) - done.send(res) - except rpc_common.ClientException as e: - done.send_exception(e._exc_info[1]) - except Exception as e: - done.send_exception(e) - - thread = eventlet.greenthread.spawn(_inner) - - if timeout: - start_time = time.time() - while not done.ready(): - eventlet.greenthread.sleep(1) - cur_time = time.time() - if (cur_time - start_time) > timeout: - thread.kill() - raise rpc_common.Timeout() - - return done.wait() - - -class Connection(object): - """Connection object.""" - - def __init__(self): - self.consumers = [] - - def create_consumer(self, topic, proxy, fanout=False): - consumer = 
Consumer(topic, proxy) - self.consumers.append(consumer) - if topic not in CONSUMERS: - CONSUMERS[topic] = [] - CONSUMERS[topic].append(consumer) - - def close(self): - for consumer in self.consumers: - CONSUMERS[consumer.topic].remove(consumer) - self.consumers = [] - - def consume_in_thread(self): - pass - - -def create_connection(conf, new=True): - """Create a connection.""" - return Connection() - - -def check_serialize(msg): - """Make sure a message intended for rpc can be serialized.""" - json.dumps(msg) - - -def multicall(conf, context, topic, msg, timeout=None): - """Make a call that returns multiple times.""" - - check_serialize(msg) - - method = msg.get('method') - if not method: - return - args = msg.get('args', {}) - version = msg.get('version', None) - namespace = msg.get('namespace', None) - - try: - consumer = CONSUMERS[topic][0] - except (KeyError, IndexError): - raise rpc_common.Timeout("No consumers available") - else: - return consumer.call(context, version, method, namespace, args, - timeout) - - -def call(conf, context, topic, msg, timeout=None): - """Sends a message on a topic and wait for a response.""" - rv = multicall(conf, context, topic, msg, timeout) - # NOTE(vish): return the last result from the multicall - rv = list(rv) - if not rv: - return - return rv[-1] - - -def cast(conf, context, topic, msg): - check_serialize(msg) - try: - call(conf, context, topic, msg) - except Exception: - pass - - -def notify(conf, context, topic, msg, envelope): - check_serialize(msg) - - -def cleanup(): - pass - - -def fanout_cast(conf, context, topic, msg): - """Cast to all consumers of a topic.""" - check_serialize(msg) - method = msg.get('method') - if not method: - return - args = msg.get('args', {}) - version = msg.get('version', None) - namespace = msg.get('namespace', None) - - for consumer in CONSUMERS.get(topic, []): - try: - consumer.call(context, version, method, namespace, args, None) - except Exception: - pass diff --git 
a/billingstack/openstack/common/rpc/impl_kombu.py b/billingstack/openstack/common/rpc/impl_kombu.py deleted file mode 100644 index 717b8b9..0000000 --- a/billingstack/openstack/common/rpc/impl_kombu.py +++ /dev/null @@ -1,856 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import itertools -import socket -import ssl -import time -import uuid - -import eventlet -import greenlet -import kombu -import kombu.connection -import kombu.entity -import kombu.messaging -from oslo.config import cfg - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import network_utils -from billingstack.openstack.common.rpc import amqp as rpc_amqp -from billingstack.openstack.common.rpc import common as rpc_common -from billingstack.openstack.common import sslutils - -kombu_opts = [ - cfg.StrOpt('kombu_ssl_version', - default='', - help='SSL version to use (valid only if SSL enabled). ' - 'valid values are TLSv1, SSLv23 and SSLv3. 
SSLv2 may ' - 'be available on some distributions' - ), - cfg.StrOpt('kombu_ssl_keyfile', - default='', - help='SSL key file (valid only if SSL enabled)'), - cfg.StrOpt('kombu_ssl_certfile', - default='', - help='SSL cert file (valid only if SSL enabled)'), - cfg.StrOpt('kombu_ssl_ca_certs', - default='', - help=('SSL certification authority file ' - '(valid only if SSL enabled)')), - cfg.StrOpt('rabbit_host', - default='localhost', - help='The RabbitMQ broker address where a single node is used'), - cfg.IntOpt('rabbit_port', - default=5672, - help='The RabbitMQ broker port where a single node is used'), - cfg.ListOpt('rabbit_hosts', - default=['$rabbit_host:$rabbit_port'], - help='RabbitMQ HA cluster host:port pairs'), - cfg.BoolOpt('rabbit_use_ssl', - default=False, - help='connect over SSL for RabbitMQ'), - cfg.StrOpt('rabbit_userid', - default='guest', - help='the RabbitMQ userid'), - cfg.StrOpt('rabbit_password', - default='guest', - help='the RabbitMQ password', - secret=True), - cfg.StrOpt('rabbit_virtual_host', - default='/', - help='the RabbitMQ virtual host'), - cfg.IntOpt('rabbit_retry_interval', - default=1, - help='how frequently to retry connecting with RabbitMQ'), - cfg.IntOpt('rabbit_retry_backoff', - default=2, - help='how long to backoff for between retries when connecting ' - 'to RabbitMQ'), - cfg.IntOpt('rabbit_max_retries', - default=0, - help='maximum retries with trying to connect to RabbitMQ ' - '(the default of 0 implies an infinite retry count)'), - cfg.BoolOpt('rabbit_ha_queues', - default=False, - help='use H/A queues in RabbitMQ (x-ha-policy: all).' - 'You need to wipe RabbitMQ database when ' - 'changing this option.'), - -] - -cfg.CONF.register_opts(kombu_opts) - -LOG = rpc_common.LOG - - -def _get_queue_arguments(conf): - """Construct the arguments for declaring a queue. 
- - If the rabbit_ha_queues option is set, we declare a mirrored queue - as described here: - - http://www.rabbitmq.com/ha.html - - Setting x-ha-policy to all means that the queue will be mirrored - to all nodes in the cluster. - """ - return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {} - - -class ConsumerBase(object): - """Consumer base class.""" - - def __init__(self, channel, callback, tag, **kwargs): - """Declare a queue on an amqp channel. - - 'channel' is the amqp channel to use - 'callback' is the callback to call when messages are received - 'tag' is a unique ID for the consumer on the channel - - queue name, exchange name, and other kombu options are - passed in here as a dictionary. - """ - self.callback = callback - self.tag = str(tag) - self.kwargs = kwargs - self.queue = None - self.ack_on_error = kwargs.get('ack_on_error', True) - self.reconnect(channel) - - def reconnect(self, channel): - """Re-declare the queue after a rabbit reconnect.""" - self.channel = channel - self.kwargs['channel'] = channel - self.queue = kombu.entity.Queue(**self.kwargs) - self.queue.declare() - - def _callback_handler(self, message, callback): - """Call callback with deserialized message. - - Messages that are processed without exception are ack'ed. - - If the message processing generates an exception, it will be - ack'ed if ack_on_error=True. Otherwise it will be .requeue()'ed. - """ - - try: - msg = rpc_common.deserialize_msg(message.payload) - callback(msg) - except Exception: - if self.ack_on_error: - LOG.exception(_("Failed to process message" - " ... skipping it.")) - message.ack() - else: - LOG.exception(_("Failed to process message" - " ... will requeue.")) - message.requeue() - else: - message.ack() - - def consume(self, *args, **kwargs): - """Actually declare the consumer on the amqp channel. This will - start the flow of messages from the queue. 
Using the - Connection.iterconsume() iterator will process the messages, - calling the appropriate callback. - - If a callback is specified in kwargs, use that. Otherwise, - use the callback passed during __init__() - - If kwargs['nowait'] is True, then this call will block until - a message is read. - - """ - - options = {'consumer_tag': self.tag} - options['nowait'] = kwargs.get('nowait', False) - callback = kwargs.get('callback', self.callback) - if not callback: - raise ValueError("No callback defined") - - def _callback(raw_message): - message = self.channel.message_to_python(raw_message) - self._callback_handler(message, callback) - - self.queue.consume(*args, callback=_callback, **options) - - def cancel(self): - """Cancel the consuming from the queue, if it has started.""" - try: - self.queue.cancel(self.tag) - except KeyError as e: - # NOTE(comstud): Kludge to get around a amqplib bug - if str(e) != "u'%s'" % self.tag: - raise - self.queue = None - - -class DirectConsumer(ConsumerBase): - """Queue/consumer class for 'direct'.""" - - def __init__(self, conf, channel, msg_id, callback, tag, **kwargs): - """Init a 'direct' queue. 
- - 'channel' is the amqp channel to use - 'msg_id' is the msg_id to listen on - 'callback' is the callback to call when messages are received - 'tag' is a unique ID for the consumer on the channel - - Other kombu options may be passed - """ - # Default options - options = {'durable': False, - 'queue_arguments': _get_queue_arguments(conf), - 'auto_delete': True, - 'exclusive': False} - options.update(kwargs) - exchange = kombu.entity.Exchange(name=msg_id, - type='direct', - durable=options['durable'], - auto_delete=options['auto_delete']) - super(DirectConsumer, self).__init__(channel, - callback, - tag, - name=msg_id, - exchange=exchange, - routing_key=msg_id, - **options) - - -class TopicConsumer(ConsumerBase): - """Consumer class for 'topic'.""" - - def __init__(self, conf, channel, topic, callback, tag, name=None, - exchange_name=None, **kwargs): - """Init a 'topic' queue. - - :param channel: the amqp channel to use - :param topic: the topic to listen on - :paramtype topic: str - :param callback: the callback to call when messages are received - :param tag: a unique ID for the consumer on the channel - :param name: optional queue name, defaults to topic - :paramtype name: str - - Other kombu options may be passed as keyword arguments - """ - # Default options - options = {'durable': conf.amqp_durable_queues, - 'queue_arguments': _get_queue_arguments(conf), - 'auto_delete': conf.amqp_auto_delete, - 'exclusive': False} - options.update(kwargs) - exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) - exchange = kombu.entity.Exchange(name=exchange_name, - type='topic', - durable=options['durable'], - auto_delete=options['auto_delete']) - super(TopicConsumer, self).__init__(channel, - callback, - tag, - name=name or topic, - exchange=exchange, - routing_key=topic, - **options) - - -class FanoutConsumer(ConsumerBase): - """Consumer class for 'fanout'.""" - - def __init__(self, conf, channel, topic, callback, tag, **kwargs): - """Init a 'fanout' queue. 
- - 'channel' is the amqp channel to use - 'topic' is the topic to listen on - 'callback' is the callback to call when messages are received - 'tag' is a unique ID for the consumer on the channel - - Other kombu options may be passed - """ - unique = uuid.uuid4().hex - exchange_name = '%s_fanout' % topic - queue_name = '%s_fanout_%s' % (topic, unique) - - # Default options - options = {'durable': False, - 'queue_arguments': _get_queue_arguments(conf), - 'auto_delete': True, - 'exclusive': False} - options.update(kwargs) - exchange = kombu.entity.Exchange(name=exchange_name, type='fanout', - durable=options['durable'], - auto_delete=options['auto_delete']) - super(FanoutConsumer, self).__init__(channel, callback, tag, - name=queue_name, - exchange=exchange, - routing_key=topic, - **options) - - -class Publisher(object): - """Base Publisher class.""" - - def __init__(self, channel, exchange_name, routing_key, **kwargs): - """Init the Publisher class with the exchange_name, routing_key, - and other options - """ - self.exchange_name = exchange_name - self.routing_key = routing_key - self.kwargs = kwargs - self.reconnect(channel) - - def reconnect(self, channel): - """Re-establish the Producer after a rabbit reconnection.""" - self.exchange = kombu.entity.Exchange(name=self.exchange_name, - **self.kwargs) - self.producer = kombu.messaging.Producer(exchange=self.exchange, - channel=channel, - routing_key=self.routing_key) - - def send(self, msg, timeout=None): - """Send a message.""" - if timeout: - # - # AMQP TTL is in milliseconds when set in the header. - # - self.producer.publish(msg, headers={'ttl': (timeout * 1000)}) - else: - self.producer.publish(msg) - - -class DirectPublisher(Publisher): - """Publisher class for 'direct'.""" - def __init__(self, conf, channel, msg_id, **kwargs): - """init a 'direct' publisher. 
- - Kombu options may be passed as keyword args to override defaults - """ - - options = {'durable': False, - 'auto_delete': True, - 'exclusive': False} - options.update(kwargs) - super(DirectPublisher, self).__init__(channel, msg_id, msg_id, - type='direct', **options) - - -class TopicPublisher(Publisher): - """Publisher class for 'topic'.""" - def __init__(self, conf, channel, topic, **kwargs): - """init a 'topic' publisher. - - Kombu options may be passed as keyword args to override defaults - """ - options = {'durable': conf.amqp_durable_queues, - 'auto_delete': conf.amqp_auto_delete, - 'exclusive': False} - options.update(kwargs) - exchange_name = rpc_amqp.get_control_exchange(conf) - super(TopicPublisher, self).__init__(channel, - exchange_name, - topic, - type='topic', - **options) - - -class FanoutPublisher(Publisher): - """Publisher class for 'fanout'.""" - def __init__(self, conf, channel, topic, **kwargs): - """init a 'fanout' publisher. - - Kombu options may be passed as keyword args to override defaults - """ - options = {'durable': False, - 'auto_delete': True, - 'exclusive': False} - options.update(kwargs) - super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic, - None, type='fanout', **options) - - -class NotifyPublisher(TopicPublisher): - """Publisher class for 'notify'.""" - - def __init__(self, conf, channel, topic, **kwargs): - self.durable = kwargs.pop('durable', conf.amqp_durable_queues) - self.queue_arguments = _get_queue_arguments(conf) - super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs) - - def reconnect(self, channel): - super(NotifyPublisher, self).reconnect(channel) - - # NOTE(jerdfelt): Normally the consumer would create the queue, but - # we do this to ensure that messages don't get dropped if the - # consumer is started after we do - queue = kombu.entity.Queue(channel=channel, - exchange=self.exchange, - durable=self.durable, - name=self.routing_key, - routing_key=self.routing_key, - 
queue_arguments=self.queue_arguments) - queue.declare() - - -class Connection(object): - """Connection object.""" - - pool = None - - def __init__(self, conf, server_params=None): - self.consumers = [] - self.consumer_thread = None - self.proxy_callbacks = [] - self.conf = conf - self.max_retries = self.conf.rabbit_max_retries - # Try forever? - if self.max_retries <= 0: - self.max_retries = None - self.interval_start = self.conf.rabbit_retry_interval - self.interval_stepping = self.conf.rabbit_retry_backoff - # max retry-interval = 30 seconds - self.interval_max = 30 - self.memory_transport = False - - if server_params is None: - server_params = {} - # Keys to translate from server_params to kombu params - server_params_to_kombu_params = {'username': 'userid'} - - ssl_params = self._fetch_ssl_params() - params_list = [] - for adr in self.conf.rabbit_hosts: - hostname, port = network_utils.parse_host_port( - adr, default_port=self.conf.rabbit_port) - - params = { - 'hostname': hostname, - 'port': port, - 'userid': self.conf.rabbit_userid, - 'password': self.conf.rabbit_password, - 'virtual_host': self.conf.rabbit_virtual_host, - } - - for sp_key, value in server_params.iteritems(): - p_key = server_params_to_kombu_params.get(sp_key, sp_key) - params[p_key] = value - - if self.conf.fake_rabbit: - params['transport'] = 'memory' - if self.conf.rabbit_use_ssl: - params['ssl'] = ssl_params - - params_list.append(params) - - self.params_list = params_list - - self.memory_transport = self.conf.fake_rabbit - - self.connection = None - self.reconnect() - - def _fetch_ssl_params(self): - """Handles fetching what ssl params should be used for the connection - (if any). 
- """ - ssl_params = dict() - - # http://docs.python.org/library/ssl.html - ssl.wrap_socket - if self.conf.kombu_ssl_version: - ssl_params['ssl_version'] = sslutils.validate_ssl_version( - self.conf.kombu_ssl_version) - if self.conf.kombu_ssl_keyfile: - ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile - if self.conf.kombu_ssl_certfile: - ssl_params['certfile'] = self.conf.kombu_ssl_certfile - if self.conf.kombu_ssl_ca_certs: - ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs - # We might want to allow variations in the - # future with this? - ssl_params['cert_reqs'] = ssl.CERT_REQUIRED - - # Return the extended behavior or just have the default behavior - return ssl_params or True - - def _connect(self, params): - """Connect to rabbit. Re-establish any queues that may have - been declared before if we are reconnecting. Exceptions should - be handled by the caller. - """ - if self.connection: - LOG.info(_("Reconnecting to AMQP server on " - "%(hostname)s:%(port)d") % params) - try: - self.connection.release() - except self.connection_errors: - pass - # Setting this in case the next statement fails, though - # it shouldn't be doing any network operations, yet. - self.connection = None - self.connection = kombu.connection.BrokerConnection(**params) - self.connection_errors = self.connection.connection_errors - if self.memory_transport: - # Kludge to speed up tests. - self.connection.transport.polling_interval = 0.0 - self.consumer_num = itertools.count(1) - self.connection.connect() - self.channel = self.connection.channel() - # work around 'memory' transport bug in 1.1.3 - if self.memory_transport: - self.channel._new_queue('ae.undeliver') - for consumer in self.consumers: - consumer.reconnect(self.channel) - LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') % - params) - - def reconnect(self): - """Handles reconnecting and re-establishing queues. - Will retry up to self.max_retries number of times. - self.max_retries = 0 means to retry forever. 
- Sleep between tries, starting at self.interval_start - seconds, backing off self.interval_stepping number of seconds - each attempt. - """ - - attempt = 0 - while True: - params = self.params_list[attempt % len(self.params_list)] - attempt += 1 - try: - self._connect(params) - return - except (IOError, self.connection_errors) as e: - pass - except Exception as e: - # NOTE(comstud): Unfortunately it's possible for amqplib - # to return an error not covered by its transport - # connection_errors in the case of a timeout waiting for - # a protocol response. (See paste link in LP888621) - # So, we check all exceptions for 'timeout' in them - # and try to reconnect in this case. - if 'timeout' not in str(e): - raise - - log_info = {} - log_info['err_str'] = str(e) - log_info['max_retries'] = self.max_retries - log_info.update(params) - - if self.max_retries and attempt == self.max_retries: - msg = _('Unable to connect to AMQP server on ' - '%(hostname)s:%(port)d after %(max_retries)d ' - 'tries: %(err_str)s') % log_info - LOG.error(msg) - raise rpc_common.RPCException(msg) - - if attempt == 1: - sleep_time = self.interval_start or 1 - elif attempt > 1: - sleep_time += self.interval_stepping - if self.interval_max: - sleep_time = min(sleep_time, self.interval_max) - - log_info['sleep_time'] = sleep_time - LOG.error(_('AMQP server on %(hostname)s:%(port)d is ' - 'unreachable: %(err_str)s. Trying again in ' - '%(sleep_time)d seconds.') % log_info) - time.sleep(sleep_time) - - def ensure(self, error_callback, method, *args, **kwargs): - while True: - try: - return method(*args, **kwargs) - except (self.connection_errors, socket.timeout, IOError) as e: - if error_callback: - error_callback(e) - except Exception as e: - # NOTE(comstud): Unfortunately it's possible for amqplib - # to return an error not covered by its transport - # connection_errors in the case of a timeout waiting for - # a protocol response. 
(See paste link in LP888621) - # So, we check all exceptions for 'timeout' in them - # and try to reconnect in this case. - if 'timeout' not in str(e): - raise - if error_callback: - error_callback(e) - self.reconnect() - - def get_channel(self): - """Convenience call for bin/clear_rabbit_queues.""" - return self.channel - - def close(self): - """Close/release this connection.""" - self.cancel_consumer_thread() - self.wait_on_proxy_callbacks() - self.connection.release() - self.connection = None - - def reset(self): - """Reset a connection so it can be used again.""" - self.cancel_consumer_thread() - self.wait_on_proxy_callbacks() - self.channel.close() - self.channel = self.connection.channel() - # work around 'memory' transport bug in 1.1.3 - if self.memory_transport: - self.channel._new_queue('ae.undeliver') - self.consumers = [] - - def declare_consumer(self, consumer_cls, topic, callback): - """Create a Consumer using the class that was passed in and - add it to our list of consumers - """ - - def _connect_error(exc): - log_info = {'topic': topic, 'err_str': str(exc)} - LOG.error(_("Failed to declare consumer for topic '%(topic)s': " - "%(err_str)s") % log_info) - - def _declare_consumer(): - consumer = consumer_cls(self.conf, self.channel, topic, callback, - self.consumer_num.next()) - self.consumers.append(consumer) - return consumer - - return self.ensure(_connect_error, _declare_consumer) - - def iterconsume(self, limit=None, timeout=None): - """Return an iterator that will consume from all queues/consumers.""" - - info = {'do_consume': True} - - def _error_callback(exc): - if isinstance(exc, socket.timeout): - LOG.debug(_('Timed out waiting for RPC response: %s') % - str(exc)) - raise rpc_common.Timeout() - else: - LOG.exception(_('Failed to consume message from queue: %s') % - str(exc)) - info['do_consume'] = True - - def _consume(): - if info['do_consume']: - queues_head = self.consumers[:-1] # not fanout. 
- queues_tail = self.consumers[-1] # fanout - for queue in queues_head: - queue.consume(nowait=True) - queues_tail.consume(nowait=False) - info['do_consume'] = False - return self.connection.drain_events(timeout=timeout) - - for iteration in itertools.count(0): - if limit and iteration >= limit: - raise StopIteration - yield self.ensure(_error_callback, _consume) - - def cancel_consumer_thread(self): - """Cancel a consumer thread.""" - if self.consumer_thread is not None: - self.consumer_thread.kill() - try: - self.consumer_thread.wait() - except greenlet.GreenletExit: - pass - self.consumer_thread = None - - def wait_on_proxy_callbacks(self): - """Wait for all proxy callback threads to exit.""" - for proxy_cb in self.proxy_callbacks: - proxy_cb.wait() - - def publisher_send(self, cls, topic, msg, timeout=None, **kwargs): - """Send to a publisher based on the publisher class.""" - - def _error_callback(exc): - log_info = {'topic': topic, 'err_str': str(exc)} - LOG.exception(_("Failed to publish message to topic " - "'%(topic)s': %(err_str)s") % log_info) - - def _publish(): - publisher = cls(self.conf, self.channel, topic, **kwargs) - publisher.send(msg, timeout) - - self.ensure(_error_callback, _publish) - - def declare_direct_consumer(self, topic, callback): - """Create a 'direct' queue. 
- In nova's use, this is generally a msg_id queue used for - responses for call/multicall - """ - self.declare_consumer(DirectConsumer, topic, callback) - - def declare_topic_consumer(self, topic, callback=None, queue_name=None, - exchange_name=None, ack_on_error=True): - """Create a 'topic' consumer.""" - self.declare_consumer(functools.partial(TopicConsumer, - name=queue_name, - exchange_name=exchange_name, - ack_on_error=ack_on_error, - ), - topic, callback) - - def declare_fanout_consumer(self, topic, callback): - """Create a 'fanout' consumer.""" - self.declare_consumer(FanoutConsumer, topic, callback) - - def direct_send(self, msg_id, msg): - """Send a 'direct' message.""" - self.publisher_send(DirectPublisher, msg_id, msg) - - def topic_send(self, topic, msg, timeout=None): - """Send a 'topic' message.""" - self.publisher_send(TopicPublisher, topic, msg, timeout) - - def fanout_send(self, topic, msg): - """Send a 'fanout' message.""" - self.publisher_send(FanoutPublisher, topic, msg) - - def notify_send(self, topic, msg, **kwargs): - """Send a notify message on a topic.""" - self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs) - - def consume(self, limit=None): - """Consume from all queues/consumers.""" - it = self.iterconsume(limit=limit) - while True: - try: - it.next() - except StopIteration: - return - - def consume_in_thread(self): - """Consumer from all queues/consumers in a greenthread.""" - @excutils.forever_retry_uncaught_exceptions - def _consumer_thread(): - try: - self.consume() - except greenlet.GreenletExit: - return - if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn(_consumer_thread) - return self.consumer_thread - - def create_consumer(self, topic, proxy, fanout=False): - """Create a consumer that calls a method in a proxy object.""" - proxy_cb = rpc_amqp.ProxyCallback( - self.conf, proxy, - rpc_amqp.get_connection_pool(self.conf, Connection)) - self.proxy_callbacks.append(proxy_cb) - - if fanout: - 
self.declare_fanout_consumer(topic, proxy_cb) - else: - self.declare_topic_consumer(topic, proxy_cb) - - def create_worker(self, topic, proxy, pool_name): - """Create a worker that calls a method in a proxy object.""" - proxy_cb = rpc_amqp.ProxyCallback( - self.conf, proxy, - rpc_amqp.get_connection_pool(self.conf, Connection)) - self.proxy_callbacks.append(proxy_cb) - self.declare_topic_consumer(topic, proxy_cb, pool_name) - - def join_consumer_pool(self, callback, pool_name, topic, - exchange_name=None, ack_on_error=True): - """Register as a member of a group of consumers for a given topic from - the specified exchange. - - Exactly one member of a given pool will receive each message. - - A message will be delivered to multiple pools, if more than - one is created. - """ - callback_wrapper = rpc_amqp.CallbackWrapper( - conf=self.conf, - callback=callback, - connection_pool=rpc_amqp.get_connection_pool(self.conf, - Connection), - wait_for_consumers=not ack_on_error - ) - self.proxy_callbacks.append(callback_wrapper) - self.declare_topic_consumer( - queue_name=pool_name, - topic=topic, - exchange_name=exchange_name, - callback=callback_wrapper, - ack_on_error=ack_on_error, - ) - - -def create_connection(conf, new=True): - """Create a connection.""" - return rpc_amqp.create_connection( - conf, new, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def multicall(conf, context, topic, msg, timeout=None): - """Make a call that returns multiple times.""" - return rpc_amqp.multicall( - conf, context, topic, msg, timeout, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def call(conf, context, topic, msg, timeout=None): - """Sends a message on a topic and wait for a response.""" - return rpc_amqp.call( - conf, context, topic, msg, timeout, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def cast(conf, context, topic, msg): - """Sends a message on a topic without waiting for a response.""" - return rpc_amqp.cast( - conf, context, topic, msg, - 
rpc_amqp.get_connection_pool(conf, Connection)) - - -def fanout_cast(conf, context, topic, msg): - """Sends a message on a fanout exchange without waiting for a response.""" - return rpc_amqp.fanout_cast( - conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def cast_to_server(conf, context, server_params, topic, msg): - """Sends a message on a topic to a specific server.""" - return rpc_amqp.cast_to_server( - conf, context, server_params, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def fanout_cast_to_server(conf, context, server_params, topic, msg): - """Sends a message on a fanout exchange to a specific server.""" - return rpc_amqp.fanout_cast_to_server( - conf, context, server_params, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def notify(conf, context, topic, msg, envelope): - """Sends a notification event on a topic.""" - return rpc_amqp.notify( - conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection), - envelope) - - -def cleanup(): - return rpc_amqp.cleanup(Connection.pool) diff --git a/billingstack/openstack/common/rpc/impl_qpid.py b/billingstack/openstack/common/rpc/impl_qpid.py deleted file mode 100644 index 59c9e67..0000000 --- a/billingstack/openstack/common/rpc/impl_qpid.py +++ /dev/null @@ -1,833 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# Copyright 2011 - 2012, Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import itertools -import time -import uuid - -import eventlet -import greenlet -from oslo.config import cfg - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import amqp as rpc_amqp -from billingstack.openstack.common.rpc import common as rpc_common - -qpid_codec = importutils.try_import("qpid.codec010") -qpid_messaging = importutils.try_import("qpid.messaging") -qpid_exceptions = importutils.try_import("qpid.messaging.exceptions") - -LOG = logging.getLogger(__name__) - -qpid_opts = [ - cfg.StrOpt('qpid_hostname', - default='localhost', - help='Qpid broker hostname'), - cfg.IntOpt('qpid_port', - default=5672, - help='Qpid broker port'), - cfg.ListOpt('qpid_hosts', - default=['$qpid_hostname:$qpid_port'], - help='Qpid HA cluster host:port pairs'), - cfg.StrOpt('qpid_username', - default='', - help='Username for qpid connection'), - cfg.StrOpt('qpid_password', - default='', - help='Password for qpid connection', - secret=True), - cfg.StrOpt('qpid_sasl_mechanisms', - default='', - help='Space separated list of SASL mechanisms to use for auth'), - cfg.IntOpt('qpid_heartbeat', - default=60, - help='Seconds between connection keepalive heartbeats'), - cfg.StrOpt('qpid_protocol', - default='tcp', - help="Transport to use, either 'tcp' or 'ssl'"), - cfg.BoolOpt('qpid_tcp_nodelay', - default=True, - help='Disable Nagle algorithm'), - # NOTE(russellb) If any additional versions are added (beyond 1 and 2), - # this file could probably use some additional refactoring so that the - # differences between each version are split into different classes. 
- cfg.IntOpt('qpid_topology_version', - default=1, - help="The qpid topology version to use. Version 1 is what " - "was originally used by impl_qpid. Version 2 includes " - "some backwards-incompatible changes that allow broker " - "federation to work. Users should update to version 2 " - "when they are able to take everything down, as it " - "requires a clean break."), -] - -cfg.CONF.register_opts(qpid_opts) - -JSON_CONTENT_TYPE = 'application/json; charset=utf8' - - -def raise_invalid_topology_version(conf): - msg = (_("Invalid value for qpid_topology_version: %d") % - conf.qpid_topology_version) - LOG.error(msg) - raise Exception(msg) - - -class ConsumerBase(object): - """Consumer base class.""" - - def __init__(self, conf, session, callback, node_name, node_opts, - link_name, link_opts): - """Declare a queue on an amqp session. - - 'session' is the amqp session to use - 'callback' is the callback to call when messages are received - 'node_name' is the first part of the Qpid address string, before ';' - 'node_opts' will be applied to the "x-declare" section of "node" - in the address string. - 'link_name' goes into the "name" field of the "link" in the address - string - 'link_opts' will be applied to the "x-declare" section of "link" - in the address string. 
- """ - self.callback = callback - self.receiver = None - self.session = None - - if conf.qpid_topology_version == 1: - addr_opts = { - "create": "always", - "node": { - "type": "topic", - "x-declare": { - "durable": True, - "auto-delete": True, - }, - }, - "link": { - "name": link_name, - "durable": True, - "x-declare": { - "durable": False, - "auto-delete": True, - "exclusive": False, - }, - }, - } - addr_opts["node"]["x-declare"].update(node_opts) - elif conf.qpid_topology_version == 2: - addr_opts = { - "link": { - "x-declare": { - "auto-delete": True, - }, - }, - } - else: - raise_invalid_topology_version() - - addr_opts["link"]["x-declare"].update(link_opts) - - self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) - - self.connect(session) - - def connect(self, session): - """Declare the receiver on connect.""" - self._declare_receiver(session) - - def reconnect(self, session): - """Re-declare the receiver after a qpid reconnect.""" - self._declare_receiver(session) - - def _declare_receiver(self, session): - self.session = session - self.receiver = session.receiver(self.address) - self.receiver.capacity = 1 - - def _unpack_json_msg(self, msg): - """Load the JSON data in msg if msg.content_type indicates that it - is necessary. Put the loaded data back into msg.content and - update msg.content_type appropriately. - - A Qpid Message containing a dict will have a content_type of - 'amqp/map', whereas one containing a string that needs to be converted - back from JSON will have a content_type of JSON_CONTENT_TYPE. 
- - :param msg: a Qpid Message object - :returns: None - """ - if msg.content_type == JSON_CONTENT_TYPE: - msg.content = jsonutils.loads(msg.content) - msg.content_type = 'amqp/map' - - def consume(self): - """Fetch the message and pass it to the callback object.""" - message = self.receiver.fetch() - try: - self._unpack_json_msg(message) - msg = rpc_common.deserialize_msg(message.content) - self.callback(msg) - except Exception: - LOG.exception(_("Failed to process message... skipping it.")) - finally: - # TODO(sandy): Need support for optional ack_on_error. - self.session.acknowledge(message) - - def get_receiver(self): - return self.receiver - - def get_node_name(self): - return self.address.split(';')[0] - - -class DirectConsumer(ConsumerBase): - """Queue/consumer class for 'direct'.""" - - def __init__(self, conf, session, msg_id, callback): - """Init a 'direct' queue. - - 'session' is the amqp session to use - 'msg_id' is the msg_id to listen on - 'callback' is the callback to call when messages are received - """ - - link_opts = { - "auto-delete": conf.amqp_auto_delete, - "exclusive": True, - "durable": conf.amqp_durable_queues, - } - - if conf.qpid_topology_version == 1: - node_name = "%s/%s" % (msg_id, msg_id) - node_opts = {"type": "direct"} - elif conf.qpid_topology_version == 2: - node_name = "amq.direct/%s" % msg_id - node_opts = {} - else: - raise_invalid_topology_version() - - super(DirectConsumer, self).__init__(conf, session, callback, - node_name, node_opts, msg_id, - link_opts) - - -class TopicConsumer(ConsumerBase): - """Consumer class for 'topic'.""" - - def __init__(self, conf, session, topic, callback, name=None, - exchange_name=None): - """Init a 'topic' queue. 
- - :param session: the amqp session to use - :param topic: is the topic to listen on - :paramtype topic: str - :param callback: the callback to call when messages are received - :param name: optional queue name, defaults to topic - """ - - exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf) - link_opts = { - "auto-delete": conf.amqp_auto_delete, - "durable": conf.amqp_durable_queues, - } - - if conf.qpid_topology_version == 1: - node_name = "%s/%s" % (exchange_name, topic) - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) - else: - raise_invalid_topology_version() - - super(TopicConsumer, self).__init__(conf, session, callback, node_name, - {}, name or topic, link_opts) - - -class FanoutConsumer(ConsumerBase): - """Consumer class for 'fanout'.""" - - def __init__(self, conf, session, topic, callback): - """Init a 'fanout' queue. - - 'session' is the amqp session to use - 'topic' is the topic to listen on - 'callback' is the callback to call when messages are received - """ - self.conf = conf - - link_opts = {"exclusive": True} - - if conf.qpid_topology_version == 1: - node_name = "%s_fanout" % topic - node_opts = {"durable": False, "type": "fanout"} - link_name = "%s_fanout_%s" % (topic, uuid.uuid4().hex) - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/fanout/%s" % topic - node_opts = {} - link_name = "" - else: - raise_invalid_topology_version() - - super(FanoutConsumer, self).__init__(conf, session, callback, - node_name, node_opts, link_name, - link_opts) - - def reconnect(self, session): - topic = self.get_node_name().rpartition('_fanout')[0] - params = { - 'session': session, - 'topic': topic, - 'callback': self.callback, - } - - self.__init__(conf=self.conf, **params) - - super(FanoutConsumer, self).reconnect(session) - - -class Publisher(object): - """Base Publisher class.""" - - def __init__(self, conf, session, node_name, node_opts=None): - """Init the Publisher class 
with the exchange_name, routing_key, - and other options - """ - self.sender = None - self.session = session - - if conf.qpid_topology_version == 1: - addr_opts = { - "create": "always", - "node": { - "type": "topic", - "x-declare": { - "durable": False, - # auto-delete isn't implemented for exchanges in qpid, - # but put in here anyway - "auto-delete": True, - }, - }, - } - if node_opts: - addr_opts["node"]["x-declare"].update(node_opts) - - self.address = "%s ; %s" % (node_name, jsonutils.dumps(addr_opts)) - elif conf.qpid_topology_version == 2: - self.address = node_name - else: - raise_invalid_topology_version() - - self.reconnect(session) - - def reconnect(self, session): - """Re-establish the Sender after a reconnection.""" - self.sender = session.sender(self.address) - - def _pack_json_msg(self, msg): - """Qpid cannot serialize dicts containing strings longer than 65535 - characters. This function dumps the message content to a JSON - string, which Qpid is able to handle. - - :param msg: May be either a Qpid Message object or a bare dict. - :returns: A Qpid Message with its content field JSON encoded. - """ - try: - msg.content = jsonutils.dumps(msg.content) - except AttributeError: - # Need to have a Qpid message so we can set the content_type. - msg = qpid_messaging.Message(jsonutils.dumps(msg)) - msg.content_type = JSON_CONTENT_TYPE - return msg - - def send(self, msg): - """Send a message.""" - try: - # Check if Qpid can encode the message - check_msg = msg - if not hasattr(check_msg, 'content_type'): - check_msg = qpid_messaging.Message(msg) - content_type = check_msg.content_type - enc, dec = qpid_messaging.message.get_codec(content_type) - enc(check_msg.content) - except qpid_codec.CodecException: - # This means the message couldn't be serialized as a dict. 
- msg = self._pack_json_msg(msg) - self.sender.send(msg) - - -class DirectPublisher(Publisher): - """Publisher class for 'direct'.""" - def __init__(self, conf, session, msg_id): - """Init a 'direct' publisher.""" - - if conf.qpid_topology_version == 1: - node_name = msg_id - node_opts = {"type": "direct"} - elif conf.qpid_topology_version == 2: - node_name = "amq.direct/%s" % msg_id - node_opts = {} - else: - raise_invalid_topology_version() - - super(DirectPublisher, self).__init__(conf, session, node_name, - node_opts) - - -class TopicPublisher(Publisher): - """Publisher class for 'topic'.""" - def __init__(self, conf, session, topic): - """Init a 'topic' publisher. - """ - exchange_name = rpc_amqp.get_control_exchange(conf) - - if conf.qpid_topology_version == 1: - node_name = "%s/%s" % (exchange_name, topic) - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) - else: - raise_invalid_topology_version() - - super(TopicPublisher, self).__init__(conf, session, node_name) - - -class FanoutPublisher(Publisher): - """Publisher class for 'fanout'.""" - def __init__(self, conf, session, topic): - """Init a 'fanout' publisher. - """ - - if conf.qpid_topology_version == 1: - node_name = "%s_fanout" % topic - node_opts = {"type": "fanout"} - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/fanout/%s" % topic - node_opts = {} - else: - raise_invalid_topology_version() - - super(FanoutPublisher, self).__init__(conf, session, node_name, - node_opts) - - -class NotifyPublisher(Publisher): - """Publisher class for notifications.""" - def __init__(self, conf, session, topic): - """Init a 'topic' publisher. 
- """ - exchange_name = rpc_amqp.get_control_exchange(conf) - node_opts = {"durable": True} - - if conf.qpid_topology_version == 1: - node_name = "%s/%s" % (exchange_name, topic) - elif conf.qpid_topology_version == 2: - node_name = "amq.topic/topic/%s/%s" % (exchange_name, topic) - else: - raise_invalid_topology_version() - - super(NotifyPublisher, self).__init__(conf, session, node_name, - node_opts) - - -class Connection(object): - """Connection object.""" - - pool = None - - def __init__(self, conf, server_params=None): - if not qpid_messaging: - raise ImportError("Failed to import qpid.messaging") - - self.session = None - self.consumers = {} - self.consumer_thread = None - self.proxy_callbacks = [] - self.conf = conf - - if server_params and 'hostname' in server_params: - # NOTE(russellb) This enables support for cast_to_server. - server_params['qpid_hosts'] = [ - '%s:%d' % (server_params['hostname'], - server_params.get('port', 5672)) - ] - - params = { - 'qpid_hosts': self.conf.qpid_hosts, - 'username': self.conf.qpid_username, - 'password': self.conf.qpid_password, - } - params.update(server_params or {}) - - self.brokers = params['qpid_hosts'] - self.username = params['username'] - self.password = params['password'] - self.connection_create(self.brokers[0]) - self.reconnect() - - def connection_create(self, broker): - # Create the connection - this does not open the connection - self.connection = qpid_messaging.Connection(broker) - - # Check if flags are set and if so set them for the connection - # before we call open - self.connection.username = self.username - self.connection.password = self.password - - self.connection.sasl_mechanisms = self.conf.qpid_sasl_mechanisms - # Reconnection is done by self.reconnect() - self.connection.reconnect = False - self.connection.heartbeat = self.conf.qpid_heartbeat - self.connection.transport = self.conf.qpid_protocol - self.connection.tcp_nodelay = self.conf.qpid_tcp_nodelay - - def _register_consumer(self, 
consumer): - self.consumers[str(consumer.get_receiver())] = consumer - - def _lookup_consumer(self, receiver): - return self.consumers[str(receiver)] - - def reconnect(self): - """Handles reconnecting and re-establishing sessions and queues.""" - attempt = 0 - delay = 1 - while True: - # Close the session if necessary - if self.connection.opened(): - try: - self.connection.close() - except qpid_exceptions.ConnectionError: - pass - - broker = self.brokers[attempt % len(self.brokers)] - attempt += 1 - - try: - self.connection_create(broker) - self.connection.open() - except qpid_exceptions.ConnectionError as e: - msg_dict = dict(e=e, delay=delay) - msg = _("Unable to connect to AMQP server: %(e)s. " - "Sleeping %(delay)s seconds") % msg_dict - LOG.error(msg) - time.sleep(delay) - delay = min(2 * delay, 60) - else: - LOG.info(_('Connected to AMQP server on %s'), broker) - break - - self.session = self.connection.session() - - if self.consumers: - consumers = self.consumers - self.consumers = {} - - for consumer in consumers.itervalues(): - consumer.reconnect(self.session) - self._register_consumer(consumer) - - LOG.debug(_("Re-established AMQP queues")) - - def ensure(self, error_callback, method, *args, **kwargs): - while True: - try: - return method(*args, **kwargs) - except (qpid_exceptions.Empty, - qpid_exceptions.ConnectionError) as e: - if error_callback: - error_callback(e) - self.reconnect() - - def close(self): - """Close/release this connection.""" - self.cancel_consumer_thread() - self.wait_on_proxy_callbacks() - try: - self.connection.close() - except Exception: - # NOTE(dripton) Logging exceptions that happen during cleanup just - # causes confusion; there's really nothing useful we can do with - # them. 
- pass - self.connection = None - - def reset(self): - """Reset a connection so it can be used again.""" - self.cancel_consumer_thread() - self.wait_on_proxy_callbacks() - self.session.close() - self.session = self.connection.session() - self.consumers = {} - - def declare_consumer(self, consumer_cls, topic, callback): - """Create a Consumer using the class that was passed in and - add it to our list of consumers - """ - def _connect_error(exc): - log_info = {'topic': topic, 'err_str': str(exc)} - LOG.error(_("Failed to declare consumer for topic '%(topic)s': " - "%(err_str)s") % log_info) - - def _declare_consumer(): - consumer = consumer_cls(self.conf, self.session, topic, callback) - self._register_consumer(consumer) - return consumer - - return self.ensure(_connect_error, _declare_consumer) - - def iterconsume(self, limit=None, timeout=None): - """Return an iterator that will consume from all queues/consumers.""" - - def _error_callback(exc): - if isinstance(exc, qpid_exceptions.Empty): - LOG.debug(_('Timed out waiting for RPC response: %s') % - str(exc)) - raise rpc_common.Timeout() - else: - LOG.exception(_('Failed to consume message from queue: %s') % - str(exc)) - - def _consume(): - nxt_receiver = self.session.next_receiver(timeout=timeout) - try: - self._lookup_consumer(nxt_receiver).consume() - except Exception: - LOG.exception(_("Error processing message. 
Skipping it.")) - - for iteration in itertools.count(0): - if limit and iteration >= limit: - raise StopIteration - yield self.ensure(_error_callback, _consume) - - def cancel_consumer_thread(self): - """Cancel a consumer thread.""" - if self.consumer_thread is not None: - self.consumer_thread.kill() - try: - self.consumer_thread.wait() - except greenlet.GreenletExit: - pass - self.consumer_thread = None - - def wait_on_proxy_callbacks(self): - """Wait for all proxy callback threads to exit.""" - for proxy_cb in self.proxy_callbacks: - proxy_cb.wait() - - def publisher_send(self, cls, topic, msg): - """Send to a publisher based on the publisher class.""" - - def _connect_error(exc): - log_info = {'topic': topic, 'err_str': str(exc)} - LOG.exception(_("Failed to publish message to topic " - "'%(topic)s': %(err_str)s") % log_info) - - def _publisher_send(): - publisher = cls(self.conf, self.session, topic) - publisher.send(msg) - - return self.ensure(_connect_error, _publisher_send) - - def declare_direct_consumer(self, topic, callback): - """Create a 'direct' queue. - In nova's use, this is generally a msg_id queue used for - responses for call/multicall - """ - self.declare_consumer(DirectConsumer, topic, callback) - - def declare_topic_consumer(self, topic, callback=None, queue_name=None, - exchange_name=None): - """Create a 'topic' consumer.""" - self.declare_consumer(functools.partial(TopicConsumer, - name=queue_name, - exchange_name=exchange_name, - ), - topic, callback) - - def declare_fanout_consumer(self, topic, callback): - """Create a 'fanout' consumer.""" - self.declare_consumer(FanoutConsumer, topic, callback) - - def direct_send(self, msg_id, msg): - """Send a 'direct' message.""" - self.publisher_send(DirectPublisher, msg_id, msg) - - def topic_send(self, topic, msg, timeout=None): - """Send a 'topic' message.""" - # - # We want to create a message with attributes, e.g. a TTL. 
We - # don't really need to keep 'msg' in its JSON format any longer - # so let's create an actual qpid message here and get some - # value-add on the go. - # - # WARNING: Request timeout happens to be in the same units as - # qpid's TTL (seconds). If this changes in the future, then this - # will need to be altered accordingly. - # - qpid_message = qpid_messaging.Message(content=msg, ttl=timeout) - self.publisher_send(TopicPublisher, topic, qpid_message) - - def fanout_send(self, topic, msg): - """Send a 'fanout' message.""" - self.publisher_send(FanoutPublisher, topic, msg) - - def notify_send(self, topic, msg, **kwargs): - """Send a notify message on a topic.""" - self.publisher_send(NotifyPublisher, topic, msg) - - def consume(self, limit=None): - """Consume from all queues/consumers.""" - it = self.iterconsume(limit=limit) - while True: - try: - it.next() - except StopIteration: - return - - def consume_in_thread(self): - """Consumer from all queues/consumers in a greenthread.""" - @excutils.forever_retry_uncaught_exceptions - def _consumer_thread(): - try: - self.consume() - except greenlet.GreenletExit: - return - if self.consumer_thread is None: - self.consumer_thread = eventlet.spawn(_consumer_thread) - return self.consumer_thread - - def create_consumer(self, topic, proxy, fanout=False): - """Create a consumer that calls a method in a proxy object.""" - proxy_cb = rpc_amqp.ProxyCallback( - self.conf, proxy, - rpc_amqp.get_connection_pool(self.conf, Connection)) - self.proxy_callbacks.append(proxy_cb) - - if fanout: - consumer = FanoutConsumer(self.conf, self.session, topic, proxy_cb) - else: - consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb) - - self._register_consumer(consumer) - - return consumer - - def create_worker(self, topic, proxy, pool_name): - """Create a worker that calls a method in a proxy object.""" - proxy_cb = rpc_amqp.ProxyCallback( - self.conf, proxy, - rpc_amqp.get_connection_pool(self.conf, Connection)) - 
self.proxy_callbacks.append(proxy_cb) - - consumer = TopicConsumer(self.conf, self.session, topic, proxy_cb, - name=pool_name) - - self._register_consumer(consumer) - - return consumer - - def join_consumer_pool(self, callback, pool_name, topic, - exchange_name=None, ack_on_error=True): - """Register as a member of a group of consumers for a given topic from - the specified exchange. - - Exactly one member of a given pool will receive each message. - - A message will be delivered to multiple pools, if more than - one is created. - """ - callback_wrapper = rpc_amqp.CallbackWrapper( - conf=self.conf, - callback=callback, - connection_pool=rpc_amqp.get_connection_pool(self.conf, - Connection), - wait_for_consumers=not ack_on_error - ) - self.proxy_callbacks.append(callback_wrapper) - - consumer = TopicConsumer(conf=self.conf, - session=self.session, - topic=topic, - callback=callback_wrapper, - name=pool_name, - exchange_name=exchange_name) - - self._register_consumer(consumer) - return consumer - - -def create_connection(conf, new=True): - """Create a connection.""" - return rpc_amqp.create_connection( - conf, new, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def multicall(conf, context, topic, msg, timeout=None): - """Make a call that returns multiple times.""" - return rpc_amqp.multicall( - conf, context, topic, msg, timeout, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def call(conf, context, topic, msg, timeout=None): - """Sends a message on a topic and wait for a response.""" - return rpc_amqp.call( - conf, context, topic, msg, timeout, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def cast(conf, context, topic, msg): - """Sends a message on a topic without waiting for a response.""" - return rpc_amqp.cast( - conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def fanout_cast(conf, context, topic, msg): - """Sends a message on a fanout exchange without waiting for a response.""" - return 
rpc_amqp.fanout_cast( - conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def cast_to_server(conf, context, server_params, topic, msg): - """Sends a message on a topic to a specific server.""" - return rpc_amqp.cast_to_server( - conf, context, server_params, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def fanout_cast_to_server(conf, context, server_params, topic, msg): - """Sends a message on a fanout exchange to a specific server.""" - return rpc_amqp.fanout_cast_to_server( - conf, context, server_params, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection)) - - -def notify(conf, context, topic, msg, envelope): - """Sends a notification event on a topic.""" - return rpc_amqp.notify(conf, context, topic, msg, - rpc_amqp.get_connection_pool(conf, Connection), - envelope) - - -def cleanup(): - return rpc_amqp.cleanup(Connection.pool) diff --git a/billingstack/openstack/common/rpc/impl_zmq.py b/billingstack/openstack/common/rpc/impl_zmq.py deleted file mode 100644 index 63963df..0000000 --- a/billingstack/openstack/common/rpc/impl_zmq.py +++ /dev/null @@ -1,818 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import pprint -import re -import socket -import sys -import types -import uuid - -import eventlet -import greenlet -from oslo.config import cfg - -from billingstack.openstack.common import excutils -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common.rpc import common as rpc_common - -zmq = importutils.try_import('eventlet.green.zmq') - -# for convenience, are not modified. -pformat = pprint.pformat -Timeout = eventlet.timeout.Timeout -LOG = rpc_common.LOG -RemoteError = rpc_common.RemoteError -RPCException = rpc_common.RPCException - -zmq_opts = [ - cfg.StrOpt('rpc_zmq_bind_address', default='*', - help='ZeroMQ bind address. Should be a wildcard (*), ' - 'an ethernet interface, or IP. ' - 'The "host" option should point or resolve to this ' - 'address.'), - - # The module.Class to use for matchmaking. - cfg.StrOpt( - 'rpc_zmq_matchmaker', - default=('billingstack.openstack.common.rpc.' - 'matchmaker.MatchMakerLocalhost'), - help='MatchMaker driver', - ), - - # The following port is unassigned by IANA as of 2012-05-21 - cfg.IntOpt('rpc_zmq_port', default=9501, - help='ZeroMQ receiver listening port'), - - cfg.IntOpt('rpc_zmq_contexts', default=1, - help='Number of ZeroMQ contexts, defaults to 1'), - - cfg.IntOpt('rpc_zmq_topic_backlog', default=None, - help='Maximum number of ingress messages to locally buffer ' - 'per topic. Default is unlimited.'), - - cfg.StrOpt('rpc_zmq_ipc_dir', default='/var/run/openstack', - help='Directory for holding IPC sockets'), - - cfg.StrOpt('rpc_zmq_host', default=socket.gethostname(), - help='Name of this node. Must be a valid hostname, FQDN, or ' - 'IP address. Must match "host" option, if running Nova.') -] - - -CONF = cfg.CONF -CONF.register_opts(zmq_opts) - -ZMQ_CTX = None # ZeroMQ Context, must be global. 
-matchmaker = None # memoized matchmaker object - - -def _serialize(data): - """Serialization wrapper. - - We prefer using JSON, but it cannot encode all types. - Error if a developer passes us bad data. - """ - try: - return jsonutils.dumps(data, ensure_ascii=True) - except TypeError: - with excutils.save_and_reraise_exception(): - LOG.error(_("JSON serialization failed.")) - - -def _deserialize(data): - """Deserialization wrapper.""" - LOG.debug(_("Deserializing: %s"), data) - return jsonutils.loads(data) - - -class ZmqSocket(object): - """A tiny wrapper around ZeroMQ. - - Simplifies the send/recv protocol and connection management. - Can be used as a Context (supports the 'with' statement). - """ - - def __init__(self, addr, zmq_type, bind=True, subscribe=None): - self.sock = _get_ctxt().socket(zmq_type) - self.addr = addr - self.type = zmq_type - self.subscriptions = [] - - # Support failures on sending/receiving on wrong socket type. - self.can_recv = zmq_type in (zmq.PULL, zmq.SUB) - self.can_send = zmq_type in (zmq.PUSH, zmq.PUB) - self.can_sub = zmq_type in (zmq.SUB, ) - - # Support list, str, & None for subscribe arg (cast to list) - do_sub = { - list: subscribe, - str: [subscribe], - type(None): [] - }[type(subscribe)] - - for f in do_sub: - self.subscribe(f) - - str_data = {'addr': addr, 'type': self.socket_s(), - 'subscribe': subscribe, 'bind': bind} - - LOG.debug(_("Connecting to %(addr)s with %(type)s"), str_data) - LOG.debug(_("-> Subscribed to %(subscribe)s"), str_data) - LOG.debug(_("-> bind: %(bind)s"), str_data) - - try: - if bind: - self.sock.bind(addr) - else: - self.sock.connect(addr) - except Exception: - raise RPCException(_("Could not open socket.")) - - def socket_s(self): - """Get socket type as string.""" - t_enum = ('PUSH', 'PULL', 'PUB', 'SUB', 'REP', 'REQ', 'ROUTER', - 'DEALER') - return dict(map(lambda t: (getattr(zmq, t), t), t_enum))[self.type] - - def subscribe(self, msg_filter): - """Subscribe.""" - if not self.can_sub: - raise 
RPCException("Cannot subscribe on this socket.") - LOG.debug(_("Subscribing to %s"), msg_filter) - - try: - self.sock.setsockopt(zmq.SUBSCRIBE, msg_filter) - except Exception: - return - - self.subscriptions.append(msg_filter) - - def unsubscribe(self, msg_filter): - """Unsubscribe.""" - if msg_filter not in self.subscriptions: - return - self.sock.setsockopt(zmq.UNSUBSCRIBE, msg_filter) - self.subscriptions.remove(msg_filter) - - def close(self): - if self.sock is None or self.sock.closed: - return - - # We must unsubscribe, or we'll leak descriptors. - if self.subscriptions: - for f in self.subscriptions: - try: - self.sock.setsockopt(zmq.UNSUBSCRIBE, f) - except Exception: - pass - self.subscriptions = [] - - try: - # Default is to linger - self.sock.close() - except Exception: - # While this is a bad thing to happen, - # it would be much worse if some of the code calling this - # were to fail. For now, lets log, and later evaluate - # if we can safely raise here. - LOG.error(_("ZeroMQ socket could not be closed.")) - self.sock = None - - def recv(self, **kwargs): - if not self.can_recv: - raise RPCException(_("You cannot recv on this socket.")) - return self.sock.recv_multipart(**kwargs) - - def send(self, data, **kwargs): - if not self.can_send: - raise RPCException(_("You cannot send on this socket.")) - self.sock.send_multipart(data, **kwargs) - - -class ZmqClient(object): - """Client for ZMQ sockets.""" - - def __init__(self, addr): - self.outq = ZmqSocket(addr, zmq.PUSH, bind=False) - - def cast(self, msg_id, topic, data, envelope): - msg_id = msg_id or 0 - - if not envelope: - self.outq.send(map(bytes, - (msg_id, topic, 'cast', _serialize(data)))) - return - - rpc_envelope = rpc_common.serialize_msg(data[1], envelope) - zmq_msg = reduce(lambda x, y: x + y, rpc_envelope.items()) - self.outq.send(map(bytes, - (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg)) - - def close(self): - self.outq.close() - - -class RpcContext(rpc_common.CommonRpcContext): - 
"""Context that supports replying to a rpc.call.""" - def __init__(self, **kwargs): - self.replies = [] - super(RpcContext, self).__init__(**kwargs) - - def deepcopy(self): - values = self.to_dict() - values['replies'] = self.replies - return self.__class__(**values) - - def reply(self, reply=None, failure=None, ending=False): - if ending: - return - self.replies.append(reply) - - @classmethod - def marshal(self, ctx): - ctx_data = ctx.to_dict() - return _serialize(ctx_data) - - @classmethod - def unmarshal(self, data): - return RpcContext.from_dict(_deserialize(data)) - - -class InternalContext(object): - """Used by ConsumerBase as a private context for - methods.""" - - def __init__(self, proxy): - self.proxy = proxy - self.msg_waiter = None - - def _get_response(self, ctx, proxy, topic, data): - """Process a curried message and cast the result to topic.""" - LOG.debug(_("Running func with context: %s"), ctx.to_dict()) - data.setdefault('version', None) - data.setdefault('args', {}) - - try: - result = proxy.dispatch( - ctx, data['version'], data['method'], - data.get('namespace'), **data['args']) - return ConsumerBase.normalize_reply(result, ctx.replies) - except greenlet.GreenletExit: - # ignore these since they are just from shutdowns - pass - except rpc_common.ClientException as e: - LOG.debug(_("Expected exception during message handling (%s)") % - e._exc_info[1]) - return {'exc': - rpc_common.serialize_remote_exception(e._exc_info, - log_failure=False)} - except Exception: - LOG.error(_("Exception during message handling")) - return {'exc': - rpc_common.serialize_remote_exception(sys.exc_info())} - - def reply(self, ctx, proxy, - msg_id=None, context=None, topic=None, msg=None): - """Reply to a casted call.""" - # NOTE(ewindisch): context kwarg exists for Grizzly compat. - # this may be able to be removed earlier than - # 'I' if ConsumerBase.process were refactored. 
- if type(msg) is list: - payload = msg[-1] - else: - payload = msg - - response = ConsumerBase.normalize_reply( - self._get_response(ctx, proxy, topic, payload), - ctx.replies) - - LOG.debug(_("Sending reply")) - _multi_send(_cast, ctx, topic, { - 'method': '-process_reply', - 'args': { - 'msg_id': msg_id, # Include for Folsom compat. - 'response': response - } - }, _msg_id=msg_id) - - -class ConsumerBase(object): - """Base Consumer.""" - - def __init__(self): - self.private_ctx = InternalContext(None) - - @classmethod - def normalize_reply(self, result, replies): - #TODO(ewindisch): re-evaluate and document this method. - if isinstance(result, types.GeneratorType): - return list(result) - elif replies: - return replies - else: - return [result] - - def process(self, proxy, ctx, data): - data.setdefault('version', None) - data.setdefault('args', {}) - - # Method starting with - are - # processed internally. (non-valid method name) - method = data.get('method') - if not method: - LOG.error(_("RPC message did not include method.")) - return - - # Internal method - # uses internal context for safety. - if method == '-reply': - self.private_ctx.reply(ctx, proxy, **data['args']) - return - - proxy.dispatch(ctx, data['version'], - data['method'], data.get('namespace'), **data['args']) - - -class ZmqBaseReactor(ConsumerBase): - """A consumer class implementing a centralized casting broker (PULL-PUSH). - - Used for RoundRobin requests. - """ - - def __init__(self, conf): - super(ZmqBaseReactor, self).__init__() - - self.proxies = {} - self.threads = [] - self.sockets = [] - self.subscribe = {} - - self.pool = eventlet.greenpool.GreenPool(conf.rpc_thread_pool_size) - - def register(self, proxy, in_addr, zmq_type_in, - in_bind=True, subscribe=None): - - LOG.info(_("Registering reactor")) - - if zmq_type_in not in (zmq.PULL, zmq.SUB): - raise RPCException("Bad input socktype") - - # Items push in. 
- inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind, - subscribe=subscribe) - - self.proxies[inq] = proxy - self.sockets.append(inq) - - LOG.info(_("In reactor registered")) - - def consume_in_thread(self): - @excutils.forever_retry_uncaught_exceptions - def _consume(sock): - LOG.info(_("Consuming socket")) - while True: - self.consume(sock) - - for k in self.proxies.keys(): - self.threads.append( - self.pool.spawn(_consume, k) - ) - - def wait(self): - for t in self.threads: - t.wait() - - def close(self): - for s in self.sockets: - s.close() - - for t in self.threads: - t.kill() - - -class ZmqProxy(ZmqBaseReactor): - """A consumer class implementing a topic-based proxy. - - Forwards to IPC sockets. - """ - - def __init__(self, conf): - super(ZmqProxy, self).__init__(conf) - pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\')) - self.badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep))) - - self.topic_proxy = {} - - def consume(self, sock): - ipc_dir = CONF.rpc_zmq_ipc_dir - - data = sock.recv(copy=False) - topic = data[1].bytes - - if topic.startswith('fanout~'): - sock_type = zmq.PUB - topic = topic.split('.', 1)[0] - elif topic.startswith('zmq_replies'): - sock_type = zmq.PUB - else: - sock_type = zmq.PUSH - - if topic not in self.topic_proxy: - def publisher(waiter): - LOG.info(_("Creating proxy for topic: %s"), topic) - - try: - # The topic is received over the network, - # don't trust this input. - if self.badchars.search(topic) is not None: - emsg = _("Topic contained dangerous characters.") - LOG.warn(emsg) - raise RPCException(emsg) - - out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" % - (ipc_dir, topic), - sock_type, bind=True) - except RPCException: - waiter.send_exception(*sys.exc_info()) - return - - self.topic_proxy[topic] = eventlet.queue.LightQueue( - CONF.rpc_zmq_topic_backlog) - self.sockets.append(out_sock) - - # It takes some time for a pub socket to open, - # before we can have any faith in doing a send() to it. 
- if sock_type == zmq.PUB: - eventlet.sleep(.5) - - waiter.send(True) - - while(True): - data = self.topic_proxy[topic].get() - out_sock.send(data, copy=False) - - wait_sock_creation = eventlet.event.Event() - eventlet.spawn(publisher, wait_sock_creation) - - try: - wait_sock_creation.wait() - except RPCException: - LOG.error(_("Topic socket file creation failed.")) - return - - try: - self.topic_proxy[topic].put_nowait(data) - except eventlet.queue.Full: - LOG.error(_("Local per-topic backlog buffer full for topic " - "%(topic)s. Dropping message.") % {'topic': topic}) - - def consume_in_thread(self): - """Runs the ZmqProxy service.""" - ipc_dir = CONF.rpc_zmq_ipc_dir - consume_in = "tcp://%s:%s" % \ - (CONF.rpc_zmq_bind_address, - CONF.rpc_zmq_port) - consumption_proxy = InternalContext(None) - - try: - os.makedirs(ipc_dir) - except os.error: - if not os.path.isdir(ipc_dir): - with excutils.save_and_reraise_exception(): - LOG.error(_("Required IPC directory does not exist at" - " %s") % (ipc_dir, )) - try: - self.register(consumption_proxy, - consume_in, - zmq.PULL) - except zmq.ZMQError: - if os.access(ipc_dir, os.X_OK): - with excutils.save_and_reraise_exception(): - LOG.error(_("Permission denied to IPC directory at" - " %s") % (ipc_dir, )) - with excutils.save_and_reraise_exception(): - LOG.error(_("Could not create ZeroMQ receiver daemon. " - "Socket may already be in use.")) - - super(ZmqProxy, self).consume_in_thread() - - -def unflatten_envelope(packenv): - """Unflattens the RPC envelope. - - Takes a list and returns a dictionary. - i.e. [1,2,3,4] => {1: 2, 3: 4} - """ - i = iter(packenv) - h = {} - try: - while True: - k = i.next() - h[k] = i.next() - except StopIteration: - return h - - -class ZmqReactor(ZmqBaseReactor): - """A consumer class implementing a consumer for messages. 
- - Can also be used as a 1:1 proxy - """ - - def __init__(self, conf): - super(ZmqReactor, self).__init__(conf) - - def consume(self, sock): - #TODO(ewindisch): use zero-copy (i.e. references, not copying) - data = sock.recv() - LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data) - - proxy = self.proxies[sock] - - if data[2] == 'cast': # Legacy protocol - packenv = data[3] - - ctx, msg = _deserialize(packenv) - request = rpc_common.deserialize_msg(msg) - ctx = RpcContext.unmarshal(ctx) - elif data[2] == 'impl_zmq_v2': - packenv = data[4:] - - msg = unflatten_envelope(packenv) - request = rpc_common.deserialize_msg(msg) - - # Unmarshal only after verifying the message. - ctx = RpcContext.unmarshal(data[3]) - else: - LOG.error(_("ZMQ Envelope version unsupported or unknown.")) - return - - self.pool.spawn_n(self.process, proxy, ctx, request) - - -class Connection(rpc_common.Connection): - """Manages connections and threads.""" - - def __init__(self, conf): - self.topics = [] - self.reactor = ZmqReactor(conf) - - def create_consumer(self, topic, proxy, fanout=False): - # Register with matchmaker. - _get_matchmaker().register(topic, CONF.rpc_zmq_host) - - # Subscription scenarios - if fanout: - sock_type = zmq.SUB - subscribe = ('', fanout)[type(fanout) == str] - topic = 'fanout~' + topic.split('.', 1)[0] - else: - sock_type = zmq.PULL - subscribe = None - topic = '.'.join((topic.split('.', 1)[0], CONF.rpc_zmq_host)) - - if topic in self.topics: - LOG.info(_("Skipping topic registration. 
Already registered.")) - return - - # Receive messages from (local) proxy - inaddr = "ipc://%s/zmq_topic_%s" % \ - (CONF.rpc_zmq_ipc_dir, topic) - - LOG.debug(_("Consumer is a zmq.%s"), - ['PULL', 'SUB'][sock_type == zmq.SUB]) - - self.reactor.register(proxy, inaddr, sock_type, - subscribe=subscribe, in_bind=False) - self.topics.append(topic) - - def close(self): - _get_matchmaker().stop_heartbeat() - for topic in self.topics: - _get_matchmaker().unregister(topic, CONF.rpc_zmq_host) - - self.reactor.close() - self.topics = [] - - def wait(self): - self.reactor.wait() - - def consume_in_thread(self): - _get_matchmaker().start_heartbeat() - self.reactor.consume_in_thread() - - -def _cast(addr, context, topic, msg, timeout=None, envelope=False, - _msg_id=None): - timeout_cast = timeout or CONF.rpc_cast_timeout - payload = [RpcContext.marshal(context), msg] - - with Timeout(timeout_cast, exception=rpc_common.Timeout): - try: - conn = ZmqClient(addr) - - # assumes cast can't return an exception - conn.cast(_msg_id, topic, payload, envelope) - except zmq.ZMQError: - raise RPCException("Cast failed. ZMQ Socket Exception") - finally: - if 'conn' in vars(): - conn.close() - - -def _call(addr, context, topic, msg, timeout=None, - envelope=False): - # timeout_response is how long we wait for a response - timeout = timeout or CONF.rpc_response_timeout - - # The msg_id is used to track replies. - msg_id = uuid.uuid4().hex - - # Replies always come into the reply service. - reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host - - LOG.debug(_("Creating payload")) - # Curry the original request into a reply method. - mcontext = RpcContext.marshal(context) - payload = { - 'method': '-reply', - 'args': { - 'msg_id': msg_id, - 'topic': reply_topic, - # TODO(ewindisch): safe to remove mcontext in I. - 'msg': [mcontext, msg] - } - } - - LOG.debug(_("Creating queue socket for reply waiter")) - - # Messages arriving async. 
- # TODO(ewindisch): have reply consumer with dynamic subscription mgmt - with Timeout(timeout, exception=rpc_common.Timeout): - try: - msg_waiter = ZmqSocket( - "ipc://%s/zmq_topic_zmq_replies.%s" % - (CONF.rpc_zmq_ipc_dir, - CONF.rpc_zmq_host), - zmq.SUB, subscribe=msg_id, bind=False - ) - - LOG.debug(_("Sending cast")) - _cast(addr, context, topic, payload, envelope) - - LOG.debug(_("Cast sent; Waiting reply")) - # Blocks until receives reply - msg = msg_waiter.recv() - LOG.debug(_("Received message: %s"), msg) - LOG.debug(_("Unpacking response")) - - if msg[2] == 'cast': # Legacy version - raw_msg = _deserialize(msg[-1])[-1] - elif msg[2] == 'impl_zmq_v2': - rpc_envelope = unflatten_envelope(msg[4:]) - raw_msg = rpc_common.deserialize_msg(rpc_envelope) - else: - raise rpc_common.UnsupportedRpcEnvelopeVersion( - _("Unsupported or unknown ZMQ envelope returned.")) - - responses = raw_msg['args']['response'] - # ZMQError trumps the Timeout error. - except zmq.ZMQError: - raise RPCException("ZMQ Socket Error") - except (IndexError, KeyError): - raise RPCException(_("RPC Message Invalid.")) - finally: - if 'msg_waiter' in vars(): - msg_waiter.close() - - # It seems we don't need to do all of the following, - # but perhaps it would be useful for multicall? - # One effect of this is that we're checking all - # responses for Exceptions. - for resp in responses: - if isinstance(resp, types.DictType) and 'exc' in resp: - raise rpc_common.deserialize_remote_exception(CONF, resp['exc']) - - return responses[-1] - - -def _multi_send(method, context, topic, msg, timeout=None, - envelope=False, _msg_id=None): - """Wraps the sending of messages. - - Dispatches to the matchmaker and sends message to all relevant hosts. 
- """ - conf = CONF - LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))}) - - queues = _get_matchmaker().queues(topic) - LOG.debug(_("Sending message(s) to: %s"), queues) - - # Don't stack if we have no matchmaker results - if not queues: - LOG.warn(_("No matchmaker results. Not casting.")) - # While not strictly a timeout, callers know how to handle - # this exception and a timeout isn't too big a lie. - raise rpc_common.Timeout(_("No match from matchmaker.")) - - # This supports brokerless fanout (addresses > 1) - for queue in queues: - (_topic, ip_addr) = queue - _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port) - - if method.__name__ == '_cast': - eventlet.spawn_n(method, _addr, context, - _topic, msg, timeout, envelope, - _msg_id) - return - return method(_addr, context, _topic, msg, timeout, - envelope) - - -def create_connection(conf, new=True): - return Connection(conf) - - -def multicall(conf, *args, **kwargs): - """Multiple calls.""" - return _multi_send(_call, *args, **kwargs) - - -def call(conf, *args, **kwargs): - """Send a message, expect a response.""" - data = _multi_send(_call, *args, **kwargs) - return data[-1] - - -def cast(conf, *args, **kwargs): - """Send a message expecting no reply.""" - _multi_send(_cast, *args, **kwargs) - - -def fanout_cast(conf, context, topic, msg, **kwargs): - """Send a message to all listening and expect no reply.""" - # NOTE(ewindisch): fanout~ is used because it avoid splitting on . - # and acts as a non-subtle hint to the matchmaker and ZmqProxy. - _multi_send(_cast, context, 'fanout~' + str(topic), msg, **kwargs) - - -def notify(conf, context, topic, msg, envelope): - """Send notification event. - - Notifications are sent to topic-priority. - This differs from the AMQP drivers which send to topic.priority. - """ - # NOTE(ewindisch): dot-priority in rpc notifier does not - # work with our assumptions. 
- topic = topic.replace('.', '-') - cast(conf, context, topic, msg, envelope=envelope) - - -def cleanup(): - """Clean up resources in use by implementation.""" - global ZMQ_CTX - if ZMQ_CTX: - ZMQ_CTX.term() - ZMQ_CTX = None - - global matchmaker - matchmaker = None - - -def _get_ctxt(): - if not zmq: - raise ImportError("Failed to import eventlet.green.zmq") - - global ZMQ_CTX - if not ZMQ_CTX: - ZMQ_CTX = zmq.Context(CONF.rpc_zmq_contexts) - return ZMQ_CTX - - -def _get_matchmaker(*args, **kwargs): - global matchmaker - if not matchmaker: - mm = CONF.rpc_zmq_matchmaker - if mm.endswith('matchmaker.MatchMakerRing'): - mm.replace('matchmaker', 'matchmaker_ring') - LOG.warn(_('rpc_zmq_matchmaker = %(orig)s is deprecated; use' - ' %(new)s instead') % dict( - orig=CONF.rpc_zmq_matchmaker, new=mm)) - matchmaker = importutils.import_object(mm, *args, **kwargs) - return matchmaker diff --git a/billingstack/openstack/common/rpc/matchmaker.py b/billingstack/openstack/common/rpc/matchmaker.py deleted file mode 100644 index 290b991..0000000 --- a/billingstack/openstack/common/rpc/matchmaker.py +++ /dev/null @@ -1,324 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -The MatchMaker classes should except a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. 
-""" - -import contextlib - -import eventlet -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging - - -matchmaker_opts = [ - cfg.IntOpt('matchmaker_heartbeat_freq', - default=300, - help='Heartbeat frequency'), - cfg.IntOpt('matchmaker_heartbeat_ttl', - default=600, - help='Heartbeat time-to-live.'), -] - -CONF = cfg.CONF -CONF.register_opts(matchmaker_opts) -LOG = logging.getLogger(__name__) -contextmanager = contextlib.contextmanager - - -class MatchMakerException(Exception): - """Signified a match could not be found.""" - message = _("Match not found by MatchMaker.") - - -class Exchange(object): - """Implements lookups. - - Subclass this to support hashtables, dns, etc. - """ - def __init__(self): - pass - - def run(self, key): - raise NotImplementedError() - - -class Binding(object): - """A binding on which to perform a lookup.""" - def __init__(self): - pass - - def test(self, key): - raise NotImplementedError() - - -class MatchMakerBase(object): - """Match Maker Base Class. - - Build off HeartbeatMatchMakerBase if building a heartbeat-capable - MatchMaker. - """ - def __init__(self): - # Array of tuples. Index [2] toggles negation, [3] is last-if-true - self.bindings = [] - - self.no_heartbeat_msg = _('Matchmaker does not implement ' - 'registration or heartbeat.') - - def register(self, key, host): - """Register a host on a backend. - - Heartbeats, if applicable, may keepalive registration. - """ - pass - - def ack_alive(self, key, host): - """Acknowledge that a key.host is alive. - - Used internally for updating heartbeats, but may also be used - publically to acknowledge a system is alive (i.e. 
rpc message - successfully sent to host) - """ - pass - - def is_alive(self, topic, host): - """Checks if a host is alive.""" - pass - - def expire(self, topic, host): - """Explicitly expire a host's registration.""" - pass - - def send_heartbeats(self): - """Send all heartbeats. - - Use start_heartbeat to spawn a heartbeat greenthread, - which loops this method. - """ - pass - - def unregister(self, key, host): - """Unregister a topic.""" - pass - - def start_heartbeat(self): - """Spawn heartbeat greenthread.""" - pass - - def stop_heartbeat(self): - """Destroys the heartbeat greenthread.""" - pass - - def add_binding(self, binding, rule, last=True): - self.bindings.append((binding, rule, False, last)) - - #NOTE(ewindisch): kept the following method in case we implement the - # underlying support. - #def add_negate_binding(self, binding, rule, last=True): - # self.bindings.append((binding, rule, True, last)) - - def queues(self, key): - workers = [] - - # bit is for negate bindings - if we choose to implement it. - # last stops processing rules if this matches. - for (binding, exchange, bit, last) in self.bindings: - if binding.test(key): - workers.extend(exchange.run(key)) - - # Support last. - if last: - return workers - return workers - - -class HeartbeatMatchMakerBase(MatchMakerBase): - """Base for a heart-beat capable MatchMaker. - - Provides common methods for registering, unregistering, and maintaining - heartbeats. - """ - def __init__(self): - self.hosts = set() - self._heart = None - self.host_topic = {} - - super(HeartbeatMatchMakerBase, self).__init__() - - def send_heartbeats(self): - """Send all heartbeats. - - Use start_heartbeat to spawn a heartbeat greenthread, - which loops this method. - """ - for key, host in self.host_topic: - self.ack_alive(key, host) - - def ack_alive(self, key, host): - """Acknowledge that a host.topic is alive. 
- - Used internally for updating heartbeats, but may also be used - publically to acknowledge a system is alive (i.e. rpc message - successfully sent to host) - """ - raise NotImplementedError("Must implement ack_alive") - - def backend_register(self, key, host): - """Implements registration logic. - - Called by register(self,key,host) - """ - raise NotImplementedError("Must implement backend_register") - - def backend_unregister(self, key, key_host): - """Implements de-registration logic. - - Called by unregister(self,key,host) - """ - raise NotImplementedError("Must implement backend_unregister") - - def register(self, key, host): - """Register a host on a backend. - - Heartbeats, if applicable, may keepalive registration. - """ - self.hosts.add(host) - self.host_topic[(key, host)] = host - key_host = '.'.join((key, host)) - - self.backend_register(key, key_host) - - self.ack_alive(key, host) - - def unregister(self, key, host): - """Unregister a topic.""" - if (key, host) in self.host_topic: - del self.host_topic[(key, host)] - - self.hosts.discard(host) - self.backend_unregister(key, '.'.join((key, host))) - - LOG.info(_("Matchmaker unregistered: %(key)s, %(host)s"), - {'key': key, 'host': host}) - - def start_heartbeat(self): - """Implementation of MatchMakerBase.start_heartbeat. - - Launches greenthread looping send_heartbeats(), - yielding for CONF.matchmaker_heartbeat_freq seconds - between iterations. - """ - if not self.hosts: - raise MatchMakerException( - _("Register before starting heartbeat.")) - - def do_heartbeat(): - while True: - self.send_heartbeats() - eventlet.sleep(CONF.matchmaker_heartbeat_freq) - - self._heart = eventlet.spawn(do_heartbeat) - - def stop_heartbeat(self): - """Destroys the heartbeat greenthread.""" - if self._heart: - self._heart.kill() - - -class DirectBinding(Binding): - """Specifies a host in the key via a '.' character. 
- - Although dots are used in the key, the behavior here is - that it maps directly to a host, thus direct. - """ - def test(self, key): - return '.' in key - - -class TopicBinding(Binding): - """Where a 'bare' key without dots. - - AMQP generally considers topic exchanges to be those *with* dots, - but we deviate here in terminology as the behavior here matches - that of a topic exchange (whereas where there are dots, behavior - matches that of a direct exchange. - """ - def test(self, key): - return '.' not in key - - -class FanoutBinding(Binding): - """Match on fanout keys, where key starts with 'fanout.' string.""" - def test(self, key): - return key.startswith('fanout~') - - -class StubExchange(Exchange): - """Exchange that does nothing.""" - def run(self, key): - return [(key, None)] - - -class LocalhostExchange(Exchange): - """Exchange where all direct topics are local.""" - def __init__(self, host='localhost'): - self.host = host - super(Exchange, self).__init__() - - def run(self, key): - return [('.'.join((key.split('.')[0], self.host)), self.host)] - - -class DirectExchange(Exchange): - """Exchange where all topic keys are split, sending to second half. - - i.e. "compute.host" sends a message to "compute.host" running on "host" - """ - def __init__(self): - super(Exchange, self).__init__() - - def run(self, key): - e = key.split('.', 1)[1] - return [(key, e)] - - -class MatchMakerLocalhost(MatchMakerBase): - """Match Maker where all bare topics resolve to localhost. - - Useful for testing. - """ - def __init__(self, host='localhost'): - super(MatchMakerLocalhost, self).__init__() - self.add_binding(FanoutBinding(), LocalhostExchange(host)) - self.add_binding(DirectBinding(), DirectExchange()) - self.add_binding(TopicBinding(), LocalhostExchange(host)) - - -class MatchMakerStub(MatchMakerBase): - """Match Maker where topics are untouched. - - Useful for testing, or for AMQP/brokered queues. - Will not work where knowledge of hosts is known (i.e. 
zeromq) - """ - def __init__(self): - super(MatchMakerStub, self).__init__() - - self.add_binding(FanoutBinding(), StubExchange()) - self.add_binding(DirectBinding(), StubExchange()) - self.add_binding(TopicBinding(), StubExchange()) diff --git a/billingstack/openstack/common/rpc/matchmaker_redis.py b/billingstack/openstack/common/rpc/matchmaker_redis.py deleted file mode 100644 index 273e164..0000000 --- a/billingstack/openstack/common/rpc/matchmaker_redis.py +++ /dev/null @@ -1,145 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -The MatchMaker classes should accept a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. -""" - -from oslo.config import cfg - -from billingstack.openstack.common import importutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import matchmaker as mm_common - -redis = importutils.try_import('redis') - - -matchmaker_redis_opts = [ - cfg.StrOpt('host', - default='127.0.0.1', - help='Host to locate redis'), - cfg.IntOpt('port', - default=6379, - help='Use this port to connect to redis host.'), - cfg.StrOpt('password', - default=None, - help='Password for Redis server. 
(optional)'), -] - -CONF = cfg.CONF -opt_group = cfg.OptGroup(name='matchmaker_redis', - title='Options for Redis-based MatchMaker') -CONF.register_group(opt_group) -CONF.register_opts(matchmaker_redis_opts, opt_group) -LOG = logging.getLogger(__name__) - - -class RedisExchange(mm_common.Exchange): - def __init__(self, matchmaker): - self.matchmaker = matchmaker - self.redis = matchmaker.redis - super(RedisExchange, self).__init__() - - -class RedisTopicExchange(RedisExchange): - """Exchange where all topic keys are split, sending to second half. - - i.e. "compute.host" sends a message to "compute" running on "host" - """ - def run(self, topic): - while True: - member_name = self.redis.srandmember(topic) - - if not member_name: - # If this happens, there are no - # longer any members. - break - - if not self.matchmaker.is_alive(topic, member_name): - continue - - host = member_name.split('.', 1)[1] - return [(member_name, host)] - return [] - - -class RedisFanoutExchange(RedisExchange): - """Return a list of all hosts.""" - def run(self, topic): - topic = topic.split('~', 1)[1] - hosts = self.redis.smembers(topic) - good_hosts = filter( - lambda host: self.matchmaker.is_alive(topic, host), hosts) - - return [(x, x.split('.', 1)[1]) for x in good_hosts] - - -class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase): - """MatchMaker registering and looking-up hosts with a Redis server.""" - def __init__(self): - super(MatchMakerRedis, self).__init__() - - if not redis: - raise ImportError("Failed to import module redis.") - - self.redis = redis.StrictRedis( - host=CONF.matchmaker_redis.host, - port=CONF.matchmaker_redis.port, - password=CONF.matchmaker_redis.password) - - self.add_binding(mm_common.FanoutBinding(), RedisFanoutExchange(self)) - self.add_binding(mm_common.DirectBinding(), mm_common.DirectExchange()) - self.add_binding(mm_common.TopicBinding(), RedisTopicExchange(self)) - - def ack_alive(self, key, host): - topic = "%s.%s" % (key, host) - if not 
self.redis.expire(topic, CONF.matchmaker_heartbeat_ttl): - # If we could not update the expiration, the key - # might have been pruned. Re-register, creating a new - # key in Redis. - self.register(self.topic_host[host], host) - - def is_alive(self, topic, host): - if self.redis.ttl(host) == -1: - self.expire(topic, host) - return False - return True - - def expire(self, topic, host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.delete(host) - pipe.srem(topic, host) - pipe.execute() - - def backend_register(self, key, key_host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.sadd(key, key_host) - - # No value is needed, we just - # care if it exists. Sets aren't viable - # because only keys can expire. - pipe.set(key_host, '') - - pipe.execute() - - def backend_unregister(self, key, key_host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.srem(key, key_host) - pipe.delete(key_host) - pipe.execute() diff --git a/billingstack/openstack/common/rpc/matchmaker_ring.py b/billingstack/openstack/common/rpc/matchmaker_ring.py deleted file mode 100644 index 0dca9d1..0000000 --- a/billingstack/openstack/common/rpc/matchmaker_ring.py +++ /dev/null @@ -1,108 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011-2013 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -The MatchMaker classes should except a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. -""" - -import itertools -import json - -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.rpc import matchmaker as mm - - -matchmaker_opts = [ - # Matchmaker ring file - cfg.StrOpt('ringfile', - deprecated_name='matchmaker_ringfile', - deprecated_group='DEFAULT', - default='/etc/oslo/matchmaker_ring.json', - help='Matchmaker ring file (JSON)'), -] - -CONF = cfg.CONF -CONF.register_opts(matchmaker_opts, 'matchmaker_ring') -LOG = logging.getLogger(__name__) - - -class RingExchange(mm.Exchange): - """Match Maker where hosts are loaded from a static JSON formatted file. - - __init__ takes optional ring dictionary argument, otherwise - loads the ringfile from CONF.mathcmaker_ringfile. - """ - def __init__(self, ring=None): - super(RingExchange, self).__init__() - - if ring: - self.ring = ring - else: - fh = open(CONF.matchmaker_ring.ringfile, 'r') - self.ring = json.load(fh) - fh.close() - - self.ring0 = {} - for k in self.ring.keys(): - self.ring0[k] = itertools.cycle(self.ring[k]) - - def _ring_has(self, key): - return key in self.ring0 - - -class RoundRobinRingExchange(RingExchange): - """A Topic Exchange based on a hashmap.""" - def __init__(self, ring=None): - super(RoundRobinRingExchange, self).__init__(ring) - - def run(self, key): - if not self._ring_has(key): - LOG.warn( - _("No key defining hosts for topic '%s', " - "see ringfile") % (key, ) - ) - return [] - host = next(self.ring0[key]) - return [(key + '.' + host, host)] - - -class FanoutRingExchange(RingExchange): - """Fanout Exchange based on a hashmap.""" - def __init__(self, ring=None): - super(FanoutRingExchange, self).__init__(ring) - - def run(self, key): - # Assume starts with "fanout~", strip it for lookup. 
- nkey = key.split('fanout~')[1:][0] - if not self._ring_has(nkey): - LOG.warn( - _("No key defining hosts for topic '%s', " - "see ringfile") % (nkey, ) - ) - return [] - return map(lambda x: (key + '.' + x, x), self.ring[nkey]) - - -class MatchMakerRing(mm.MatchMakerBase): - """Match Maker where hosts are loaded from a static hashmap.""" - def __init__(self, ring=None): - super(MatchMakerRing, self).__init__() - self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring)) - self.add_binding(mm.DirectBinding(), mm.DirectExchange()) - self.add_binding(mm.TopicBinding(), RoundRobinRingExchange(ring)) diff --git a/billingstack/openstack/common/rpc/proxy.py b/billingstack/openstack/common/rpc/proxy.py deleted file mode 100644 index 2b791d7..0000000 --- a/billingstack/openstack/common/rpc/proxy.py +++ /dev/null @@ -1,225 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012-2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -A helper class for proxy objects to remote APIs. - -For more information about rpc API version numbers, see: - rpc/dispatcher.py -""" - -from billingstack.openstack.common import rpc -from billingstack.openstack.common.rpc import common as rpc_common -from billingstack.openstack.common.rpc import serializer as rpc_serializer - - -class RpcProxy(object): - """A helper class for rpc clients. - - This class is a wrapper around the RPC client API. 
It allows you to - specify the topic and API version in a single place. This is intended to - be used as a base class for a class that implements the client side of an - rpc API. - """ - - # The default namespace, which can be overridden in a subclass. - RPC_API_NAMESPACE = None - - def __init__(self, topic, default_version, version_cap=None, - serializer=None): - """Initialize an RpcProxy. - - :param topic: The topic to use for all messages. - :param default_version: The default API version to request in all - outgoing messages. This can be overridden on a per-message - basis. - :param version_cap: Optionally cap the maximum version used for sent - messages. - :param serializer: Optionaly (de-)serialize entities with a - provided helper. - """ - self.topic = topic - self.default_version = default_version - self.version_cap = version_cap - if serializer is None: - serializer = rpc_serializer.NoOpSerializer() - self.serializer = serializer - super(RpcProxy, self).__init__() - - def _set_version(self, msg, vers): - """Helper method to set the version in a message. - - :param msg: The message having a version added to it. - :param vers: The version number to add to the message. 
- """ - v = vers if vers else self.default_version - if (self.version_cap and not - rpc_common.version_is_compatible(self.version_cap, v)): - raise rpc_common.RpcVersionCapError(version_cap=self.version_cap) - msg['version'] = v - - def _get_topic(self, topic): - """Return the topic to use for a message.""" - return topic if topic else self.topic - - def can_send_version(self, version): - """Check to see if a version is compatible with the version cap.""" - return (not self.version_cap or - rpc_common.version_is_compatible(self.version_cap, version)) - - @staticmethod - def make_namespaced_msg(method, namespace, **kwargs): - return {'method': method, 'namespace': namespace, 'args': kwargs} - - def make_msg(self, method, **kwargs): - return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE, - **kwargs) - - def _serialize_msg_args(self, context, kwargs): - """Helper method called to serialize message arguments. - - This calls our serializer on each argument, returning a new - set of args that have been serialized. - - :param context: The request context - :param kwargs: The arguments to serialize - :returns: A new set of serialized arguments - """ - new_kwargs = dict() - for argname, arg in kwargs.iteritems(): - new_kwargs[argname] = self.serializer.serialize_entity(context, - arg) - return new_kwargs - - def call(self, context, msg, topic=None, version=None, timeout=None): - """rpc.call() a remote method. - - :param context: The request context - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - :param timeout: (Optional) A timeout to use when waiting for the - response. If no timeout is specified, a default timeout will be - used that is usually sufficient. - - :returns: The return value from the remote method. 
- """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - real_topic = self._get_topic(topic) - try: - result = rpc.call(context, real_topic, msg, timeout) - return self.serializer.deserialize_entity(context, result) - except rpc.common.Timeout as exc: - raise rpc.common.Timeout( - exc.info, real_topic, msg.get('method')) - - def multicall(self, context, msg, topic=None, version=None, timeout=None): - """rpc.multicall() a remote method. - - :param context: The request context - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - :param timeout: (Optional) A timeout to use when waiting for the - response. If no timeout is specified, a default timeout will be - used that is usually sufficient. - - :returns: An iterator that lets you process each of the returned values - from the remote method as they arrive. - """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - real_topic = self._get_topic(topic) - try: - result = rpc.multicall(context, real_topic, msg, timeout) - return self.serializer.deserialize_entity(context, result) - except rpc.common.Timeout as exc: - raise rpc.common.Timeout( - exc.info, real_topic, msg.get('method')) - - def cast(self, context, msg, topic=None, version=None): - """rpc.cast() a remote method. - - :param context: The request context - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - - :returns: None. rpc.cast() does not wait on any return value from the - remote method. 
- """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - rpc.cast(context, self._get_topic(topic), msg) - - def fanout_cast(self, context, msg, topic=None, version=None): - """rpc.fanout_cast() a remote method. - - :param context: The request context - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - - :returns: None. rpc.fanout_cast() does not wait on any return value - from the remote method. - """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - rpc.fanout_cast(context, self._get_topic(topic), msg) - - def cast_to_server(self, context, server_params, msg, topic=None, - version=None): - """rpc.cast_to_server() a remote method. - - :param context: The request context - :param server_params: Server parameters. See rpc.cast_to_server() for - details. - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - - :returns: None. rpc.cast_to_server() does not wait on any - return values. - """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - rpc.cast_to_server(context, server_params, self._get_topic(topic), msg) - - def fanout_cast_to_server(self, context, server_params, msg, topic=None, - version=None): - """rpc.fanout_cast_to_server() a remote method. - - :param context: The request context - :param server_params: Server parameters. See rpc.cast_to_server() for - details. - :param msg: The message to send, including the method and args. - :param topic: Override the topic for this message. - :param version: (Optional) Override the requested API version in this - message. - - :returns: None. 
rpc.fanout_cast_to_server() does not wait on any - return values. - """ - self._set_version(msg, version) - msg['args'] = self._serialize_msg_args(context, msg['args']) - rpc.fanout_cast_to_server(context, server_params, - self._get_topic(topic), msg) diff --git a/billingstack/openstack/common/rpc/securemessage.py b/billingstack/openstack/common/rpc/securemessage.py deleted file mode 100644 index ee46d58..0000000 --- a/billingstack/openstack/common/rpc/securemessage.py +++ /dev/null @@ -1,521 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import base64 -import collections -import os -import struct -import time - -import requests - -from oslo.config import cfg - -from billingstack.openstack.common.crypto import utils as cryptoutils -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging - -secure_message_opts = [ - cfg.BoolOpt('enabled', default=True, - help='Whether Secure Messaging (Signing) is enabled,' - ' defaults to enabled'), - cfg.BoolOpt('enforced', default=False, - help='Whether Secure Messaging (Signing) is enforced,' - ' defaults to not enforced'), - cfg.BoolOpt('encrypt', default=False, - help='Whether Secure Messaging (Encryption) is enabled,' - ' defaults to not enabled'), - cfg.StrOpt('secret_keys_file', - help='Path to the file containing the keys, takes precedence' - ' over secret_key'), - cfg.MultiStrOpt('secret_key', - help='A list of keys: (ex: name:),' - ' ignored if secret_keys_file is set'), - cfg.StrOpt('kds_endpoint', - help='KDS endpoint (ex: http://kds.example.com:35357/v3)'), -] -secure_message_group = cfg.OptGroup('secure_messages', - title='Secure Messaging options') - -LOG = logging.getLogger(__name__) - - -class SecureMessageException(Exception): - """Generic Exception for Secure Messages.""" - - msg = "An unknown Secure Message related exception occurred." - - def __init__(self, msg=None): - if msg is None: - msg = self.msg - super(SecureMessageException, self).__init__(msg) - - -class SharedKeyNotFound(SecureMessageException): - """No shared key was found and no other external authentication mechanism - is available. - """ - - msg = "Shared Key for [%s] Not Found. 
(%s)" - - def __init__(self, name, errmsg): - super(SharedKeyNotFound, self).__init__(self.msg % (name, errmsg)) - - -class InvalidMetadata(SecureMessageException): - """The metadata is invalid.""" - - msg = "Invalid metadata: %s" - - def __init__(self, err): - super(InvalidMetadata, self).__init__(self.msg % err) - - -class InvalidSignature(SecureMessageException): - """Signature validation failed.""" - - msg = "Failed to validate signature (source=%s, destination=%s)" - - def __init__(self, src, dst): - super(InvalidSignature, self).__init__(self.msg % (src, dst)) - - -class UnknownDestinationName(SecureMessageException): - """The Destination name is unknown to us.""" - - msg = "Invalid destination name (%s)" - - def __init__(self, name): - super(UnknownDestinationName, self).__init__(self.msg % name) - - -class InvalidEncryptedTicket(SecureMessageException): - """The Encrypted Ticket could not be successfully handled.""" - - msg = "Invalid Ticket (source=%s, destination=%s)" - - def __init__(self, src, dst): - super(InvalidEncryptedTicket, self).__init__(self.msg % (src, dst)) - - -class InvalidExpiredTicket(SecureMessageException): - """The ticket received is already expired.""" - - msg = "Expired ticket (source=%s, destination=%s)" - - def __init__(self, src, dst): - super(InvalidExpiredTicket, self).__init__(self.msg % (src, dst)) - - -class CommunicationError(SecureMessageException): - """The Communication with the KDS failed.""" - - msg = "Communication Error (target=%s): %s" - - def __init__(self, target, errmsg): - super(CommunicationError, self).__init__(self.msg % (target, errmsg)) - - -class InvalidArgument(SecureMessageException): - """Bad initialization argument.""" - - msg = "Invalid argument: %s" - - def __init__(self, errmsg): - super(InvalidArgument, self).__init__(self.msg % errmsg) - - -Ticket = collections.namedtuple('Ticket', ['skey', 'ekey', 'esek']) - - -class KeyStore(object): - """A storage class for Signing and Encryption Keys. 
- - This class creates an object that holds Generic Keys like Signing - Keys, Encryption Keys, Encrypted SEK Tickets ... - """ - - def __init__(self): - self._kvps = dict() - - def _get_key_name(self, source, target, ktype): - return (source, target, ktype) - - def _put(self, src, dst, ktype, expiration, data): - name = self._get_key_name(src, dst, ktype) - self._kvps[name] = (expiration, data) - - def _get(self, src, dst, ktype): - name = self._get_key_name(src, dst, ktype) - if name in self._kvps: - expiration, data = self._kvps[name] - if expiration > time.time(): - return data - else: - del self._kvps[name] - - return None - - def clear(self): - """Wipes the store clear of all data.""" - self._kvps.clear() - - def put_ticket(self, source, target, skey, ekey, esek, expiration): - """Puts a sek pair in the cache. - - :param source: Client name - :param target: Target name - :param skey: The Signing Key - :param ekey: The Encription Key - :param esek: The token encrypted with the target key - :param expiration: Expiration time in seconds since Epoch - """ - keys = Ticket(skey, ekey, esek) - self._put(source, target, 'ticket', expiration, keys) - - def get_ticket(self, source, target): - """Returns a Ticket (skey, ekey, esek) namedtuple for the - source/target pair. 
- """ - return self._get(source, target, 'ticket') - - -_KEY_STORE = KeyStore() - - -class _KDSClient(object): - - USER_AGENT = 'oslo-incubator/rpc' - - def __init__(self, endpoint=None, timeout=None): - """A KDS Client class.""" - - self._endpoint = endpoint - if timeout is not None: - self.timeout = float(timeout) - else: - self.timeout = None - - def _do_get(self, url, request): - req_kwargs = dict() - req_kwargs['headers'] = dict() - req_kwargs['headers']['User-Agent'] = self.USER_AGENT - req_kwargs['headers']['Content-Type'] = 'application/json' - req_kwargs['data'] = jsonutils.dumps({'request': request}) - if self.timeout is not None: - req_kwargs['timeout'] = self.timeout - - try: - resp = requests.get(url, **req_kwargs) - except requests.ConnectionError as e: - err = "Unable to establish connection. %s" % e - raise CommunicationError(url, err) - - return resp - - def _get_reply(self, url, resp): - if resp.text: - try: - body = jsonutils.loads(resp.text) - reply = body['reply'] - except (KeyError, TypeError, ValueError): - msg = "Failed to decode reply: %s" % resp.text - raise CommunicationError(url, msg) - else: - msg = "No reply data was returned." - raise CommunicationError(url, msg) - - return reply - - def _get_ticket(self, request, url=None, redirects=10): - """Send an HTTP request. - - Wraps around 'requests' to handle redirects and common errors. - """ - if url is None: - if not self._endpoint: - raise CommunicationError(url, 'Endpoint not configured') - url = self._endpoint + '/kds/ticket' - - while redirects: - resp = self._do_get(url, request) - if resp.status_code in (301, 302, 305): - # Redirected. Reissue the request to the new location. 
- url = resp.headers['location'] - redirects -= 1 - continue - elif resp.status_code != 200: - msg = "Request returned failure status: %s (%s)" - err = msg % (resp.status_code, resp.text) - raise CommunicationError(url, err) - - return self._get_reply(url, resp) - - raise CommunicationError(url, "Too many redirections, giving up!") - - def get_ticket(self, source, target, crypto, key): - - # prepare metadata - md = {'requestor': source, - 'target': target, - 'timestamp': time.time(), - 'nonce': struct.unpack('Q', os.urandom(8))[0]} - metadata = base64.b64encode(jsonutils.dumps(md)) - - # sign metadata - signature = crypto.sign(key, metadata) - - # HTTP request - reply = self._get_ticket({'metadata': metadata, - 'signature': signature}) - - # verify reply - signature = crypto.sign(key, (reply['metadata'] + reply['ticket'])) - if signature != reply['signature']: - raise InvalidEncryptedTicket(md['source'], md['destination']) - md = jsonutils.loads(base64.b64decode(reply['metadata'])) - if ((md['source'] != source or - md['destination'] != target or - md['expiration'] < time.time())): - raise InvalidEncryptedTicket(md['source'], md['destination']) - - # return ticket data - tkt = jsonutils.loads(crypto.decrypt(key, reply['ticket'])) - - return tkt, md['expiration'] - - -# we need to keep a global nonce, as this value should never repeat non -# matter how many SecureMessage objects we create -_NONCE = None - - -def _get_nonce(): - """We keep a single counter per instance, as it is so huge we can't - possibly cycle through within 1/100 of a second anyway. - """ - - global _NONCE - # Lazy initialize, for now get a random value, multiply by 2^32 and - # use it as the nonce base. The counter itself will rotate after - # 2^32 increments. 
- if _NONCE is None: - _NONCE = [struct.unpack('I', os.urandom(4))[0], 0] - - # Increment counter and wrap at 2^32 - _NONCE[1] += 1 - if _NONCE[1] > 0xffffffff: - _NONCE[1] = 0 - - # Return base + counter - return long((_NONCE[0] * 0xffffffff)) + _NONCE[1] - - -class SecureMessage(object): - """A Secure Message object. - - This class creates a signing/encryption facility for RPC messages. - It encapsulates all the necessary crypto primitives to insulate - regular code from the intricacies of message authentication, validation - and optionally encryption. - - :param topic: The topic name of the queue - :param host: The server name, together with the topic it forms a unique - name that is used to source signing keys, and verify - incoming messages. - :param conf: a ConfigOpts object - :param key: (optional) explicitly pass in endpoint private key. - If not provided it will be sourced from the service config - :param key_store: (optional) Storage class for local caching - :param encrypt: (defaults to False) Whether to encrypt messages - :param enctype: (defaults to AES) Cipher to use - :param hashtype: (defaults to SHA256) Hash function to use for signatures - """ - - def __init__(self, topic, host, conf, key=None, key_store=None, - encrypt=None, enctype='AES', hashtype='SHA256'): - - conf.register_group(secure_message_group) - conf.register_opts(secure_message_opts, group='secure_messages') - - self._name = '%s.%s' % (topic, host) - self._key = key - self._conf = conf.secure_messages - self._encrypt = self._conf.encrypt if (encrypt is None) else encrypt - self._crypto = cryptoutils.SymmetricCrypto(enctype, hashtype) - self._hkdf = cryptoutils.HKDF(hashtype) - self._kds = _KDSClient(self._conf.kds_endpoint) - - if self._key is None: - self._key = self._init_key(topic, self._name) - if self._key is None: - err = "Secret Key (or key file) is missing or malformed" - raise SharedKeyNotFound(self._name, err) - - self._key_store = key_store or _KEY_STORE - - def 
_init_key(self, topic, name): - keys = None - if self._conf.secret_keys_file: - with open(self._conf.secret_keys_file, 'r') as f: - keys = f.readlines() - elif self._conf.secret_key: - keys = self._conf.secret_key - - if keys is None: - return None - - for k in keys: - if k[0] == '#': - continue - if ':' not in k: - break - svc, key = k.split(':', 1) - if svc == topic or svc == name: - return base64.b64decode(key) - - return None - - def _split_key(self, key, size): - sig_key = key[:size] - enc_key = key[size:] - return sig_key, enc_key - - def _decode_esek(self, key, source, target, timestamp, esek): - """This function decrypts the esek buffer passed in and returns a - KeyStore to be used to check and decrypt the received message. - - :param key: The key to use to decrypt the ticket (esek) - :param source: The name of the source service - :param traget: The name of the target service - :param timestamp: The incoming message timestamp - :param esek: a base64 encoded encrypted block containing a JSON string - """ - rkey = None - - try: - s = self._crypto.decrypt(key, esek) - j = jsonutils.loads(s) - - rkey = base64.b64decode(j['key']) - expiration = j['timestamp'] + j['ttl'] - if j['timestamp'] > timestamp or timestamp > expiration: - raise InvalidExpiredTicket(source, target) - - except Exception: - raise InvalidEncryptedTicket(source, target) - - info = '%s,%s,%s' % (source, target, str(j['timestamp'])) - - sek = self._hkdf.expand(rkey, info, len(key) * 2) - - return self._split_key(sek, len(key)) - - def _get_ticket(self, target): - """This function will check if we already have a SEK for the specified - target in the cache, or will go and try to fetch a new SEK from the key - server. 
- - :param target: The name of the target service - """ - ticket = self._key_store.get_ticket(self._name, target) - - if ticket is not None: - return ticket - - tkt, expiration = self._kds.get_ticket(self._name, target, - self._crypto, self._key) - - self._key_store.put_ticket(self._name, target, - base64.b64decode(tkt['skey']), - base64.b64decode(tkt['ekey']), - tkt['esek'], expiration) - return self._key_store.get_ticket(self._name, target) - - def encode(self, version, target, json_msg): - """This is the main encoding function. - - It takes a target and a message and returns a tuple consisting of a - JSON serialized metadata object, a JSON serialized (and optionally - encrypted) message, and a signature. - - :param version: the current envelope version - :param target: The name of the target service (usually with hostname) - :param json_msg: a serialized json message object - """ - ticket = self._get_ticket(target) - - metadata = jsonutils.dumps({'source': self._name, - 'destination': target, - 'timestamp': time.time(), - 'nonce': _get_nonce(), - 'esek': ticket.esek, - 'encryption': self._encrypt}) - - message = json_msg - if self._encrypt: - message = self._crypto.encrypt(ticket.ekey, message) - - signature = self._crypto.sign(ticket.skey, - version + metadata + message) - - return (metadata, message, signature) - - def decode(self, version, metadata, message, signature): - """This is the main decoding function. - - It takes a version, metadata, message and signature strings and - returns a tuple with a (decrypted) message and metadata or raises - an exception in case of error. 
- - :param version: the current envelope version - :param metadata: a JSON serialized object with metadata for validation - :param message: a JSON serialized (base64 encoded encrypted) message - :param signature: a base64 encoded signature - """ - md = jsonutils.loads(metadata) - - check_args = ('source', 'destination', 'timestamp', - 'nonce', 'esek', 'encryption') - for arg in check_args: - if arg not in md: - raise InvalidMetadata('Missing metadata "%s"' % arg) - - if md['destination'] != self._name: - # TODO(simo) handle group keys by checking target - raise UnknownDestinationName(md['destination']) - - try: - skey, ekey = self._decode_esek(self._key, - md['source'], md['destination'], - md['timestamp'], md['esek']) - except InvalidExpiredTicket: - raise - except Exception: - raise InvalidMetadata('Failed to decode ESEK for %s/%s' % ( - md['source'], md['destination'])) - - sig = self._crypto.sign(skey, version + metadata + message) - - if sig != signature: - raise InvalidSignature(md['source'], md['destination']) - - if md['encryption'] is True: - msg = self._crypto.decrypt(ekey, message) - else: - msg = message - - return (md, msg) diff --git a/billingstack/openstack/common/rpc/serializer.py b/billingstack/openstack/common/rpc/serializer.py deleted file mode 100644 index 9bc6e2a..0000000 --- a/billingstack/openstack/common/rpc/serializer.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Provides the definition of an RPC serialization handler""" - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class Serializer(object): - """Generic (de-)serialization definition base class.""" - - @abc.abstractmethod - def serialize_entity(self, context, entity): - """Serialize something to primitive form. - - :param context: Security context - :param entity: Entity to be serialized - :returns: Serialized form of entity - """ - pass - - @abc.abstractmethod - def deserialize_entity(self, context, entity): - """Deserialize something from primitive form. - - :param context: Security context - :param entity: Primitive to be deserialized - :returns: Deserialized form of entity - """ - pass - - -class NoOpSerializer(Serializer): - """A serializer that does nothing.""" - - def serialize_entity(self, context, entity): - return entity - - def deserialize_entity(self, context, entity): - return entity diff --git a/billingstack/openstack/common/rpc/service.py b/billingstack/openstack/common/rpc/service.py deleted file mode 100644 index 385b2be..0000000 --- a/billingstack/openstack/common/rpc/service.py +++ /dev/null @@ -1,78 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2011 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc -from billingstack.openstack.common.rpc import dispatcher as rpc_dispatcher -from billingstack.openstack.common import service - - -LOG = logging.getLogger(__name__) - - -class Service(service.Service): - """Service object for binaries running on hosts. - - A service enables rpc by listening to queues based on topic and host. - """ - def __init__(self, host, topic, manager=None, serializer=None): - super(Service, self).__init__() - self.host = host - self.topic = topic - self.serializer = serializer - if manager is None: - self.manager = self - else: - self.manager = manager - - def start(self): - super(Service, self).start() - - self.conn = rpc.create_connection(new=True) - LOG.debug(_("Creating Consumer connection for Service %s") % - self.topic) - - dispatcher = rpc_dispatcher.RpcDispatcher([self.manager], - self.serializer) - - # Share this same connection for these Consumers - self.conn.create_consumer(self.topic, dispatcher, fanout=False) - - node_topic = '%s.%s' % (self.topic, self.host) - self.conn.create_consumer(node_topic, dispatcher, fanout=False) - - self.conn.create_consumer(self.topic, dispatcher, fanout=True) - - # Hook to allow the manager to do other initializations after - # the rpc connection is created. - if callable(getattr(self.manager, 'initialize_service_hook', None)): - self.manager.initialize_service_hook(self) - - # Consume from all consumers in a thread - self.conn.consume_in_thread() - - def stop(self): - # Try to shut the connection down, but if we get any sort of - # errors, go ahead and ignore them.. 
as we're shutting down anyway - try: - self.conn.close() - except Exception: - pass - super(Service, self).stop() diff --git a/billingstack/openstack/common/rpc/zmq_receiver.py b/billingstack/openstack/common/rpc/zmq_receiver.py deleted file mode 100644 index 6fd8398..0000000 --- a/billingstack/openstack/common/rpc/zmq_receiver.py +++ /dev/null @@ -1,40 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import eventlet -eventlet.monkey_patch() - -import contextlib -import sys - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc -from billingstack.openstack.common.rpc import impl_zmq - -CONF = cfg.CONF -CONF.register_opts(rpc.rpc_opts) -CONF.register_opts(impl_zmq.zmq_opts) - - -def main(): - CONF(sys.argv[1:], project='oslo') - logging.setup("oslo") - - with contextlib.closing(impl_zmq.ZmqProxy(CONF)) as reactor: - reactor.consume_in_thread() - reactor.wait() diff --git a/billingstack/openstack/common/service.py b/billingstack/openstack/common/service.py deleted file mode 100644 index 0530911..0000000 --- a/billingstack/openstack/common/service.py +++ /dev/null @@ -1,461 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. 
-# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Generic Node base class for all workers that run on hosts.""" - -import errno -import logging as std_logging -import os -import random -import signal -import sys -import time - -import eventlet -from eventlet import event -from oslo.config import cfg - -from billingstack.openstack.common import eventlet_backdoor -from billingstack.openstack.common.gettextutils import _ # noqa -from billingstack.openstack.common import importutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import threadgroup - - -rpc = importutils.try_import('billingstack.openstack.common.rpc') -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -def _sighup_supported(): - return hasattr(signal, 'SIGHUP') - - -def _is_sighup(signo): - return _sighup_supported() and signo == signal.SIGHUP - - -def _signo_to_signame(signo): - signals = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT'} - if _sighup_supported(): - signals[signal.SIGHUP] = 'SIGHUP' - return signals[signo] - - -def _set_signals_handler(handler): - signal.signal(signal.SIGTERM, handler) - signal.signal(signal.SIGINT, handler) - if _sighup_supported(): - signal.signal(signal.SIGHUP, handler) - - -class Launcher(object): - """Launch one or more services and wait for them to complete.""" - - def __init__(self): - """Initialize the service launcher. 
- - :returns: None - - """ - self.services = Services() - self.backdoor_port = eventlet_backdoor.initialize_if_enabled() - - def launch_service(self, service): - """Load and start the given service. - - :param service: The service you would like to start. - :returns: None - - """ - service.backdoor_port = self.backdoor_port - self.services.add(service) - - def stop(self): - """Stop all services which are currently running. - - :returns: None - - """ - self.services.stop() - - def wait(self): - """Waits until all services have been stopped, and then returns. - - :returns: None - - """ - self.services.wait() - - def restart(self): - """Reload config files and restart service. - - :returns: None - - """ - cfg.CONF.reload_config_files() - self.services.restart() - - -class SignalExit(SystemExit): - def __init__(self, signo, exccode=1): - super(SignalExit, self).__init__(exccode) - self.signo = signo - - -class ServiceLauncher(Launcher): - def _handle_signal(self, signo, frame): - # Allow the process to be killed again and die from natural causes - _set_signals_handler(signal.SIG_DFL) - raise SignalExit(signo) - - def handle_signal(self): - _set_signals_handler(self._handle_signal) - - def _wait_for_exit_or_signal(self, ready_callback=None): - status = None - signo = 0 - - LOG.debug(_('Full set of CONF:')) - CONF.log_opt_values(LOG, std_logging.DEBUG) - - try: - if ready_callback: - ready_callback() - super(ServiceLauncher, self).wait() - except SignalExit as exc: - signame = _signo_to_signame(exc.signo) - LOG.info(_('Caught %s, exiting'), signame) - status = exc.code - signo = exc.signo - except SystemExit as exc: - status = exc.code - finally: - self.stop() - if rpc: - try: - rpc.cleanup() - except Exception: - # We're shutting down, so it doesn't matter at this point. 
- LOG.exception(_('Exception during rpc cleanup.')) - - return status, signo - - def wait(self, ready_callback=None): - while True: - self.handle_signal() - status, signo = self._wait_for_exit_or_signal(ready_callback) - if not _is_sighup(signo): - return status - self.restart() - - -class ServiceWrapper(object): - def __init__(self, service, workers): - self.service = service - self.workers = workers - self.children = set() - self.forktimes = [] - - -class ProcessLauncher(object): - def __init__(self): - self.children = {} - self.sigcaught = None - self.running = True - rfd, self.writepipe = os.pipe() - self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r') - self.handle_signal() - - def handle_signal(self): - _set_signals_handler(self._handle_signal) - - def _handle_signal(self, signo, frame): - self.sigcaught = signo - self.running = False - - # Allow the process to be killed again and die from natural causes - _set_signals_handler(signal.SIG_DFL) - - def _pipe_watcher(self): - # This will block until the write end is closed when the parent - # dies unexpectedly - self.readpipe.read() - - LOG.info(_('Parent process has died unexpectedly, exiting')) - - sys.exit(1) - - def _child_process_handle_signal(self): - # Setup child signal handlers differently - def _sigterm(*args): - signal.signal(signal.SIGTERM, signal.SIG_DFL) - raise SignalExit(signal.SIGTERM) - - def _sighup(*args): - signal.signal(signal.SIGHUP, signal.SIG_DFL) - raise SignalExit(signal.SIGHUP) - - signal.signal(signal.SIGTERM, _sigterm) - if _sighup_supported(): - signal.signal(signal.SIGHUP, _sighup) - # Block SIGINT and let the parent send us a SIGTERM - signal.signal(signal.SIGINT, signal.SIG_IGN) - - def _child_wait_for_exit_or_signal(self, launcher): - status = None - signo = 0 - - # NOTE(johannes): All exceptions are caught to ensure this - # doesn't fallback into the loop spawning children. It would - # be bad for a child to spawn more children. 
- try: - launcher.wait() - except SignalExit as exc: - signame = _signo_to_signame(exc.signo) - LOG.info(_('Caught %s, exiting'), signame) - status = exc.code - signo = exc.signo - except SystemExit as exc: - status = exc.code - except BaseException: - LOG.exception(_('Unhandled exception')) - status = 2 - finally: - launcher.stop() - - return status, signo - - def _child_process(self, service): - self._child_process_handle_signal() - - # Reopen the eventlet hub to make sure we don't share an epoll - # fd with parent and/or siblings, which would be bad - eventlet.hubs.use_hub() - - # Close write to ensure only parent has it open - os.close(self.writepipe) - # Create greenthread to watch for parent to close pipe - eventlet.spawn_n(self._pipe_watcher) - - # Reseed random number generator - random.seed() - - launcher = Launcher() - launcher.launch_service(service) - return launcher - - def _start_child(self, wrap): - if len(wrap.forktimes) > wrap.workers: - # Limit ourselves to one process a second (over the period of - # number of workers * 1 second). This will allow workers to - # start up quickly but ensure we don't fork off children that - # die instantly too quickly. 
- if time.time() - wrap.forktimes[0] < wrap.workers: - LOG.info(_('Forking too fast, sleeping')) - time.sleep(1) - - wrap.forktimes.pop(0) - - wrap.forktimes.append(time.time()) - - pid = os.fork() - if pid == 0: - launcher = self._child_process(wrap.service) - while True: - self._child_process_handle_signal() - status, signo = self._child_wait_for_exit_or_signal(launcher) - if not _is_sighup(signo): - break - launcher.restart() - - os._exit(status) - - LOG.info(_('Started child %d'), pid) - - wrap.children.add(pid) - self.children[pid] = wrap - - return pid - - def launch_service(self, service, workers=1): - wrap = ServiceWrapper(service, workers) - - LOG.info(_('Starting %d workers'), wrap.workers) - while self.running and len(wrap.children) < wrap.workers: - self._start_child(wrap) - - def _wait_child(self): - try: - # Don't block if no child processes have exited - pid, status = os.waitpid(0, os.WNOHANG) - if not pid: - return None - except OSError as exc: - if exc.errno not in (errno.EINTR, errno.ECHILD): - raise - return None - - if os.WIFSIGNALED(status): - sig = os.WTERMSIG(status) - LOG.info(_('Child %(pid)d killed by signal %(sig)d'), - dict(pid=pid, sig=sig)) - else: - code = os.WEXITSTATUS(status) - LOG.info(_('Child %(pid)s exited with status %(code)d'), - dict(pid=pid, code=code)) - - if pid not in self.children: - LOG.warning(_('pid %d not in child list'), pid) - return None - - wrap = self.children.pop(pid) - wrap.children.remove(pid) - return wrap - - def _respawn_children(self): - while self.running: - wrap = self._wait_child() - if not wrap: - # Yield to other threads if no children have exited - # Sleep for a short time to avoid excessive CPU usage - # (see bug #1095346) - eventlet.greenthread.sleep(.01) - continue - while self.running and len(wrap.children) < wrap.workers: - self._start_child(wrap) - - def wait(self): - """Loop waiting on children to die and respawning as necessary.""" - - LOG.debug(_('Full set of CONF:')) - 
CONF.log_opt_values(LOG, std_logging.DEBUG) - - while True: - self.handle_signal() - self._respawn_children() - if self.sigcaught: - signame = _signo_to_signame(self.sigcaught) - LOG.info(_('Caught %s, stopping children'), signame) - if not _is_sighup(self.sigcaught): - break - - for pid in self.children: - os.kill(pid, signal.SIGHUP) - self.running = True - self.sigcaught = None - - for pid in self.children: - try: - os.kill(pid, signal.SIGTERM) - except OSError as exc: - if exc.errno != errno.ESRCH: - raise - - # Wait for children to die - if self.children: - LOG.info(_('Waiting on %d children to exit'), len(self.children)) - while self.children: - self._wait_child() - - -class Service(object): - """Service object for binaries running on hosts.""" - - def __init__(self, threads=1000): - self.tg = threadgroup.ThreadGroup(threads) - - # signal that the service is done shutting itself down: - self._done = event.Event() - - def reset(self): - # NOTE(Fengqian): docs for Event.reset() recommend against using it - self._done = event.Event() - - def start(self): - pass - - def stop(self): - self.tg.stop() - self.tg.wait() - # Signal that service cleanup is done: - if not self._done.ready(): - self._done.send() - - def wait(self): - self._done.wait() - - -class Services(object): - - def __init__(self): - self.services = [] - self.tg = threadgroup.ThreadGroup() - self.done = event.Event() - - def add(self, service): - self.services.append(service) - self.tg.add_thread(self.run_service, service, self.done) - - def stop(self): - # wait for graceful shutdown of services: - for service in self.services: - service.stop() - service.wait() - - # Each service has performed cleanup, now signal that the run_service - # wrapper threads can now die: - if not self.done.ready(): - self.done.send() - - # reap threads: - self.tg.stop() - - def wait(self): - self.tg.wait() - - def restart(self): - self.stop() - self.done = event.Event() - for restart_service in self.services: - 
restart_service.reset() - self.tg.add_thread(self.run_service, restart_service, self.done) - - @staticmethod - def run_service(service, done): - """Service start wrapper. - - :param service: service to run - :param done: event to wait on until a shutdown is triggered - :returns: None - - """ - service.start() - done.wait() - - -def launch(service, workers=None): - if workers: - launcher = ProcessLauncher() - launcher.launch_service(service, workers=workers) - else: - launcher = ServiceLauncher() - launcher.launch_service(service) - return launcher diff --git a/billingstack/openstack/common/sslutils.py b/billingstack/openstack/common/sslutils.py deleted file mode 100644 index a3ae3c7..0000000 --- a/billingstack/openstack/common/sslutils.py +++ /dev/null @@ -1,100 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import ssl - -from oslo.config import cfg - -from billingstack.openstack.common.gettextutils import _ # noqa - - -ssl_opts = [ - cfg.StrOpt('ca_file', - default=None, - help="CA certificate file to use to verify " - "connecting clients"), - cfg.StrOpt('cert_file', - default=None, - help="Certificate file to use when starting " - "the server securely"), - cfg.StrOpt('key_file', - default=None, - help="Private key file to use when starting " - "the server securely"), -] - - -CONF = cfg.CONF -CONF.register_opts(ssl_opts, "ssl") - - -def is_enabled(): - cert_file = CONF.ssl.cert_file - key_file = CONF.ssl.key_file - ca_file = CONF.ssl.ca_file - use_ssl = cert_file or key_file - - if cert_file and not os.path.exists(cert_file): - raise RuntimeError(_("Unable to find cert_file : %s") % cert_file) - - if ca_file and not os.path.exists(ca_file): - raise RuntimeError(_("Unable to find ca_file : %s") % ca_file) - - if key_file and not os.path.exists(key_file): - raise RuntimeError(_("Unable to find key_file : %s") % key_file) - - if use_ssl and (not cert_file or not key_file): - raise RuntimeError(_("When running server in SSL mode, you must " - "specify both a cert_file and key_file " - "option value in your configuration file")) - - return use_ssl - - -def wrap(sock): - ssl_kwargs = { - 'server_side': True, - 'certfile': CONF.ssl.cert_file, - 'keyfile': CONF.ssl.key_file, - 'cert_reqs': ssl.CERT_NONE, - } - - if CONF.ssl.ca_file: - ssl_kwargs['ca_certs'] = CONF.ssl.ca_file - ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED - - return ssl.wrap_socket(sock, **ssl_kwargs) - - -_SSL_PROTOCOLS = { - "tlsv1": ssl.PROTOCOL_TLSv1, - "sslv23": ssl.PROTOCOL_SSLv23, - "sslv3": ssl.PROTOCOL_SSLv3 -} - -try: - _SSL_PROTOCOLS["sslv2"] = ssl.PROTOCOL_SSLv2 -except AttributeError: - pass - - -def validate_ssl_version(version): - key = version.lower() - try: - return _SSL_PROTOCOLS[key] - except KeyError: - raise RuntimeError(_("Invalid SSL version : %s") % version) diff --git 
a/billingstack/openstack/common/test.py b/billingstack/openstack/common/test.py deleted file mode 100644 index 8d63bdc..0000000 --- a/billingstack/openstack/common/test.py +++ /dev/null @@ -1,54 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010-2011 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Common utilities used in testing""" - -import os - -import fixtures -import testtools - - -class BaseTestCase(testtools.TestCase): - - def setUp(self): - super(BaseTestCase, self).setUp() - self._set_timeout() - self._fake_output() - self.useFixture(fixtures.FakeLogger('billingstack.openstack.common')) - self.useFixture(fixtures.NestedTempfile()) - self.useFixture(fixtures.TempHomeDir()) - - def _set_timeout(self): - test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) - try: - test_timeout = int(test_timeout) - except ValueError: - # If timeout value is invalid do not set a timeout. 
- test_timeout = 0 - if test_timeout > 0: - self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) - - def _fake_output(self): - if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or - os.environ.get('OS_STDOUT_CAPTURE') == '1'): - stdout = self.useFixture(fixtures.StringStream('stdout')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) - if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or - os.environ.get('OS_STDERR_CAPTURE') == '1'): - stderr = self.useFixture(fixtures.StringStream('stderr')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) diff --git a/billingstack/openstack/common/threadgroup.py b/billingstack/openstack/common/threadgroup.py deleted file mode 100644 index c7f9153..0000000 --- a/billingstack/openstack/common/threadgroup.py +++ /dev/null @@ -1,125 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import eventlet -from eventlet import greenpool -from eventlet import greenthread - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import loopingcall - - -LOG = logging.getLogger(__name__) - - -def _thread_done(gt, *args, **kwargs): - """Callback function to be passed to GreenThread.link() when we spawn() - Calls the :class:`ThreadGroup` to notify if. 
- - """ - kwargs['group'].thread_done(kwargs['thread']) - - -class Thread(object): - """Wrapper around a greenthread, that holds a reference to the - :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when - it has done so it can be removed from the threads list. - """ - def __init__(self, thread, group): - self.thread = thread - self.thread.link(_thread_done, group=group, thread=self) - - def stop(self): - self.thread.kill() - - def wait(self): - return self.thread.wait() - - def link(self, func, *args, **kwargs): - self.thread.link(func, *args, **kwargs) - - -class ThreadGroup(object): - """The point of the ThreadGroup classis to: - - * keep track of timers and greenthreads (making it easier to stop them - when need be). - * provide an easy API to add timers. - """ - def __init__(self, thread_pool_size=10): - self.pool = greenpool.GreenPool(thread_pool_size) - self.threads = [] - self.timers = [] - - def add_dynamic_timer(self, callback, initial_delay=None, - periodic_interval_max=None, *args, **kwargs): - timer = loopingcall.DynamicLoopingCall(callback, *args, **kwargs) - timer.start(initial_delay=initial_delay, - periodic_interval_max=periodic_interval_max) - self.timers.append(timer) - - def add_timer(self, interval, callback, initial_delay=None, - *args, **kwargs): - pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs) - pulse.start(interval=interval, - initial_delay=initial_delay) - self.timers.append(pulse) - - def add_thread(self, callback, *args, **kwargs): - gt = self.pool.spawn(callback, *args, **kwargs) - th = Thread(gt, self) - self.threads.append(th) - return th - - def thread_done(self, thread): - self.threads.remove(thread) - - def stop(self): - current = greenthread.getcurrent() - for x in self.threads: - if x is current: - # don't kill the current thread. 
- continue - try: - x.stop() - except Exception as ex: - LOG.exception(ex) - - for x in self.timers: - try: - x.stop() - except Exception as ex: - LOG.exception(ex) - self.timers = [] - - def wait(self): - for x in self.timers: - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) - current = greenthread.getcurrent() - for x in self.threads: - if x is current: - continue - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) diff --git a/billingstack/openstack/common/timeutils.py b/billingstack/openstack/common/timeutils.py deleted file mode 100644 index b79ebf3..0000000 --- a/billingstack/openstack/common/timeutils.py +++ /dev/null @@ -1,197 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Time related utilities and helper functions. 
-""" - -import calendar -import datetime -import time - -import iso8601 -import six - - -# ISO 8601 extended time format with microseconds -_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' -_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' -PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND - - -def isotime(at=None, subsecond=False): - """Stringify time in ISO 8601 format.""" - if not at: - at = utcnow() - st = at.strftime(_ISO8601_TIME_FORMAT - if not subsecond - else _ISO8601_TIME_FORMAT_SUBSECOND) - tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - st += ('Z' if tz == 'UTC' else tz) - return st - - -def parse_isotime(timestr): - """Parse time from ISO 8601 format.""" - try: - return iso8601.parse_date(timestr) - except iso8601.ParseError as e: - raise ValueError(six.text_type(e)) - except TypeError as e: - raise ValueError(six.text_type(e)) - - -def strtime(at=None, fmt=PERFECT_TIME_FORMAT): - """Returns formatted utcnow.""" - if not at: - at = utcnow() - return at.strftime(fmt) - - -def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT): - """Turn a formatted time back into a datetime.""" - return datetime.datetime.strptime(timestr, fmt) - - -def normalize_time(timestamp): - """Normalize time in arbitrary timezone to UTC naive object.""" - offset = timestamp.utcoffset() - if offset is None: - return timestamp - return timestamp.replace(tzinfo=None) - offset - - -def is_older_than(before, seconds): - """Return True if before is older than seconds.""" - if isinstance(before, six.string_types): - before = parse_strtime(before).replace(tzinfo=None) - return utcnow() - before > datetime.timedelta(seconds=seconds) - - -def is_newer_than(after, seconds): - """Return True if after is newer than seconds.""" - if isinstance(after, six.string_types): - after = parse_strtime(after).replace(tzinfo=None) - return after - utcnow() > datetime.timedelta(seconds=seconds) - - -def utcnow_ts(): - """Timestamp version of our utcnow function.""" - if utcnow.override_time is None: 
- # NOTE(kgriffs): This is several times faster - # than going through calendar.timegm(...) - return int(time.time()) - - return calendar.timegm(utcnow().timetuple()) - - -def utcnow(): - """Overridable version of utils.utcnow.""" - if utcnow.override_time: - try: - return utcnow.override_time.pop(0) - except AttributeError: - return utcnow.override_time - return datetime.datetime.utcnow() - - -def iso8601_from_timestamp(timestamp): - """Returns a iso8601 formated date from timestamp.""" - return isotime(datetime.datetime.utcfromtimestamp(timestamp)) - - -utcnow.override_time = None - - -def set_time_override(override_time=None): - """Overrides utils.utcnow. - - Make it return a constant time or a list thereof, one at a time. - - :param override_time: datetime instance or list thereof. If not - given, defaults to the current UTC time. - """ - utcnow.override_time = override_time or datetime.datetime.utcnow() - - -def advance_time_delta(timedelta): - """Advance overridden time using a datetime.timedelta.""" - assert(not utcnow.override_time is None) - try: - for dt in utcnow.override_time: - dt += timedelta - except TypeError: - utcnow.override_time += timedelta - - -def advance_time_seconds(seconds): - """Advance overridden time by seconds.""" - advance_time_delta(datetime.timedelta(0, seconds)) - - -def clear_time_override(): - """Remove the overridden time.""" - utcnow.override_time = None - - -def marshall_now(now=None): - """Make an rpc-safe datetime with microseconds. - - Note: tzinfo is stripped, but not required for relative times. 
- """ - if not now: - now = utcnow() - return dict(day=now.day, month=now.month, year=now.year, hour=now.hour, - minute=now.minute, second=now.second, - microsecond=now.microsecond) - - -def unmarshall_time(tyme): - """Unmarshall a datetime dict.""" - return datetime.datetime(day=tyme['day'], - month=tyme['month'], - year=tyme['year'], - hour=tyme['hour'], - minute=tyme['minute'], - second=tyme['second'], - microsecond=tyme['microsecond']) - - -def delta_seconds(before, after): - """Return the difference between two timing objects. - - Compute the difference in seconds between two date, time, or - datetime objects (as a float, to microsecond resolution). - """ - delta = after - before - try: - return delta.total_seconds() - except AttributeError: - return ((delta.days * 24 * 3600) + delta.seconds + - float(delta.microseconds) / (10 ** 6)) - - -def is_soon(dt, window): - """Determines if time is going to happen in the next window seconds. - - :params dt: the time - :params window: minimum seconds to remain to consider the time not soon - - :return: True if expiration is within the given duration - """ - soon = (utcnow() + datetime.timedelta(seconds=window)) - return normalize_time(dt) <= soon diff --git a/billingstack/openstack/common/utils.py b/billingstack/openstack/common/utils.py deleted file mode 100644 index 6de5cbe..0000000 --- a/billingstack/openstack/common/utils.py +++ /dev/null @@ -1,140 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -System-level utilities and helper functions. -""" - -import logging -import random -import shlex - -from eventlet import greenthread -from eventlet.green import subprocess - -from billingstack.openstack.common import exception -from billingstack.openstack.common.gettextutils import _ - - -LOG = logging.getLogger(__name__) - - -def int_from_bool_as_string(subject): - """ - Interpret a string as a boolean and return either 1 or 0. - - Any string value in: - ('True', 'true', 'On', 'on', '1') - is interpreted as a boolean True. - - Useful for JSON-decoded stuff and config file parsing - """ - return bool_from_string(subject) and 1 or 0 - - -def bool_from_string(subject): - """ - Interpret a string as a boolean. - - Any string value in: - ('True', 'true', 'On', 'on', 'Yes', 'yes', '1') - is interpreted as a boolean True. - - Useful for JSON-decoded stuff and config file parsing - """ - if isinstance(subject, bool): - return subject - if isinstance(subject, basestring): - if subject.strip().lower() in ('true', 'on', 'yes', '1'): - return True - return False - - -def execute(*cmd, **kwargs): - """ - Helper method to execute command with optional retry. - - :cmd Passed to subprocess.Popen. - :process_input Send to opened process. - :check_exit_code Defaults to 0. Raise exception.ProcessExecutionError - unless program exits with this code. - :delay_on_retry True | False. Defaults to True. If set to True, wait a - short amount of time before retrying. - :attempts How many times to retry cmd. - :run_as_root True | False. Defaults to False. If set to True, - the command is prefixed by the command specified - in the root_helper kwarg. 
- :root_helper command to prefix all cmd's with - - :raises exception.Error on receiving unknown arguments - :raises exception.ProcessExecutionError - """ - - process_input = kwargs.pop('process_input', None) - check_exit_code = kwargs.pop('check_exit_code', 0) - delay_on_retry = kwargs.pop('delay_on_retry', True) - attempts = kwargs.pop('attempts', 1) - run_as_root = kwargs.pop('run_as_root', False) - root_helper = kwargs.pop('root_helper', '') - if len(kwargs): - raise exception.Error(_('Got unknown keyword args ' - 'to utils.execute: %r') % kwargs) - if run_as_root: - cmd = shlex.split(root_helper) + list(cmd) - cmd = map(str, cmd) - - while attempts > 0: - attempts -= 1 - try: - LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd)) - _PIPE = subprocess.PIPE # pylint: disable=E1101 - obj = subprocess.Popen(cmd, - stdin=_PIPE, - stdout=_PIPE, - stderr=_PIPE, - close_fds=True) - result = None - if process_input is not None: - result = obj.communicate(process_input) - else: - result = obj.communicate() - obj.stdin.close() # pylint: disable=E1101 - _returncode = obj.returncode # pylint: disable=E1101 - if _returncode: - LOG.debug(_('Result was %s') % _returncode) - if (isinstance(check_exit_code, int) and - not isinstance(check_exit_code, bool) and - _returncode != check_exit_code): - (stdout, stderr) = result - raise exception.ProcessExecutionError( - exit_code=_returncode, - stdout=stdout, - stderr=stderr, - cmd=' '.join(cmd)) - return result - except exception.ProcessExecutionError: - if not attempts: - raise - else: - LOG.debug(_('%r failed. 
Retrying.'), cmd) - if delay_on_retry: - greenthread.sleep(random.randint(20, 200) / 100.0) - finally: - # NOTE(termie): this appears to be necessary to let the subprocess - # call clean something up in between calls, without - # it two execute calls in a row hangs the second one - greenthread.sleep(0) diff --git a/billingstack/openstack/common/uuidutils.py b/billingstack/openstack/common/uuidutils.py deleted file mode 100644 index 7608acb..0000000 --- a/billingstack/openstack/common/uuidutils.py +++ /dev/null @@ -1,39 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2012 Intel Corporation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -UUID related utilities and helper functions. -""" - -import uuid - - -def generate_uuid(): - return str(uuid.uuid4()) - - -def is_uuid_like(val): - """Returns validation of a value as a UUID. - - For our purposes, a UUID is a canonical form string: - aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa - - """ - try: - return str(uuid.UUID(val)) == val - except (TypeError, ValueError, AttributeError): - return False diff --git a/billingstack/openstack/common/versionutils.py b/billingstack/openstack/common/versionutils.py deleted file mode 100644 index f7b1f8a..0000000 --- a/billingstack/openstack/common/versionutils.py +++ /dev/null @@ -1,45 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright (c) 2013 OpenStack Foundation -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Helpers for comparing version strings. -""" - -import pkg_resources - - -def is_compatible(requested_version, current_version, same_major=True): - """Determine whether `requested_version` is satisfied by - `current_version`; in other words, `current_version` is >= - `requested_version`. - - :param requested_version: version to check for compatibility - :param current_version: version to check against - :param same_major: if True, the major version must be identical between - `requested_version` and `current_version`. This is used when a - major-version difference indicates incompatibility between the two - versions. Since this is the common-case in practice, the default is - True. - :returns: True if compatible, False if not - """ - requested_parts = pkg_resources.parse_version(requested_version) - current_parts = pkg_resources.parse_version(current_version) - - if same_major and (requested_parts[0] != current_parts[0]): - return False - - return current_parts >= requested_parts diff --git a/billingstack/openstack/common/wsgi.py b/billingstack/openstack/common/wsgi.py deleted file mode 100644 index 78d59d5..0000000 --- a/billingstack/openstack/common/wsgi.py +++ /dev/null @@ -1,797 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Utility methods for working with WSGI servers.""" - -import eventlet -eventlet.patcher.monkey_patch(all=False, socket=True) - -import datetime -import errno -import socket -import sys -import time - -import eventlet.wsgi -from oslo.config import cfg -import routes -import routes.middleware -import webob.dec -import webob.exc -from xml.dom import minidom -from xml.parsers import expat - -from billingstack.openstack.common import exception -from billingstack.openstack.common.gettextutils import _ -from billingstack.openstack.common import jsonutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service -from billingstack.openstack.common import sslutils -from billingstack.openstack.common import xmlutils - -socket_opts = [ - cfg.IntOpt('backlog', - default=4096, - help="Number of backlog requests to configure the socket with"), - cfg.IntOpt('tcp_keepidle', - default=600, - help="Sets the value of TCP_KEEPIDLE in seconds for each " - "server socket. Not supported on OS X."), -] - -CONF = cfg.CONF -CONF.register_opts(socket_opts) - -LOG = logging.getLogger(__name__) - - -def run_server(application, port): - """Run a WSGI server with the given application.""" - sock = eventlet.listen(('0.0.0.0', port)) - eventlet.wsgi.server(sock, application) - - -class Service(service.Service): - """ - Provides a Service API for wsgi servers. 
- - This gives us the ability to launch wsgi servers with the - Launcher classes in service.py. - """ - - def __init__(self, application, port, - host='0.0.0.0', backlog=4096, threads=1000): - self.application = application - self._port = port - self._host = host - self._backlog = backlog if backlog else CONF.backlog - super(Service, self).__init__(threads) - - def _get_socket(self, host, port, backlog): - # TODO(dims): eventlet's green dns/socket module does not actually - # support IPv6 in getaddrinfo(). We need to get around this in the - # future or monitor upstream for a fix - info = socket.getaddrinfo(host, - port, - socket.AF_UNSPEC, - socket.SOCK_STREAM)[0] - family = info[0] - bind_addr = info[-1] - - sock = None - retry_until = time.time() + 30 - while not sock and time.time() < retry_until: - try: - sock = eventlet.listen(bind_addr, - backlog=backlog, - family=family) - if sslutils.is_enabled(): - sock = sslutils.wrap(sock) - - except socket.error, err: - if err.args[0] != errno.EADDRINUSE: - raise - eventlet.sleep(0.1) - if not sock: - raise RuntimeError(_("Could not bind to %(host)s:%(port)s " - "after trying for 30 seconds") % - {'host': host, 'port': port}) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - # sockets can hang around forever without keepalive - sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - - # This option isn't available in the OS X version of eventlet - if hasattr(socket, 'TCP_KEEPIDLE'): - sock.setsockopt(socket.IPPROTO_TCP, - socket.TCP_KEEPIDLE, - CONF.tcp_keepidle) - - return sock - - def start(self): - """Start serving this service using the provided server instance. 
- - :returns: None - - """ - super(Service, self).start() - self._socket = self._get_socket(self._host, self._port, self._backlog) - self.tg.add_thread(self._run, self.application, self._socket) - - @property - def backlog(self): - return self._backlog - - @property - def host(self): - return self._socket.getsockname()[0] if self._socket else self._host - - @property - def port(self): - return self._socket.getsockname()[1] if self._socket else self._port - - def stop(self): - """Stop serving this API. - - :returns: None - - """ - super(Service, self).stop() - - def _run(self, application, socket): - """Start a WSGI server in a new green thread.""" - logger = logging.getLogger('eventlet.wsgi') - eventlet.wsgi.server(socket, - application, - custom_pool=self.tg.pool, - log=logging.WritableLogger(logger)) - - -class Middleware(object): - """ - Base WSGI middleware wrapper. These classes require an application to be - initialized that will be called next. By default the middleware will - simply call its wrapped app, or you can override __call__ to customize its - behavior. - """ - - def __init__(self, application): - self.application = application - - def process_request(self, req): - """ - Called on each request. - - If this returns None, the next application down the stack will be - executed. If it returns a response then that response will be returned - and execution will stop here. - """ - return None - - def process_response(self, response): - """Do whatever you'd like to the response.""" - return response - - @webob.dec.wsgify - def __call__(self, req): - response = self.process_request(req) - if response: - return response - response = req.get_response(self.application) - return self.process_response(response) - - -class Debug(Middleware): - """ - Helper class that can be inserted into any WSGI application chain - to get information about the request and response. 
- """ - - @webob.dec.wsgify - def __call__(self, req): - print ("*" * 40) + " REQUEST ENVIRON" - for key, value in req.environ.items(): - print key, "=", value - print - resp = req.get_response(self.application) - - print ("*" * 40) + " RESPONSE HEADERS" - for (key, value) in resp.headers.iteritems(): - print key, "=", value - print - - resp.app_iter = self.print_generator(resp.app_iter) - - return resp - - @staticmethod - def print_generator(app_iter): - """ - Iterator that prints the contents of a wrapper string iterator - when iterated. - """ - print ("*" * 40) + " BODY" - for part in app_iter: - sys.stdout.write(part) - sys.stdout.flush() - yield part - print - - -class Router(object): - - """ - WSGI middleware that maps incoming requests to WSGI apps. - """ - - def __init__(self, mapper): - """ - Create a router for the given routes.Mapper. - - Each route in `mapper` must specify a 'controller', which is a - WSGI app to call. You'll probably want to specify an 'action' as - well and have your controller be a wsgi.Controller, who will route - the request to the action method. - - Examples: - mapper = routes.Mapper() - sc = ServerController() - - # Explicit mapping of one route to a controller+action - mapper.connect(None, "/svrlist", controller=sc, action="list") - - # Actions are all implicitly defined - mapper.resource("server", "servers", controller=sc) - - # Pointing to an arbitrary WSGI app. You can specify the - # {path_info:.*} parameter so the target app can be handed just that - # section of the URL. - mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp()) - """ - self.map = mapper - self._router = routes.middleware.RoutesMiddleware(self._dispatch, - self.map) - - @webob.dec.wsgify - def __call__(self, req): - """ - Route the incoming request to a controller based on self.map. - If no match, return a 404. 
- """ - return self._router - - @staticmethod - @webob.dec.wsgify - def _dispatch(req): - """ - Called by self._router after matching the incoming request to a route - and putting the information into req.environ. Either returns 404 - or the routed WSGI app's response. - """ - match = req.environ['wsgiorg.routing_args'][1] - if not match: - return webob.exc.HTTPNotFound() - app = match['controller'] - return app - - -class Request(webob.Request): - """Add some Openstack API-specific logic to the base webob.Request.""" - - default_request_content_types = ('application/json', 'application/xml') - default_accept_types = ('application/json', 'application/xml') - default_accept_type = 'application/json' - - def best_match_content_type(self, supported_content_types=None): - """Determine the requested response content-type. - - Based on the query extension then the Accept header. - Defaults to default_accept_type if we don't find a preference - - """ - supported_content_types = (supported_content_types or - self.default_accept_types) - - parts = self.path.rsplit('.', 1) - if len(parts) > 1: - ctype = 'application/{0}'.format(parts[1]) - if ctype in supported_content_types: - return ctype - - bm = self.accept.best_match(supported_content_types) - return bm or self.default_accept_type - - def get_content_type(self, allowed_content_types=None): - """Determine content type of the request body. - - Does not do any body introspection, only checks header - - """ - if "Content-Type" not in self.headers: - return None - - content_type = self.content_type - allowed_content_types = (allowed_content_types or - self.default_request_content_types) - - if content_type not in allowed_content_types: - raise exception.InvalidContentType(content_type=content_type) - return content_type - - -class Resource(object): - """ - WSGI app that handles (de)serialization and controller dispatch. 
- - Reads routing information supplied by RoutesMiddleware and calls - the requested action method upon its deserializer, controller, - and serializer. Those three objects may implement any of the basic - controller action methods (create, update, show, index, delete) - along with any that may be specified in the api router. A 'default' - method may also be implemented to be used in place of any - non-implemented actions. Deserializer methods must accept a request - argument and return a dictionary. Controller methods must accept a - request argument. Additionally, they must also accept keyword - arguments that represent the keys returned by the Deserializer. They - may raise a webob.exc exception or return a dict, which will be - serialized by requested content type. - """ - def __init__(self, controller, deserializer=None, serializer=None): - """ - :param controller: object that implement methods created by routes lib - :param deserializer: object that supports webob request deserialization - through controller-like actions - :param serializer: object that supports webob response serialization - through controller-like actions - """ - self.controller = controller - self.serializer = serializer or ResponseSerializer() - self.deserializer = deserializer or RequestDeserializer() - - @webob.dec.wsgify(RequestClass=Request) - def __call__(self, request): - """WSGI method that controls (de)serialization and method dispatch.""" - - try: - action, action_args, accept = self.deserialize_request(request) - except exception.InvalidContentType: - msg = _("Unsupported Content-Type") - return webob.exc.HTTPUnsupportedMediaType(explanation=msg) - except exception.MalformedRequestBody: - msg = _("Malformed request body") - return webob.exc.HTTPBadRequest(explanation=msg) - - action_result = self.execute_action(action, request, **action_args) - try: - return self.serialize_response(action, action_result, accept) - # return unserializable result (typically a webob exc) - except 
Exception: - return action_result - - def deserialize_request(self, request): - return self.deserializer.deserialize(request) - - def serialize_response(self, action, action_result, accept): - return self.serializer.serialize(action_result, accept, action) - - def execute_action(self, action, request, **action_args): - return self.dispatch(self.controller, action, request, **action_args) - - def dispatch(self, obj, action, *args, **kwargs): - """Find action-specific method on self and call it.""" - try: - method = getattr(obj, action) - except AttributeError: - method = getattr(obj, 'default') - - return method(*args, **kwargs) - - def get_action_args(self, request_environment): - """Parse dictionary created by routes library.""" - try: - args = request_environment['wsgiorg.routing_args'][1].copy() - except Exception: - return {} - - try: - del args['controller'] - except KeyError: - pass - - try: - del args['format'] - except KeyError: - pass - - return args - - -class ActionDispatcher(object): - """Maps method name to local methods through action name.""" - - def dispatch(self, *args, **kwargs): - """Find and call local method.""" - action = kwargs.pop('action', 'default') - action_method = getattr(self, str(action), self.default) - return action_method(*args, **kwargs) - - def default(self, data): - raise NotImplementedError() - - -class DictSerializer(ActionDispatcher): - """Default request body serialization""" - - def serialize(self, data, action='default'): - return self.dispatch(data, action=action) - - def default(self, data): - return "" - - -class JSONDictSerializer(DictSerializer): - """Default JSON request body serialization""" - - def default(self, data): - def sanitizer(obj): - if isinstance(obj, datetime.datetime): - _dtime = obj - datetime.timedelta(microseconds=obj.microsecond) - return _dtime.isoformat() - return unicode(obj) - return jsonutils.dumps(data, default=sanitizer) - - -class XMLDictSerializer(DictSerializer): - - def __init__(self, 
metadata=None, xmlns=None): - """ - :param metadata: information needed to deserialize xml into - a dictionary. - :param xmlns: XML namespace to include with serialized xml - """ - super(XMLDictSerializer, self).__init__() - self.metadata = metadata or {} - self.xmlns = xmlns - - def default(self, data): - # We expect data to contain a single key which is the XML root. - root_key = data.keys()[0] - doc = minidom.Document() - node = self._to_xml_node(doc, self.metadata, root_key, data[root_key]) - - return self.to_xml_string(node) - - def to_xml_string(self, node, has_atom=False): - self._add_xmlns(node, has_atom) - return node.toprettyxml(indent=' ', encoding='UTF-8') - - #NOTE (ameade): the has_atom should be removed after all of the - # xml serializers and view builders have been updated to the current - # spec that required all responses include the xmlns:atom, the has_atom - # flag is to prevent current tests from breaking - def _add_xmlns(self, node, has_atom=False): - if self.xmlns is not None: - node.setAttribute('xmlns', self.xmlns) - if has_atom: - node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom") - - def _to_xml_node(self, doc, metadata, nodename, data): - """Recursive method to convert data members to XML nodes.""" - result = doc.createElement(nodename) - - # Set the xml namespace if one is specified - # TODO(justinsb): We could also use prefixes on the keys - xmlns = metadata.get('xmlns', None) - if xmlns: - result.setAttribute('xmlns', xmlns) - - #TODO(bcwaldon): accomplish this without a type-check - if type(data) is list: - collections = metadata.get('list_collections', {}) - if nodename in collections: - metadata = collections[nodename] - for item in data: - node = doc.createElement(metadata['item_name']) - node.setAttribute(metadata['item_key'], str(item)) - result.appendChild(node) - return result - singular = metadata.get('plurals', {}).get(nodename, None) - if singular is None: - if nodename.endswith('s'): - singular = nodename[:-1] 
- else: - singular = 'item' - for item in data: - node = self._to_xml_node(doc, metadata, singular, item) - result.appendChild(node) - #TODO(bcwaldon): accomplish this without a type-check - elif type(data) is dict: - collections = metadata.get('dict_collections', {}) - if nodename in collections: - metadata = collections[nodename] - for k, v in data.items(): - node = doc.createElement(metadata['item_name']) - node.setAttribute(metadata['item_key'], str(k)) - text = doc.createTextNode(str(v)) - node.appendChild(text) - result.appendChild(node) - return result - attrs = metadata.get('attributes', {}).get(nodename, {}) - for k, v in data.items(): - if k in attrs: - result.setAttribute(k, str(v)) - else: - node = self._to_xml_node(doc, metadata, k, v) - result.appendChild(node) - else: - # Type is atom - node = doc.createTextNode(str(data)) - result.appendChild(node) - return result - - def _create_link_nodes(self, xml_doc, links): - link_nodes = [] - for link in links: - link_node = xml_doc.createElement('atom:link') - link_node.setAttribute('rel', link['rel']) - link_node.setAttribute('href', link['href']) - if 'type' in link: - link_node.setAttribute('type', link['type']) - link_nodes.append(link_node) - return link_nodes - - -class ResponseHeadersSerializer(ActionDispatcher): - """Default response headers serialization""" - - def serialize(self, response, data, action): - self.dispatch(response, data, action=action) - - def default(self, response, data): - response.status_int = 200 - - -class ResponseSerializer(object): - """Encode the necessary pieces into a response object""" - - def __init__(self, body_serializers=None, headers_serializer=None): - self.body_serializers = { - 'application/xml': XMLDictSerializer(), - 'application/json': JSONDictSerializer(), - } - self.body_serializers.update(body_serializers or {}) - - self.headers_serializer = (headers_serializer or - ResponseHeadersSerializer()) - - def serialize(self, response_data, content_type, 
action='default'): - """Serialize a dict into a string and wrap in a wsgi.Request object. - - :param response_data: dict produced by the Controller - :param content_type: expected mimetype of serialized response body - - """ - response = webob.Response() - self.serialize_headers(response, response_data, action) - self.serialize_body(response, response_data, content_type, action) - return response - - def serialize_headers(self, response, data, action): - self.headers_serializer.serialize(response, data, action) - - def serialize_body(self, response, data, content_type, action): - response.headers['Content-Type'] = content_type - if data is not None: - serializer = self.get_body_serializer(content_type) - response.body = serializer.serialize(data, action) - - def get_body_serializer(self, content_type): - try: - return self.body_serializers[content_type] - except (KeyError, TypeError): - raise exception.InvalidContentType(content_type=content_type) - - -class RequestHeadersDeserializer(ActionDispatcher): - """Default request headers deserializer""" - - def deserialize(self, request, action): - return self.dispatch(request, action=action) - - def default(self, request): - return {} - - -class RequestDeserializer(object): - """Break up a Request object into more useful pieces.""" - - def __init__(self, body_deserializers=None, headers_deserializer=None, - supported_content_types=None): - - self.supported_content_types = supported_content_types - - self.body_deserializers = { - 'application/xml': XMLDeserializer(), - 'application/json': JSONDeserializer(), - } - self.body_deserializers.update(body_deserializers or {}) - - self.headers_deserializer = (headers_deserializer or - RequestHeadersDeserializer()) - - def deserialize(self, request): - """Extract necessary pieces of the request. 
- - :param request: Request object - :returns: tuple of (expected controller action name, dictionary of - keyword arguments to pass to the controller, the expected - content type of the response) - - """ - action_args = self.get_action_args(request.environ) - action = action_args.pop('action', None) - - action_args.update(self.deserialize_headers(request, action)) - action_args.update(self.deserialize_body(request, action)) - - accept = self.get_expected_content_type(request) - - return (action, action_args, accept) - - def deserialize_headers(self, request, action): - return self.headers_deserializer.deserialize(request, action) - - def deserialize_body(self, request, action): - if not len(request.body) > 0: - LOG.debug(_("Empty body provided in request")) - return {} - - try: - content_type = request.get_content_type() - except exception.InvalidContentType: - LOG.debug(_("Unrecognized Content-Type provided in request")) - raise - - if content_type is None: - LOG.debug(_("No Content-Type provided in request")) - return {} - - try: - deserializer = self.get_body_deserializer(content_type) - except exception.InvalidContentType: - LOG.debug(_("Unable to deserialize body as provided Content-Type")) - raise - - return deserializer.deserialize(request.body, action) - - def get_body_deserializer(self, content_type): - try: - return self.body_deserializers[content_type] - except (KeyError, TypeError): - raise exception.InvalidContentType(content_type=content_type) - - def get_expected_content_type(self, request): - return request.best_match_content_type(self.supported_content_types) - - def get_action_args(self, request_environment): - """Parse dictionary created by routes library.""" - try: - args = request_environment['wsgiorg.routing_args'][1].copy() - except Exception: - return {} - - try: - del args['controller'] - except KeyError: - pass - - try: - del args['format'] - except KeyError: - pass - - return args - - -class TextDeserializer(ActionDispatcher): - 
"""Default request body deserialization""" - - def deserialize(self, datastring, action='default'): - return self.dispatch(datastring, action=action) - - def default(self, datastring): - return {} - - -class JSONDeserializer(TextDeserializer): - - def _from_json(self, datastring): - try: - return jsonutils.loads(datastring) - except ValueError: - msg = _("cannot understand JSON") - raise exception.MalformedRequestBody(reason=msg) - - def default(self, datastring): - return {'body': self._from_json(datastring)} - - -class XMLDeserializer(TextDeserializer): - - def __init__(self, metadata=None): - """ - :param metadata: information needed to deserialize xml into - a dictionary. - """ - super(XMLDeserializer, self).__init__() - self.metadata = metadata or {} - - def _from_xml(self, datastring): - plurals = set(self.metadata.get('plurals', {})) - - try: - node = xmlutils.safe_minidom_parse_string(datastring).childNodes[0] - return {node.nodeName: self._from_xml_node(node, plurals)} - except expat.ExpatError: - msg = _("cannot understand XML") - raise exception.MalformedRequestBody(reason=msg) - - def _from_xml_node(self, node, listnames): - """Convert a minidom node to a simple Python type. - - :param listnames: list of XML node names whose subnodes should - be considered list items. 
- - """ - - if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3: - return node.childNodes[0].nodeValue - elif node.nodeName in listnames: - return [self._from_xml_node(n, listnames) for n in node.childNodes] - else: - result = dict() - for attr in node.attributes.keys(): - result[attr] = node.attributes[attr].nodeValue - for child in node.childNodes: - if child.nodeType != node.TEXT_NODE: - result[child.nodeName] = self._from_xml_node(child, - listnames) - return result - - def find_first_child_named(self, parent, name): - """Search a nodes children for the first child with a given name""" - for node in parent.childNodes: - if node.nodeName == name: - return node - return None - - def find_children_named(self, parent, name): - """Return all of a nodes children who have the given name""" - for node in parent.childNodes: - if node.nodeName == name: - yield node - - def extract_text(self, node): - """Get the text field contained by the given node""" - if len(node.childNodes) == 1: - child = node.childNodes[0] - if child.nodeType == child.TEXT_NODE: - return child.nodeValue - return "" - - def default(self, datastring): - return {'body': self._from_xml(datastring)} diff --git a/billingstack/openstack/common/xmlutils.py b/billingstack/openstack/common/xmlutils.py deleted file mode 100644 index 3370048..0000000 --- a/billingstack/openstack/common/xmlutils.py +++ /dev/null @@ -1,74 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2013 IBM -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from xml.dom import minidom -from xml.parsers import expat -from xml import sax -from xml.sax import expatreader - - -class ProtectedExpatParser(expatreader.ExpatParser): - """An expat parser which disables DTD's and entities by default.""" - - def __init__(self, forbid_dtd=True, forbid_entities=True, - *args, **kwargs): - # Python 2.x old style class - expatreader.ExpatParser.__init__(self, *args, **kwargs) - self.forbid_dtd = forbid_dtd - self.forbid_entities = forbid_entities - - def start_doctype_decl(self, name, sysid, pubid, has_internal_subset): - raise ValueError("Inline DTD forbidden") - - def entity_decl(self, entityName, is_parameter_entity, value, base, - systemId, publicId, notationName): - raise ValueError(" entity declaration forbidden") - - def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): - # expat 1.2 - raise ValueError(" unparsed entity forbidden") - - def external_entity_ref(self, context, base, systemId, publicId): - raise ValueError(" external entity forbidden") - - def notation_decl(self, name, base, sysid, pubid): - raise ValueError(" notation forbidden") - - def reset(self): - expatreader.ExpatParser.reset(self) - if self.forbid_dtd: - self._parser.StartDoctypeDeclHandler = self.start_doctype_decl - self._parser.EndDoctypeDeclHandler = None - if self.forbid_entities: - self._parser.EntityDeclHandler = self.entity_decl - self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl - self._parser.ExternalEntityRefHandler = self.external_entity_ref - self._parser.NotationDeclHandler = self.notation_decl - try: - self._parser.SkippedEntityHandler = None - except AttributeError: - # some pyexpat versions do not support SkippedEntity - pass - - -def safe_minidom_parse_string(xml_string): - """Parse an XML string using minidom safely. 
- - """ - try: - return minidom.parseString(xml_string, parser=ProtectedExpatParser()) - except sax.SAXParseException: - raise expat.ExpatError() diff --git a/billingstack/paths.py b/billingstack/paths.py deleted file mode 100644 index 8d84289..0000000 --- a/billingstack/paths.py +++ /dev/null @@ -1,68 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os - -from oslo.config import cfg - -path_opts = [ - cfg.StrOpt('pybasedir', - default=os.path.abspath(os.path.join(os.path.dirname(__file__), - '../')), - help='Directory where the nova python module is installed'), - cfg.StrOpt('bindir', - default='$pybasedir/bin', - help='Directory where nova binaries are installed'), - cfg.StrOpt('state_path', - default='$pybasedir', - help="Top-level directory for maintaining nova's state"), -] - -CONF = cfg.CONF -CONF.register_opts(path_opts) - - -def basedir_def(*args): - """Return an uninterpolated path relative to $pybasedir.""" - return os.path.join('$pybasedir', *args) - - -def bindir_def(*args): - """Return an uninterpolated path relative to $bindir.""" - return os.path.join('$bindir', *args) - - -def state_path_def(*args): - """Return an uninterpolated path relative to $state_path.""" - return os.path.join('$state_path', *args) - - -def basedir_rel(*args): - """Return a path relative to $pybasedir.""" - return os.path.join(CONF.pybasedir, *args) - - -def bindir_rel(*args): - """Return a path relative to $bindir.""" - return os.path.join(CONF.bindir, *args) - - -def state_path_rel(*args): - """Return a path relative to $state_path.""" - return os.path.join(CONF.state_path, *args) diff --git a/billingstack/payment_gateway/__init__.py b/billingstack/payment_gateway/__init__.py deleted file mode 100644 index a9dcf32..0000000 --- a/billingstack/payment_gateway/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -from stevedore.extension import ExtensionManager - -from billingstack import exceptions -from billingstack.openstack.common import log -from billingstack.payment_gateway.base import Provider -from billingstack.storage.utils import get_connection - - -LOG = log.getLogger(__name__) - - -def _register(ep, context, conn): - provider = ep.plugin - - values = provider.values() - - LOG.debug("Attempting registration of PGP %s" % - ep.plugin.get_plugin_name()) - try: - methods = provider.methods() - except NotImplementedError: - msg = "PaymentGatewayProvider %s doesn't provide any methods - Skipped" - LOG.warn(msg, provider.get_plugin_name()) - return - values['methods'] = methods - try: - conn.pg_provider_register(context, values) - except exceptions.ConfigurationError: - return - - LOG.debug("Registered PGP %s with methods %s", values, methods) - - -def register_providers(context): - conn = get_connection('collector') - em = ExtensionManager(Provider.__plugin_ns__) - em.map(_register, context, conn) - - -def get_provider(name): - return Provider.get_plugin(name) diff --git a/billingstack/payment_gateway/base.py b/billingstack/payment_gateway/base.py deleted file mode 100644 index 31e4d1b..0000000 --- a/billingstack/payment_gateway/base.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from billingstack.plugin import Plugin - - -class Provider(Plugin): - """ - Base API for Gateway Plugins. - """ - __plugin_ns__ = 'billingstack.payment_gateway' - __plugin_type__ = 'payment_gateway' - - __title__ = '' - __description__ = '' - - def __init__(self, config): - self.config = config - self.client = self.get_client() - - @classmethod - def methods(cls): - """ - The methods supported by the Provider - """ - raise NotImplementedError - - @classmethod - def properties(cls): - """ - Some extra data about the Provider if any, will be stored as - JSON in the DB - """ - return {} - - @classmethod - def values(cls): - """ - The values for this provider, used when registering in the catalog. - """ - return dict( - name=cls.get_plugin_name(), - title=cls.__title__, - description=cls.__description__, - properties=cls.properties()) - - def get_client(self): - """ - Return a Client - """ - raise NotImplementedError - - def verify_config(self): - """ - Verify a configuration. - - Raise ConfigurationError if invalid config. 
- """ - raise NotImplementedError - - def create_account(self, values): - """ - Create a new Account - - :param values: A Customer as dict - """ - raise NotImplementedError - - def get_account(self, id_): - """ - List Accounts - - :param id_: Account ID to get - """ - raise NotImplementedError - - def list_account(self): - """ - List Accounts - """ - raise NotImplementedError - - def delete_account(self, id_): - """ - Delete Account - - :param id_: Account ID to delete - """ - raise NotImplementedError - - def create_payment_method(self, account_id, values): - """ - Create a new Credit Card or similar - - :param account_d: The Account ID to add this PM to - :param values: Values to create the PM from - """ - raise NotImplementedError - - def get_payment_method(self, id_): - """ - Get a PaymentMethod - - :param id_: The ID of the PM to get - """ - raise NotImplementedError - - def list_payment_method(self, account_id): - """ - List PaymentMethods - - :param account_id: The Account ID to list Pms for - """ - raise NotImplementedError - - def delete_payment_method(self, id_): - """ - Delete a PaymentMethod - """ - raise NotImplementedError - - def transaction_add(self, account, values): - """ - Create a new Transaction - - :param account: The Account entity to create it on - :param values: Values to create it with - """ - raise NotImplementedError - - def transaction_get(self, id_): - """ - Get a Transaction - - :param id_: The ID of the Transaction - """ - raise NotImplementedError - - def transaction_list(self): - """ - List Transactions - """ - raise NotImplementedError - - def transaction_settle(self, id_): - """ - Settle a Transaction - - :param id_: The ID of the Transaction - """ - raise NotImplementedError - - def transaction_void(self, id_): - """ - Void a Transaction - - :param id_: The ID of the Transaction - """ - raise NotImplementedError - - def transaction_refund(self, id_): - """ - Refund a Transaction - - :param id_: The ID of the Transaction - """ - 
raise NotImplementedError diff --git a/billingstack/payment_gateway/dummy.py b/billingstack/payment_gateway/dummy.py deleted file mode 100644 index 2896e44..0000000 --- a/billingstack/payment_gateway/dummy.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.payment_gateway.base import Provider - - -class DummyClient(object): - def __init__(self): - pass - - -class DummyProvider(Provider): - """ - A Stupid Provider that does nothing - """ - __plugin_name__ = 'dummy' - __title__ = 'Dummy Provider' - __description__ = 'Noop Dummy' - - @classmethod - def methods(cls): - return [ - {"name": "visa", "type": "creditcard"}] - - @classmethod - def properties(cls): - return {"enabled": 0} - - def get_client(self): - return DummyClient() - - def create_payment_method(self, account_id, values): - return True - - def verify_config(self): - return True diff --git a/billingstack/plugin.py b/billingstack/plugin.py deleted file mode 100644 index ee92afb..0000000 --- a/billingstack/plugin.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from stevedore import driver -from billingstack.openstack.common import log as logging - - -LOG = logging.getLogger(__name__) - - -class Plugin(object): - __plugin_ns__ = None - - __plugin_name__ = None - __plugin_type__ = None - - def __init__(self): - self.name = self.get_canonical_name() - LOG.debug("Loaded plugin %s", self.name) - - def is_enabled(self): - """ - Is this Plugin enabled? - - :retval: Boolean - """ - return True - - @classmethod - def get_plugin(cls, name, ns=None, invoke_on_load=False, - invoke_args=(), invoke_kwds={}): - """ - Load a plugin from namespace - """ - ns = ns or cls.__plugin_ns__ - if ns is None: - raise RuntimeError('No namespace provided or __plugin_ns__ unset') - - LOG.debug('Looking for plugin %s in %s', name, ns) - mgr = driver.DriverManager(ns, name) - - return mgr.driver(*invoke_args, **invoke_kwds) if invoke_on_load \ - else mgr.driver - - @classmethod - def get_canonical_name(cls): - """ - Return the plugin name - """ - type_ = cls.get_plugin_type() - name = cls.get_plugin_name() - return "%s:%s" % (type_, name) - - @classmethod - def get_plugin_name(cls): - return cls.__plugin_name__ - - @classmethod - def get_plugin_type(cls): - return cls.__plugin_type__ - - def start(self): - """ - Start this plugin - """ - - def stop(self): - """ - Stop this plugin from doing anything - """ diff --git a/billingstack/rater/__init__.py b/billingstack/rater/__init__.py deleted file mode 100644 index ef1989d..0000000 --- a/billingstack/rater/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre 
Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from oslo.config import cfg - -cfg.CONF.register_group(cfg.OptGroup( - name='service:rater', title="Configuration for Rating/Rater Service" -)) - -cfg.CONF.register_opts([ - cfg.IntOpt('workers', default=None, - help='Number of worker processes to spawn'), - cfg.StrOpt('storage-driver', default='sqlalchemy', - help='The storage driver to use'), -], group='service:rater') diff --git a/billingstack/rater/rpcapi.py b/billingstack/rater/rpcapi.py deleted file mode 100644 index 2e53c78..0000000 --- a/billingstack/rater/rpcapi.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo.config import cfg - -from billingstack.openstack.common.rpc import proxy - -rpcapi_opts = [ - cfg.StrOpt('rater_topic', default='rater', - help='the topic rater nodes listen on') -] - -cfg.CONF.register_opts(rpcapi_opts) - - -class RaterAPI(proxy.RpcProxy): - BASE_RPC_VERSION = '1.0' - - def __init__(self): - super(RaterAPI, self).__init__( - topic=cfg.CONF.rater_topic, - default_version=self.BASE_RPC_VERSION) - - # Subscriptions - def create_usage(self, ctxt, values): - return self.call(ctxt, self.make_msg('create_usage', values=values)) - - def list_usages(self, ctxt, criterion=None): - return self.call(ctxt, self.make_msg('list_usages', - criterion=criterion)) - - def get_usage(self, ctxt, id_): - return self.call(ctxt, self.make_msg('get_usage', id_=id_)) - - def update_usage(self, ctxt, id_, values): - return self.call(ctxt, self.make_msg('update_usage', id_=id_, - values=values)) - - def delete_usage(self, ctxt, id_): - return self.call(ctxt, self.make_msg('delete_usage', id_=id_)) - - -rater_api = RaterAPI() diff --git a/billingstack/rater/service.py b/billingstack/rater/service.py deleted file mode 100644 index 652a134..0000000 --- a/billingstack/rater/service.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-import sys - -from oslo.config import cfg -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import service as os_service -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack.storage.utils import get_connection -from billingstack import service as bs_service - - -cfg.CONF.import_opt('rater_topic', 'billingstack.rater.rpcapi') -cfg.CONF.import_opt('host', 'billingstack.netconf') -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -LOG = logging.getLogger(__name__) - - -class Service(rpc_service.Service): - """ - The Usage / Rater / Rating service for BillingStack. - - This is a service that will receive events typically from a Mediator like - like Medjatur or the DUDE from Dreamhost that pushes data to the API which - casts to this service. - """ - def __init__(self, *args, **kwargs): - kwargs.update( - host=cfg.CONF.host, - topic=cfg.CONF.rater_topic, - ) - - super(Service, self).__init__(*args, **kwargs) - - def start(self): - self.storage_conn = get_connection('rater') - super(Service, self).start() - - def wait(self): - super(Service, self).wait() - self.conn.consumer_thread.wait() - - def create_usage(self, ctxt, values): - return self.storage_conn.create_usage(ctxt, values) - - def list_usages(self, ctxt, **kw): - return self.storage_conn.list_usages(ctxt, **kw) - - def get_usage(self, ctxt, id_): - return self.storage_conn.get_usage(ctxt, id_) - - def update_usage(self, ctxt, id_, values): - return self.storage_conn.update_usage(ctxt, id_, values) - - def delete_usage(self, ctxt, id_): - return self.storage_conn.delete_usage(ctxt, id_) - - -def launch(): - bs_service.prepare_service(sys.argv) - launcher = os_service.launch(Service(), - cfg.CONF['service:rater'].workers) - launcher.wait() diff --git a/billingstack/rater/storage/__init__.py b/billingstack/rater/storage/__init__.py deleted file mode 100644 index 6402efe..0000000 --- a/billingstack/rater/storage/__init__.py +++ 
/dev/null @@ -1,39 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.storage import base - - -class StorageEngine(base.StorageEngine): - """Base class for the rater storage""" - __plugin_ns__ = 'billingstack.rater.storage' - - -class Connection(base.Connection): - """Define the base API for rater storage""" - def create_usage(self, ctxt, values): - raise NotImplementedError - - def list_usages(self, ctxt, **kw): - raise NotImplementedError - - def get_usage(self, ctxt, id_): - raise NotImplementedError - - def update_usage(self, ctxt, id_, values): - raise NotImplementedError - - def delete_usage(self, ctxt, id_): - raise NotImplementedError diff --git a/billingstack/rater/storage/impl_sqlalchemy.py b/billingstack/rater/storage/impl_sqlalchemy.py deleted file mode 100644 index 4ebac66..0000000 --- a/billingstack/rater/storage/impl_sqlalchemy.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -""" -A Usage plugin using sqlalchemy... -""" -from oslo.config import cfg -from sqlalchemy import Column -from sqlalchemy import Unicode, Float, DateTime -from sqlalchemy.ext.declarative import declarative_base - -from billingstack.openstack.common import log as logging -from billingstack.rater.storage import Connection, StorageEngine -from billingstack.sqlalchemy.types import UUID -from billingstack.sqlalchemy import api, model_base, session - - -# DB SCHEMA -BASE = declarative_base(cls=model_base.ModelBase) - -LOG = logging.getLogger(__name__) - - -cfg.CONF.register_group(cfg.OptGroup( - name='rater:sqlalchemy', title='Config for rater sqlalchemy plugin')) - - -cfg.CONF.register_opts(session.SQLOPTS, group='rater:sqlalchemy') - - -class Usage(BASE, model_base.BaseMixin): - """ - A record of something that's used from for example a Metering system like - Ceilometer - """ - measure = Column(Unicode(255)) - start_timestamp = Column(DateTime) - end_timestamp = Column(DateTime) - - price = Column(Float) - total = Column(Float) - value = Column(Float) - merchant_id = Column(UUID) - product_id = Column(UUID, nullable=False) - subscription_id = Column(UUID, nullable=False) - - -class SQLAlchemyEngine(StorageEngine): - __plugin_name__ = 'sqlalchemy' - - def get_connection(self): - return Connection() - - -class Connection(Connection, api.HelpersMixin): - def __init__(self): - self.setup('rater:sqlalchemy') - - def base(self): - return BASE - - def create_usage(self, ctxt, values): - row = Usage(**values) - self._save(row) - return dict(row) - - def list_usages(self, ctxt, **kw): - return self._list(Usage, **kw) - - def get_usage(self, ctxt, id_): - return self._get(Usage, id_) - - def update_usage(self, ctxt, id_, values): - return self._update(Usage, id_, values) - - def delete_usage(self, ctxt, id_): - self._delete(Usage, id_) diff --git a/billingstack/samples.py 
b/billingstack/samples.py deleted file mode 100644 index 3e18e57..0000000 --- a/billingstack/samples.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import glob -import os.path - -from billingstack.openstack.common import jsonutils as json - - -DIR = os.path.join(os.path.dirname(__file__), 'samples_data') - - -def get_sample(name): - """ - Get a sample file .json, for example user.json - - :param name: The name of the sample type - """ - f = open('%s/%s.json' % (DIR, name)) - return json.loads(f.read()) - - -def get_samples(): - """ - Read the samples and return it as a dict where the filename is the key - """ - samples = {} - for f in glob.glob(DIR + '/*.json'): - name = os.path.basename(f)[:-(len(".json"))] - samples[name] = get_sample(name) - return samples diff --git a/billingstack/samples_data/contact_info.json b/billingstack/samples_data/contact_info.json deleted file mode 100644 index efbab3b..0000000 --- a/billingstack/samples_data/contact_info.json +++ /dev/null @@ -1,15 +0,0 @@ -[ - { - "first_name": "Mr Bill", - "last_name": "Biller", - "company": "Company X", - "address1": "SomeStreet 1", - "address2": "Apartment 10", - "locality": "Stavanger", - "region": "Rogaland", - "postal_code": "4000", - "country_name": "Norway", - "phone": "22 22 22 22", - "email": "bill.biller@comp-x.com" - } -] diff --git a/billingstack/samples_data/currency.json 
b/billingstack/samples_data/currency.json deleted file mode 100644 index 209d1c4..0000000 --- a/billingstack/samples_data/currency.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - { - "name": "nok" - }, - { - "name": "sek" - } -] diff --git a/billingstack/samples_data/customer.json b/billingstack/samples_data/customer.json deleted file mode 100644 index 5e43d77..0000000 --- a/billingstack/samples_data/customer.json +++ /dev/null @@ -1,5 +0,0 @@ -[ - { - "name": "Customer X" - } -] diff --git a/billingstack/samples_data/fixtures/currencies_get_response.json b/billingstack/samples_data/fixtures/currencies_get_response.json deleted file mode 100644 index 82f4d07..0000000 --- a/billingstack/samples_data/fixtures/currencies_get_response.json +++ /dev/null @@ -1,9 +0,0 @@ -[{ - "id": "402881a33ce9cac2013ce9cb33e10002", - "letter": "usd", - "name": "US Dollar" -}, { - "id": "402881a33ce9cac2013ce9cb33f90003", - "letter": "eur", - "name": "Euro" -}] diff --git a/billingstack/samples_data/fixtures/currencies_post_request.json b/billingstack/samples_data/fixtures/currencies_post_request.json deleted file mode 100644 index 9e806cd..0000000 --- a/billingstack/samples_data/fixtures/currencies_post_request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "letter" : "usd", - "name" : "US Dollar" -} diff --git a/billingstack/samples_data/fixtures/currencies_post_response.json b/billingstack/samples_data/fixtures/currencies_post_response.json deleted file mode 100644 index 5b5c510..0000000 --- a/billingstack/samples_data/fixtures/currencies_post_response.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id": "402881a33ce9cac2013ce9cb33e10002", - "letter": "usd", - "name": "US Dollar" -} diff --git a/billingstack/samples_data/fixtures/languages_get_response.json b/billingstack/samples_data/fixtures/languages_get_response.json deleted file mode 100644 index 03f29a3..0000000 --- a/billingstack/samples_data/fixtures/languages_get_response.json +++ /dev/null @@ -1,9 +0,0 @@ -[{ - "id": 
"402881a33ce9cac2013ce9cb32290000", - "letter": "en", - "name": "English" -}, { - "id": "402881a33ce9cac2013ce9cb32ae0001", - "letter": "es", - "name": "Spanish" -}] diff --git a/billingstack/samples_data/fixtures/languages_post_request.json b/billingstack/samples_data/fixtures/languages_post_request.json deleted file mode 100644 index 766740e..0000000 --- a/billingstack/samples_data/fixtures/languages_post_request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "letter" : "en", - "name" : "English" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/languages_post_response.json b/billingstack/samples_data/fixtures/languages_post_response.json deleted file mode 100644 index ed949ce..0000000 --- a/billingstack/samples_data/fixtures/languages_post_response.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id": "402881a33ce9cac2013ce9cb32290000", - "letter": "en", - "name": "English" -} diff --git a/billingstack/samples_data/fixtures/merchant_products_get_response.json b/billingstack/samples_data/fixtures/merchant_products_get_response.json deleted file mode 100644 index b17a874..0000000 --- a/billingstack/samples_data/fixtures/merchant_products_get_response.json +++ /dev/null @@ -1,9 +0,0 @@ -[{ - "id": "402881a33cf4568b013cf45796360008", - "name": "instance:m1.tiny", - "title": "instance:m1.tiny" -}, { - "id": "402881a33cf4568b013cf45796510009", - "name": "instance:m1.small", - "title": "instance:m1.small" -}] diff --git a/billingstack/samples_data/fixtures/merchant_products_post_request.json b/billingstack/samples_data/fixtures/merchant_products_post_request.json deleted file mode 100644 index cb2d6c4..0000000 --- a/billingstack/samples_data/fixtures/merchant_products_post_request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name" : "instance:m1.tiny", - "title" : "instance:m1.tiny" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/merchant_products_post_response.json 
b/billingstack/samples_data/fixtures/merchant_products_post_response.json deleted file mode 100644 index b4611bb..0000000 --- a/billingstack/samples_data/fixtures/merchant_products_post_response.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id" : "402881a33cf44515013cf4515fa50008" - "name" : "instance:m1.tiny", - "title" : "instance:m1.tiny" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/merchant_users_get_response.json b/billingstack/samples_data/fixtures/merchant_users_get_response.json deleted file mode 100644 index 446de3d..0000000 --- a/billingstack/samples_data/fixtures/merchant_users_get_response.json +++ /dev/null @@ -1,6 +0,0 @@ -[{ - "id": "402881a33cf42afd013cf42c156b0007", - "merchant": "402881a33cf42afd013cf42c13a30005", - "username": "luis", - "password": "secret0" -}] diff --git a/billingstack/samples_data/fixtures/merchant_users_post_request.json b/billingstack/samples_data/fixtures/merchant_users_post_request.json deleted file mode 100644 index bf78176..0000000 --- a/billingstack/samples_data/fixtures/merchant_users_post_request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "username": "luis", - "password": "secret0" -} diff --git a/billingstack/samples_data/fixtures/merchant_users_post_response.json b/billingstack/samples_data/fixtures/merchant_users_post_response.json deleted file mode 100644 index 72b9731..0000000 --- a/billingstack/samples_data/fixtures/merchant_users_post_response.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "id": "402881a33cf42afd013cf42c156b0007", - "merchant": "402881a33cf42afd013cf42c13a30005", - "username": "luis", - "password": "secret0" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/merchants_get_response.json b/billingstack/samples_data/fixtures/merchants_get_response.json deleted file mode 100644 index cbccc0c..0000000 --- a/billingstack/samples_data/fixtures/merchants_get_response.json +++ /dev/null @@ -1,13 +0,0 @@ -[{ - "id": "402881a33ce9cac2013ce9cb36380004", - 
"name": "billingstack", - "title": "BillingStack", - "language": "en", - "currency": "usd" -}, { - "id": "402881a33ce9cac2013ce9cb36950005", - "name": "openstackbiller", - "title": "OpenStack Biller", - "language": "es", - "currency": "eur" -}] diff --git a/billingstack/samples_data/fixtures/merchants_post_request.json b/billingstack/samples_data/fixtures/merchants_post_request.json deleted file mode 100644 index 6e41893..0000000 --- a/billingstack/samples_data/fixtures/merchants_post_request.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name" : "billingstack", - "title" : "BillingStack", - "language" : "en", - "currency" : "usd" -} \ No newline at end of file diff --git a/billingstack/samples_data/fixtures/merchants_post_response.json b/billingstack/samples_data/fixtures/merchants_post_response.json deleted file mode 100644 index b3408da..0000000 --- a/billingstack/samples_data/fixtures/merchants_post_response.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "id": "402881a33ce9cac2013ce9cb36380004", - "name": "billingstack", - "title": "BillingStack", - "language": "en", - "currency": "usd" -} diff --git a/billingstack/samples_data/fixtures/payment_gateway_providers_get_response.json b/billingstack/samples_data/fixtures/payment_gateway_providers_get_response.json deleted file mode 100644 index 9498cc9..0000000 --- a/billingstack/samples_data/fixtures/payment_gateway_providers_get_response.json +++ /dev/null @@ -1,9 +0,0 @@ -[{ - "id": "402881a33cf3fe47013cf404d3ac0004", - "title": "Braintree", - "description": "Braintree Payments", - "is_default": true, - "metadata": { - "key.1": "value.1" - } -}] diff --git a/billingstack/samples_data/fixtures/payment_gateway_providers_post_request.json b/billingstack/samples_data/fixtures/payment_gateway_providers_post_request.json deleted file mode 100644 index 8bef177..0000000 --- a/billingstack/samples_data/fixtures/payment_gateway_providers_post_request.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name" : "braintree", - "title" : "Braintree", 
- "description" : "Braintree Payments", - "metadata" : { - "key.1": "value.1" - }, - "is_default" : true -} diff --git a/billingstack/samples_data/fixtures/payment_gateway_providers_post_response.json b/billingstack/samples_data/fixtures/payment_gateway_providers_post_response.json deleted file mode 100644 index 3ca6106..0000000 --- a/billingstack/samples_data/fixtures/payment_gateway_providers_post_response.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "id": "402881a33cf3fe47013cf404d3ac0004", - "title": "Braintree", - "description": "Braintree Payments", - "is_default": true, - "metadata": { - "key.1": "value.1" - } -} \ No newline at end of file diff --git a/billingstack/samples_data/invoice_state.json b/billingstack/samples_data/invoice_state.json deleted file mode 100644 index 1f3f7ba..0000000 --- a/billingstack/samples_data/invoice_state.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - { - "name": "pending", - "title": "Pending", - "description": "The invoice is in Pending state." - } -] \ No newline at end of file diff --git a/billingstack/samples_data/language.json b/billingstack/samples_data/language.json deleted file mode 100644 index b47f39d..0000000 --- a/billingstack/samples_data/language.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - { - "name": "nor" - }, - { - "name": "swe" - } -] diff --git a/billingstack/samples_data/merchant.json b/billingstack/samples_data/merchant.json deleted file mode 100644 index a17adf0..0000000 --- a/billingstack/samples_data/merchant.json +++ /dev/null @@ -1,6 +0,0 @@ -[ - { - "name": "Merchant X", - "title": "Merchant" - } -] diff --git a/billingstack/samples_data/payment_method.json b/billingstack/samples_data/payment_method.json deleted file mode 100644 index e4686f7..0000000 --- a/billingstack/samples_data/payment_method.json +++ /dev/null @@ -1,8 +0,0 @@ -[ - { - "name": "Visa", - "identifier": "5105105105105100", - "expires": "05/2012", - "properties": {"cardholder": "Mr Holder", "cvv": "007"} - } -] diff --git 
a/billingstack/samples_data/pg_config.json b/billingstack/samples_data/pg_config.json deleted file mode 100644 index f3a93ff..0000000 --- a/billingstack/samples_data/pg_config.json +++ /dev/null @@ -1,6 +0,0 @@ -[ - { - "name": "Braintree Config", - "properties" : {} - } -] diff --git a/billingstack/samples_data/pg_method.json b/billingstack/samples_data/pg_method.json deleted file mode 100644 index 366d737..0000000 --- a/billingstack/samples_data/pg_method.json +++ /dev/null @@ -1,20 +0,0 @@ -[ - { - "name": "visa", - "title": "Visa Credit Card", - "description": "Credit Card version of Visa", - "type": "creditcard" - }, - { - "name": "mastercard", - "title": "MasterCard", - "description": "Credit Card version of MasterCard", - "type": "creditcard" - }, - { - "name": "amex", - "title": "American Express Credit Card", - "description": "AMEX Card", - "type": "creditcard" - } -] diff --git a/billingstack/samples_data/pg_provider.json b/billingstack/samples_data/pg_provider.json deleted file mode 100644 index 0c2db64..0000000 --- a/billingstack/samples_data/pg_provider.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - { - "name" : "dummy", - "title" : "Dummy Provider", - "description" : "Dummy integration provider" - } -] diff --git a/billingstack/samples_data/plan.json b/billingstack/samples_data/plan.json deleted file mode 100644 index 7ec4076..0000000 --- a/billingstack/samples_data/plan.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - { - "name": "Compute Server", - "title": "Compute Server that has vCPU and so on", - "properties": { - "random": 1 - } - } -] diff --git a/billingstack/samples_data/product.json b/billingstack/samples_data/product.json deleted file mode 100644 index 999d1df..0000000 --- a/billingstack/samples_data/product.json +++ /dev/null @@ -1,182 +0,0 @@ -[ - { - "name" : "instance", - "description" : "Duration of instance", - "properties" : { - "resource" : "instance_id", - "measure" : "unit", - "type" : "gauge" - } - }, - { - "name" : "memory", - "description" : 
"Volume of RAM in MB", - "properties" : { - "resource" : "instance_id", - "measure" : "mb", - "type" : "gauge" - } - }, - { - "name" : "vcpus", - "description" : "Number of VCPUs", - "properties" : { - "resource" : "instance_id", - "measure" : "vcpu", - "type" : "gauge" - } - }, - { - "name" : "root_disk_size", - "description" : "Size of root disk in GB", - "properties" : { - "resource" : "instance_id", - "measure" : "gb", - "type" : "gauge" - } - }, - { - "name" : "ephemeral_disk_size", - "description" : "Size of ephemeral disk in GB", - "properties" : { - "resource" : "instance_id", - "measure" : "gb", - "type" : "gauge" - } - }, - { - "name" : "disk.read.requests", - "description" : "Number of disk read requests", - "properties" : { - "resource" : "instance_id", - "measure" : "unit", - "type" : "cumulative" - } - }, - { - "name" : "disk.read.bytes", - "description" : "Volume of disk read in bytes", - "properties" : { - "resource" : "instance_id", - "measure" : "bytes", - "type" : "cumulative" - } - }, - { - "name" : "disk.write.requests", - "description" : "Number of disk write requests", - "properties" : { - "resource" : "instance_id", - "measure" : "unit", - "type" : "cumulative" - } - }, - { - "name" : "disk.write.bytes", - "description" : "Volume of disk write in bytes", - "properties" : { - "resource" : "instance_id", - "measure" : "bytes", - "type" : "cumulative" - } - }, - { - "name" : "cpu", - "description" : "CPU time used", - "properties" : { - "resource" : "seconds", - "measure" : "unit", - "type" : "cumulative" - } - }, - { - "name" : "network.incoming.bytes", - "description" : "number of incoming bytes on the network", - "properties" : { - "resource" : "instance_id", - "measure" : "bytes", - "type" : "cumulative" - } - }, - { - "name" : "network.outgoing.bytes", - "description" : "number of outgoing bytes on the network", - "properties" : { - "resource" : "instance_id", - "measure" : "bytes", - "type" : "cumulative" - } - }, - { - "name" : 
"network.incoming.packets", - "description" : "number of incoming packets", - "properties" : { - "resource" : "instance_id", - "measure" : "packets", - "type" : "cumulative" - } - }, - { - "name" : "network.outgoing.packets", - "description" : "number of outgoing packets", - "properties" : { - "resource" : "instance_id", - "measure" : "packets", - "type" : "cumulative" - } - }, - { - "name" : "image", - "description" : "Image polling -> it (still) exists", - "properties" : { - "resource" : "image_id", - "measure" : "unit", - "type" : "gauge" - } - }, - { - "name" : "image_size", - "description" : "Uploaded image size", - "properties" : { - "resource" : "image_id", - "measure" : "bytes", - "type" : "gauge" - } - }, - { - "name" : "image_download", - "description" : "Image is downloaded", - "properties" : { - "resource" : "image_id", - "measure" : "bytes", - "type" : "gauge" - } - }, - { - "name" : "image_serve", - "description" : "Image is served out", - "properties" : { - "resource" : "image_id", - "measure" : "bytes", - "type" : "gauge" - } - }, - { - "name" : "volume", - "description" : "Duration of volume", - "properties" : { - "resource" : "measure_id", - "measure" : "unit", - "type" : "gauge" - } - }, - { - "name" : "volume_size", - "description" : "Size of measure", - "properties" : { - "resource" : "measure_id", - "measure" : "gb", - "type" : "gauge" - } - } -] diff --git a/billingstack/samples_data/user.json b/billingstack/samples_data/user.json deleted file mode 100644 index 8044673..0000000 --- a/billingstack/samples_data/user.json +++ /dev/null @@ -1,6 +0,0 @@ -[ - { - "name": "demo", - "password": "secret" - } -] diff --git a/billingstack/service.py b/billingstack/service.py deleted file mode 100644 index f728a7a..0000000 --- a/billingstack/service.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Copyright © 2012 eNovance -# -# Author: Julien Danjou -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not 
use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import eventlet -import sys - -from oslo.config import cfg -from billingstack.openstack.common import rpc -from billingstack.openstack.common import context -from billingstack.openstack.common import log -from billingstack.openstack.common.rpc import service as rpc_service -from billingstack import utils - - -cfg.CONF.register_opts([ - cfg.IntOpt('periodic_interval', - default=600, - help='seconds between running periodic tasks') -]) - -cfg.CONF.import_opt('host', 'billingstack.netconf') - - -class PeriodicService(rpc_service.Service): - - def start(self): - super(PeriodicService, self).start() - admin_context = context.RequestContext('admin', 'admin', is_admin=True) - self.tg.add_timer(cfg.CONF.periodic_interval, - self.manager.periodic_tasks, - context=admin_context) - - -def prepare_service(argv=[]): - eventlet.monkey_patch() - utils.read_config('billingstack', sys.argv) - - rpc.set_defaults(control_exchange='billingstack') - cfg.set_defaults(log.log_opts, - default_log_levels=['amqplib=WARN', - 'qpid.messaging=INFO', - 'sqlalchemy=WARN', - 'keystoneclient=INFO', - 'stevedore=INFO', - 'eventlet.wsgi.server=WARN' - ]) - cfg.CONF(argv[1:], project='billingstack') - log.setup('billingstack') diff --git a/billingstack/sqlalchemy/__init__.py b/billingstack/sqlalchemy/__init__.py deleted file mode 100644 index f7ed5c6..0000000 --- a/billingstack/sqlalchemy/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 
(the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. diff --git a/billingstack/sqlalchemy/api.py b/billingstack/sqlalchemy/api.py deleted file mode 100644 index a9c44be..0000000 --- a/billingstack/sqlalchemy/api.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from sqlalchemy.orm import exc - - -from billingstack import exceptions -from billingstack.openstack.common import log -from billingstack.sqlalchemy import model_base, session, utils -from billingstack.storage.filterer import BaseFilterer - - -LOG = log.getLogger(__name__) - - -class SQLAFilterer(BaseFilterer): - def apply_criteria(self, query, model): - """ - Apply the actual criterion in this filterer and return a query with - filters applied. 
- """ - for field, c in self.criterion.items(): - # NOTE: Try to get the column - try: - col_obj = getattr(model, field) - except AttributeError: - msg = '%s is not a valid field to query by' % field - raise exceptions.InvalidQueryField(msg) - - # NOTE: Handle a special operator - std_op = self.get_op(c.op) - if hasattr(self, c.op): - query = getattr(self, c.op)(c) - elif std_op: - query = query.filter(std_op(col_obj, c.value)) - elif c.op in ('%', 'like'): - query = query.filter(col_obj.like(c.value)) - elif c.op in ('!%', 'nlike'): - query = query.filter(col_obj.notlike(c.value)) - else: - msg = 'Invalid operator in criteria \'%s\'' % c - raise exceptions.InvalidOperator(msg) - - return query - - -class HelpersMixin(object): - def setup(self, config_group): - """ - Setup the Connection - - :param config_group: The config group to get the config from - """ - self.session = session.get_session(config_group) - self.engine = session.get_engine(config_group) - - def setup_schema(self): - """ Semi-Private Method to create the database schema """ - LOG.debug('Setting up schema') - base = self.base() - base.metadata.create_all(self.session.bind) - - def teardown_schema(self): - """ Semi-Private Method to reset the database schema """ - LOG.debug('Tearing down schema') - base = self.base() - base.metadata.drop_all(self.session.bind) - - def _save(self, row, save=True): - """ - Save a row. - - :param row: The row to save. - :param save: Save or just return a ref. - """ - if not save: - return row - - try: - row.save(self.session) - except exceptions.Duplicate: - raise - return row - - def _list(self, cls=None, query=None, criterion=None): - """ - A generic list/search helper method. 
- - Example criterion: - [{'field': 'id', 'op': 'eq', 'value': 'someid'}] - - :param cls: The model to try to delete - :param criterion: Criterion to match objects with - """ - if not cls and not query: - raise ValueError("Need either cls or query") - - query = query or self.session.query(cls) - - if criterion: - filterer = SQLAFilterer(criterion) - query = filterer.apply_criteria(query, cls) - - try: - result = query.all() - except exc.NoResultFound: - LOG.debug('No results found querying for %s: %s' % - (cls, criterion)) - return [] - else: - return result - - def _filter_id(self, cls, identifier, by_name): - """ - Apply filter for either id or name - - :param cls: The Model class. - :param identifier: The identifier of it. - :param by_name: By name. - """ - if hasattr(cls, 'id') and utils.is_valid_id(identifier): - return {'id': identifier} - elif hasattr(cls, 'name') and by_name: - return {'name': identifier} - else: - raise exceptions.NotFound('No criterias matched') - - def _get(self, cls, identifier=None, criterion=None, by_name=False): - """ - Get an instance of a Model matching ID - - :param cls: The model to try to get - :param identifier: The ID to get - :param by_name: Search by name as well as ID - """ - criterion_ = {} - - if identifier: - criterion_.update(self._filter_id(cls, identifier, by_name)) - - if isinstance(criterion, dict): - criterion_.update(criterion) - - query = self.session.query(cls) - - filterer = SQLAFilterer(criterion_) - query = filterer.apply_criteria(query, cls) - - try: - obj = query.one() - except exc.NoResultFound: - raise exceptions.NotFound(identifier) - return obj - - def _get_id_or_name(self, *args, **kw): - """ - Same as _get but with by_name on ass default - """ - kw['by_name'] = True - return self._get(*args, **kw) - - def _update(self, cls, id_, values, by_name=False): - """ - Update an instance of a Model matching an ID with values - - :param cls: The model to try to update - :param id_: The ID to update - :param 
values: The values to update the model instance with - """ - obj = self._get_id_or_name(cls, id_, by_name=by_name) - if 'id' in values and id_ != values['id']: - msg = 'Not allowed to change id' - errors = {'id': id_} - raise exceptions.InvalidObject(msg, errors=errors) - obj.update(values) - try: - obj.save(self.session) - except exceptions.Duplicate: - raise - return obj - - def _delete(self, cls, id_, by_name=False): - """ - Delete an instance of a Model matching an ID - - :param cls: The model to try to delete - :param id_: The ID to delete - """ - obj = self._get(cls, id_, by_name=by_name) - obj.delete(self.session) - - def _get_row(self, obj, cls=None, **kw): - """ - Used to either check that passed 'obj' is a ModelBase inheriting object - and just return it - - :param obj: ID or instance / ref of the object - :param cls: The class to run self._get on if obj is not a ref - """ - if isinstance(obj, model_base.ModelBase): - return obj - elif isinstance(obj, basestring) and cls: - return self._get(cls, obj) - else: - msg = 'Missing obj and/or obj and cls...' - raise exceptions.BadRequest(msg) - - def _make_rel_row(self, row, rel_attr, values): - """ - Get the class of the relation attribute in 'rel_attr' and make a - row from values with it. 
- - :param row: A instance of ModelBase - :param rel_attr: The relation attribute - :param values: The values to create the new row from - """ - cls = row.__mapper__.get_property(rel_attr).mapper.class_ - return cls(**values) - - def _dict(self, row, extra=[]): - data = dict(row) - for key in extra: - if isinstance(row[key], list): - data[key] = map(dict, row[key]) - else: - data[key] = dict(row[key]) - return data - - def _kv_rows(self, rows, key='name', func=lambda i: i): - """ - Return a Key, Value dict where the "key" will be the key and the row - as value - """ - data = {} - for row in rows: - if callable(key): - data_key = key(row) - else: - data_key = row[key] - data[data_key] = func(row) - return data diff --git a/billingstack/sqlalchemy/model_base.py b/billingstack/sqlalchemy/model_base.py deleted file mode 100644 index 46f339f..0000000 --- a/billingstack/sqlalchemy/model_base.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright 2012 Hewlett-Packard Development Company, L.P. -# -# Author: Patrick Galbraith -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -from sqlalchemy import Column, DateTime, Unicode, UnicodeText -from sqlalchemy.exc import IntegrityError -from sqlalchemy.orm import object_mapper -from sqlalchemy.ext.hybrid import hybrid_property -from sqlalchemy.ext.declarative import declared_attr - -from billingstack import exceptions, utils -from billingstack.sqlalchemy.types import UUID -from billingstack.openstack.common.uuidutils import generate_uuid -from billingstack.openstack.common import timeutils - - -class ModelBase(object): - __abstract__ = True - __table_initialized__ = False - - @declared_attr - def __tablename__(cls): - return utils.capital_to_underscore(cls.__name__) - - def save(self, session): - """ Save this object """ - session.add(self) - - try: - session.flush() - except IntegrityError, e: - non_unique_strings = ( - 'duplicate entry', - 'not unique' - ) - - for non_unique_string in non_unique_strings: - if non_unique_string in str(e).lower(): - raise exceptions.Duplicate(str(e)) - - # Not a Duplicate error.. Re-raise. - raise - - def delete(self, session): - """ Delete this object """ - session.delete(self) - session.flush() - - def __setitem__(self, key, value): - setattr(self, key, value) - - def __getitem__(self, key): - return getattr(self, key) - - def __iter__(self): - columns = [i.name for i in iter(object_mapper(self).columns) - if not i.name.startswith('_')] - # NOTE(russellb): Allow models to specify other keys that can be looked - # up, beyond the actual db columns. An example would be the 'name' - # property for an Instance. - if hasattr(self, '_extra_keys'): - columns.extend(self._extra_keys()) - self._i = iter(columns) - return self - - def next(self): - n = self._i.next() - return n, getattr(self, n) - - def update(self, values): - """ Make the model object behave like a dict """ - for k, v in values.iteritems(): - setattr(self, k, v) - - def iteritems(self): - """ - Make the model object behave like a dict. - - Includes attributes from joins. 
- """ - local = dict(self) - joined = dict([(k, v) for k, v in self.__dict__.iteritems() - if not k[0] == '_']) - local.update(joined) - return local.iteritems() - - -class BaseMixin(object): - """ - A mixin that provides id, and some dates. - """ - id = Column(UUID, default=generate_uuid, primary_key=True) - created_at = Column(DateTime, default=timeutils.utcnow) - updated_at = Column(DateTime, onupdate=timeutils.utcnow) - - -TYPES = { - "float": float, - "str": unicode, - "unicode": unicode, - "int": int, - "bool": bool -} - - -class PropertyMixin(object): - """ - Helper mixin for Property classes. - - Store the type of the value using type() or the pre-defined data_type - and cast it on value when returning the value. - - Supported types are in the TYPES dict. - """ - id = Column(UUID, default=generate_uuid, primary_key=True) - data_type = Column(Unicode(20), nullable=False, default=u'str') - name = Column(Unicode(60), index=True, nullable=False) - _value = Column('value', UnicodeText) - - @hybrid_property - def value(self): - data_type = TYPES.get(self.data_type, str) - return data_type(self._value) - - @value.setter - def value(self, value): - data_type = type(value).__name__ - self.data_type = data_type - self._value = value diff --git a/billingstack/sqlalchemy/session.py b/billingstack/sqlalchemy/session.py deleted file mode 100644 index 338d586..0000000 --- a/billingstack/sqlalchemy/session.py +++ /dev/null @@ -1,250 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -"""Session Handling for SQLAlchemy backend.""" - -import re -import time - -import sqlalchemy -from sqlalchemy.exc import DisconnectionError, OperationalError -import sqlalchemy.orm -from sqlalchemy.pool import NullPool, StaticPool - -from oslo.config import cfg -from billingstack.openstack.common import lockutils -from billingstack.openstack.common import log as logging -from billingstack.openstack.common.gettextutils import _ - -LOG = logging.getLogger(__name__) - -_MAKERS = {} -_ENGINES = {} - - -SQLOPTS = [ - cfg.StrOpt('database_connection', - default='sqlite:///$state_path/billingstack.sqlite', - help='The database driver to use'), - cfg.IntOpt('connection_debug', default=0, - help='Verbosity of SQL debugging information. 0=None,' - ' 100=Everything'), - cfg.BoolOpt('connection_trace', default=False, - help='Add python stack traces to SQL as comment strings'), - cfg.BoolOpt('sqlite_synchronous', default=True, - help='If passed, use synchronous mode for sqlite'), - cfg.IntOpt('idle_timeout', default=3600, - help='timeout before idle sql connections are reaped'), - cfg.IntOpt('max_retries', default=10, - help='maximum db connection retries during startup. 
' - '(setting -1 implies an infinite retry count)'), - cfg.IntOpt('retry_interval', default=10, - help='interval between retries of opening a sql connection') -] - - -@lockutils.synchronized('session', 'billingstack-') -def get_session(config_group, - autocommit=True, - expire_on_commit=False, - autoflush=True): - """Return a SQLAlchemy session.""" - global _MAKERS - - if config_group not in _MAKERS: - engine = get_engine(config_group) - _MAKERS[config_group] = get_maker(engine, - autocommit, - expire_on_commit, - autoflush) - - session = _MAKERS[config_group]() - return session - - -def pragma_fks(dbapi_conn, connection_rec): - dbapi_conn.execute('pragma foreign_keys=ON') - - -def synchronous_switch_listener(dbapi_conn, connection_rec): - """Switch sqlite connections to non-synchronous mode""" - dbapi_conn.execute("PRAGMA synchronous = OFF") - - -def add_regexp_listener(dbapi_con, con_record): - """Add REGEXP function to sqlite connections.""" - - def regexp(expr, item): - reg = re.compile(expr) - return reg.search(unicode(item)) is not None - dbapi_con.create_function('regexp', 2, regexp) - - -def ping_listener(dbapi_conn, connection_rec, connection_proxy): - """ - Ensures that MySQL connections checked out of the - pool are alive. - - Borrowed from: - http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f - """ - try: - dbapi_conn.cursor().execute('select 1') - except dbapi_conn.OperationalError, ex: - if ex.args[0] in (2006, 2013, 2014, 2045, 2055): - LOG.warn('Got mysql server has gone away: %s', ex) - raise DisconnectionError("Database server went away") - else: - raise - - -def is_db_connection_error(args): - """Return True if error in connecting to db.""" - # NOTE(adam_g): This is currently MySQL specific and needs to be extended - # to support Postgres and others. 
- conn_err_codes = ('2002', '2003', '2006') - for err_code in conn_err_codes: - if args.find(err_code) != -1: - return True - return False - - -def get_engine(config_group): - """Return a SQLAlchemy engine.""" - global _ENGINES - - database_connection = cfg.CONF[config_group].database_connection - - if config_group not in _ENGINES: - connection_dict = sqlalchemy.engine.url.make_url( - database_connection) - - engine_args = { - "pool_recycle": cfg.CONF[config_group].idle_timeout, - "echo": False, - 'convert_unicode': True, - } - - # Map our SQL debug level to SQLAlchemy's options - if cfg.CONF[config_group].connection_debug >= 100: - engine_args['echo'] = 'debug' - elif cfg.CONF[config_group].connection_debug >= 50: - engine_args['echo'] = True - - if "sqlite" in connection_dict.drivername: - engine_args["poolclass"] = NullPool - - if database_connection == "sqlite://": - engine_args["poolclass"] = StaticPool - engine_args["connect_args"] = {'check_same_thread': False} - - _ENGINES[config_group] = sqlalchemy.create_engine(database_connection, - **engine_args) - - if 'mysql' in connection_dict.drivername: - sqlalchemy.event.listen(_ENGINES[config_group], - 'checkout', - ping_listener) - elif "sqlite" in connection_dict.drivername: - if not cfg.CONF[config_group].sqlite_synchronous: - sqlalchemy.event.listen(_ENGINES[config_group], - 'connect', - synchronous_switch_listener) - sqlalchemy.event.listen(_ENGINES[config_group], - 'connect', - add_regexp_listener) - sqlalchemy.event.listen(_ENGINES[config_group], - 'connect', pragma_fks) - - if (cfg.CONF[config_group].connection_trace and - _ENGINES[config_group].dialect.dbapi.__name__ == 'MySQLdb'): - import MySQLdb.cursors - _do_query = debug_mysql_do_query() - setattr(MySQLdb.cursors.BaseCursor, '_do_query', _do_query) - - try: - _ENGINES[config_group].connect() - except OperationalError, e: - if not is_db_connection_error(e.args[0]): - raise - - remaining = cfg.CONF[config_group].max_retries - if remaining == -1: - 
remaining = 'infinite' - while True: - msg = _('SQL connection failed. %s attempts left.') - LOG.warn(msg % remaining) - if remaining != 'infinite': - remaining -= 1 - time.sleep(cfg.CONF[config_group].retry_interval) - try: - _ENGINES[config_group].connect() - break - except OperationalError, e: - if (remaining != 'infinite' and remaining == 0) or \ - not is_db_connection_error(e.args[0]): - raise - return _ENGINES[config_group] - - -def get_maker(engine, autocommit=True, expire_on_commit=False, autoflush=True): - """Return a SQLAlchemy sessionmaker using the given engine.""" - return sqlalchemy.orm.sessionmaker(bind=engine, - autocommit=autocommit, - autoflush=autoflush, - expire_on_commit=expire_on_commit) - - -def debug_mysql_do_query(): - """Return a debug version of MySQLdb.cursors._do_query""" - import MySQLdb.cursors - import traceback - - old_mysql_do_query = MySQLdb.cursors.BaseCursor._do_query - - def _do_query(self, q): - stack = '' - for file, line, method, function in traceback.extract_stack(): - # exclude various common things from trace - if file.endswith('session.py') and method == '_do_query': - continue - if file.endswith('api.py') and method == 'wrapper': - continue - if file.endswith('utils.py') and method == '_inner': - continue - if file.endswith('exception.py') and method == '_wrap': - continue - # nova/db/api is just a wrapper around nova/db/sqlalchemy/api - if file.endswith('nova/db/api.py'): - continue - # only trace inside nova - index = file.rfind('nova') - if index == -1: - continue - stack += "File:%s:%s Method:%s() Line:%s | " \ - % (file[index:], line, method, function) - - # strip trailing " | " from stack - if stack: - stack = stack[:-3] - qq = "%s /* %s */" % (q, stack) - else: - qq = q - old_mysql_do_query(self, qq) - - # return the new _do_query method - return _do_query diff --git a/billingstack/sqlalchemy/types.py b/billingstack/sqlalchemy/types.py deleted file mode 100644 index 123ae5e..0000000 --- 
a/billingstack/sqlalchemy/types.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Coped: Moniker -from sqlalchemy.types import TypeDecorator, CHAR, VARCHAR, UnicodeText -from sqlalchemy.dialects.postgresql import UUID as pgUUID -from sqlalchemy.dialects.postgresql import INET as pgINET -import uuid - - -from billingstack.openstack.common import jsonutils - - -class UUID(TypeDecorator): - """Platform-independent UUID type. - - Uses Postgresql's UUID type, otherwise uses - CHAR(32), storing as stringified hex values. - - Copied verbatim from SQLAlchemy documentation. 
- """ - impl = CHAR - - def load_dialect_impl(self, dialect): - if dialect.name == 'postgresql': - return dialect.type_descriptor(pgUUID()) - else: - return dialect.type_descriptor(CHAR(32)) - - def process_bind_param(self, value, dialect): - if value is None: - return value - elif dialect.name == 'postgresql': - return str(value) - else: - if not isinstance(value, uuid.UUID): - return "%.32x" % uuid.UUID(value) - else: - # hexstring - return "%.32x" % value - - def process_result_value(self, value, dialect): - if value is None: - return value - else: - return str(uuid.UUID(value)) - - -class Inet(TypeDecorator): - impl = VARCHAR - - def load_dialect_impl(self, dialect): - if dialect.name == "postgresql": - return pgINET() - else: - return VARCHAR(39) # IPv6 can be up to 39 chars - - def process_bind_param(self, value, dialect): - if value is None: - return value - else: - return str(value) - - -# Special Fields -class JSON(TypeDecorator): - - impl = UnicodeText - - def process_bind_param(self, value, dialect): - return jsonutils.dumps(value) - - def process_result_value(self, value, dialect): - return jsonutils.loads(value) diff --git a/billingstack/sqlalchemy/utils.py b/billingstack/sqlalchemy/utils.py deleted file mode 100644 index e8ad070..0000000 --- a/billingstack/sqlalchemy/utils.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from sqlalchemy.orm.properties import ColumnProperty, RelationshipProperty -from billingstack.openstack.common import uuidutils - - -def get_prop_dict(obj): - return dict([(p.key, p) for p in obj.__mapper__.iterate_properties]) - - -def get_prop_names(obj, exclude=[]): - props = get_prop_dict(obj) - - local, remote = [], [] - for k, p in props.items(): - if k not in exclude: - if isinstance(p, ColumnProperty): - local.append(k) - if isinstance(p, RelationshipProperty): - remote.append(k) - return local, remote - - -def is_valid_id(id_): - """ - Return true if this is a valid ID for the cls.id - """ - if uuidutils.is_uuid_like(id_) or isinstance(id_, int): - return True - else: - return False - - -def filter_merchant_by_join(query, cls, criterion, pop=True): - if criterion and 'merchant_id' in criterion: - if not hasattr(cls, 'merchant_id'): - raise RuntimeError('No merchant_id attribute on %s' % cls) - - query = query.join(cls).filter( - cls.merchant_id == criterion['merchant_id']) - - if pop: - criterion.pop('merchant_id') - - return query diff --git a/billingstack/storage/__init__.py b/billingstack/storage/__init__.py deleted file mode 100644 index f7ed5c6..0000000 --- a/billingstack/storage/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
diff --git a/billingstack/storage/base.py b/billingstack/storage/base.py deleted file mode 100644 index 9d09d06..0000000 --- a/billingstack/storage/base.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Copied: Moniker -from billingstack.plugin import Plugin - - -class StorageEngine(Plugin): - """ Base class for storage engines """ - __plugin_type__ = 'storage' - - def get_connection(self): - """ - Return a Connection instance based on the configuration settings. - """ - raise NotImplementedError - - -class Connection(object): - """ - A Connection - """ - def ping(self, context): - """ Ping the Storage connection """ - return { - 'status': None - } diff --git a/billingstack/storage/filterer.py b/billingstack/storage/filterer.py deleted file mode 100644 index f04b5bc..0000000 --- a/billingstack/storage/filterer.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack import exceptions -from billingstack.openstack.common import log - -import operator - -LOG = log.getLogger(__name__) - - -class Criteria(object): - """ - An object to hold Criteria - """ - def __init__(self, field, op, value): - self.field = field - self.op = op - self.value = value - - @classmethod - def from_dict(cls, data): - return cls(**data) - - def __str__(self): - return u'Field: %s, Operation: %s, Value: %s' % ( - self.field, self.op, self.value) - - -class BaseFilterer(object): - """ - Object to help with Filtering. - - Typical use cases include turning a dict into useful storage backend query - filters. - """ - - std_op = [ - (('eq', '==', '='), operator.eq), - (('ne', '!='), operator.ne), - (('ge', '>='), operator.ge), - (('le', '<='), operator.le), - (('gt', '>'), operator.gt), - (('le', '<'), operator.lt) - ] - - def __init__(self, criterion, **kw): - #: Criterion to apply - self.criterion = self.load_criterion(criterion) - - def get_op(self, op_key): - """ - Get the operator. - - :param op_key: The operator key as string. - """ - for op_keys, op in self.std_op: - if op_key in op_keys: - return op - - def load_criterion(self, criterion): - """ - Transform a dict with key values to a filter compliant list of dicts. - - :param criterion: The criterion dict. - """ - if not isinstance(criterion, dict): - msg = 'Criterion needs to be a dict.' 
- LOG.debug(msg) - raise exceptions.InvalidObject(msg) - - data = {} - for key, value in criterion.items(): - # NOTE: Criteria that doesn't have a OP defaults to eq and handle - # dicts - if isinstance(value, basestring): - c = Criteria(key, 'eq', value) - elif isinstance(value, dict): - c = Criteria.from_dict(value) - data[key] = c - return data diff --git a/billingstack/storage/utils.py b/billingstack/storage/utils.py deleted file mode 100644 index 4f55333..0000000 --- a/billingstack/storage/utils.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from oslo.config import cfg -from billingstack.openstack.common import importutils - - -def import_service_opts(service): - cfg.CONF.import_opt('storage_driver', 'billingstack.%s.storage' % service, - group='service:%s' % service) - cfg.CONF.import_opt('database_connection', - 'billingstack.%s.storage.impl_sqlalchemy' % service, - group='%s:sqlalchemy' % service) - - -def get_engine(service_name, driver_name): - """ - Return the engine class from the provided engine name - """ - path = 'billingstack.%s.storage.StorageEngine' % service_name - base = importutils.import_class(path) - return base.get_plugin(driver_name, invoke_on_load=True) - - -def get_connection(service_name, driver_name=None, import_opts=True): - """ - Return a instance of a storage connection - """ - if import_opts: - import_service_opts(service_name) - - driver_name = driver_name or \ - cfg.CONF['service:%s' % service_name].storage_driver - engine = get_engine(service_name, driver_name) - return engine.get_connection() diff --git a/billingstack/tasks.py b/billingstack/tasks.py deleted file mode 100644 index f1f30a5..0000000 --- a/billingstack/tasks.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from taskflow import task - -from billingstack.openstack.common import log -from billingstack.openstack.common.gettextutils import _ - - -LOG = log.getLogger(__name__) - - -def _make_task_name(cls, prefix=None, addons=None): - prefix = prefix or 'default' - components = [cls.__module__, cls.__name__] - if addons: - for a in addons: - components.append(str(a)) - return "%s:%s" % (prefix, ".".join(components)) - - -def _attach_debug_listeners(flow): - """Sets up a nice set of debug listeners for the flow. - - These listeners will log when tasks/flows are transitioning from state to - state so that said states can be seen in the debug log output which is very - useful for figuring out where problems are occuring. - """ - - def flow_log_change(state, details): - LOG.debug(_("%(flow)s has moved into state %(state)s from state" - " %(old_state)s") % {'state': state, - 'old_state': details.get('old_state'), - 'flow': details['flow']}) - - def task_log_change(state, details): - LOG.debug(_("%(flow)s has moved %(runner)s into state %(state)s with" - " result: %(result)s") % {'state': state, - 'flow': details['flow'], - 'runner': details['runner'], - 'result': details.get('result')}) - - # Register * for all state changes (and not selective state changes to be - # called upon) since all the changes is more useful. 
- flow.notifier.register('*', flow_log_change) - flow.task_notifier.register('*', task_log_change) - return flow - - -class RootTask(task.Task): - def __init__(self, name=None, prefix=None, addons=None, **kw): - name = name or _make_task_name(self.__class__, prefix=prefix, - addons=addons) - super(RootTask, self).__init__(name, **kw) diff --git a/billingstack/tests/__init__.py b/billingstack/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/api/__init__.py b/billingstack/tests/api/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/api/base.py b/billingstack/tests/api/base.py deleted file mode 100644 index 5dace2d..0000000 --- a/billingstack/tests/api/base.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Base classes for API tests. 
-""" -import pecan.testing - -from billingstack.openstack.common import jsonutils as json -from billingstack.openstack.common import log -from billingstack.tests.base import ServiceTestCase - - -LOG = log.getLogger(__name__) - - -class APITestMixin(object): - PATH_PREFIX = None - - path = None - - def item_path(self, *args): - url = self.path + '/%s' - return url % args - - def _ensure_slash(self, path): - if not path.startswith('/'): - path = '/' + path - return path - - def make_path(self, path): - path = self._ensure_slash(path) - if self.PATH_PREFIX: - path = self._ensure_slash(self.PATH_PREFIX) + path - return path - - def _query(self, queries): - query_params = {'q.field': [], - 'q.value': [], - 'q.op': [], - } - for query in queries: - for name in ['field', 'op', 'value']: - query_params['q.%s' % name].append(query.get(name, '')) - return query_params - - def _params(self, params, queries): - all_params = {} - all_params.update(params) - if queries: - all_params.update(self._query(queries)) - return all_params - - def get(self, path, headers=None, q=[], status_code=200, - content_type="application/json", **params): - path = self.make_path(path) - all_params = self._params(params, q) - - LOG.debug('GET: %s %r', path, all_params) - - response = self.app.get( - path, - params=all_params, - headers=headers) - - LOG.debug('GOT RESPONSE: %s', response.body) - - self.assertEqual(response.status_code, status_code) - - return response - - def post(self, path, data, headers=None, content_type="application/json", - q=[], status_code=202): - path = self.make_path(path) - - LOG.debug('POST: %s %s', path, data) - - content = json.dumps(data) - response = self.app.post( - path, - content, - content_type=content_type, - headers=headers) - - LOG.debug('POST RESPONSE: %r' % response.body) - - self.assertEqual(response.status_code, status_code) - - return response - - def put(self, path, data, headers=None, content_type="application/json", - q=[], status_code=202, **params): - 
path = self.make_path(path) - - LOG.debug('PUT: %s %s', path, data) - - content = json.dumps(data) - response = self.app.put( - path, - content, - content_type=content_type, - headers=headers) - - LOG.debug('PUT RESPONSE: %r' % response.body) - - self.assertEqual(response.status_code, status_code) - - return response - - def patch_(self, path, data, headers=None, content_type="application/json", - q=[], status_code=200, **params): - path = self.make_path(path) - - LOG.debug('PUT: %s %s', path, data) - - content = json.dumps(data) - response = self.app.patch( - path, - content, - content_type=content_type, - headers=headers) - - LOG.debug('PATCH RESPONSE: %r', response.body) - - self.assertEqual(response.status_code, status_code) - - return response - - def delete(self, path, status_code=204, headers=None, q=[], **params): - path = self.make_path(path) - all_params = self._params(params, q) - - LOG.debug('DELETE: %s %r', path, all_params) - - response = self.app.delete(path, params=all_params) - - self.assertEqual(response.status_code, status_code) - - return response - - -class FunctionalTest(ServiceTestCase, APITestMixin): - """ - billingstack.api base test - """ - - def setUp(self): - super(FunctionalTest, self).setUp() - - # NOTE: Needs to be started after the db schema is created - self.start_storage('central') - self.start_service('central') - - self.start_storage('collector') - self.start_service('collector') - self.setSamples() - - self.app = self.make_app() - - def make_app(self): - self.config = { - 'app': { - 'root': 'billingstack.api.v2.controllers.root.RootController', - 'modules': ['billingstack.api'], - } - } - return pecan.testing.load_test_app(self.config) diff --git a/billingstack/tests/api/v2/__init__.py b/billingstack/tests/api/v2/__init__.py deleted file mode 100644 index 40d04f0..0000000 --- a/billingstack/tests/api/v2/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from billingstack.tests.api.base import FunctionalTest - - -class 
V2Test(FunctionalTest): - PATH_PREFIX = '/v2' diff --git a/billingstack/tests/api/v2/test_currency.py b/billingstack/tests/api/v2/test_currency.py deleted file mode 100644 index cdbd814..0000000 --- a/billingstack/tests/api/v2/test_currency.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific currency governing permissions and limitations -# under the License. -""" -Test Currency -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestCurrency(V2Test): - __test__ = True - path = "currencies" - - def test_create_currency(self): - fixture = self.get_fixture('currency', fixture=1) - - resp = self.post(self.path, fixture) - - self.assertData(fixture, resp.json) - - def test_list_currencies(self): - - resp = self.get(self.path) - - self.assertLen(1, resp.json) - - def test_get_currency(self): - _, currency = self.create_currency(fixture=1) - - url = self.item_path(currency['name']) - resp = self.get(url) - - self.assertData(resp.json, currency) - - def test_update_currency(self): - _, currency = self.create_currency(fixture=1) - - url = self.item_path(currency['name']) - resp = self.patch_(url, currency) - - self.assertData(resp.json, currency) - - def test_delete_currency(self): - _, currency = self.create_currency(fixture=1) - - url = self.item_path(currency['name']) - self.delete(url) - - data = self.services.central.list_currencies(self.admin_ctxt) - self.assertLen(1, data) diff --git 
a/billingstack/tests/api/v2/test_customer.py b/billingstack/tests/api/v2/test_customer.py deleted file mode 100644 index 791a3c5..0000000 --- a/billingstack/tests/api/v2/test_customer.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Test Customers. -""" - -from billingstack.tests.api.v2 import V2Test -from billingstack.api.v2.models import Customer - - -class TestCustomer(V2Test): - __test__ = True - path = "merchants/%s/customers" - - def fixture(self): - fixture = self.get_fixture('customer') - self._account_defaults(fixture) - expected = Customer.from_db(fixture).as_dict() - return expected - - def test_create_customer(self): - expected = self.fixture() - - url = self.path % self.merchant['id'] - - resp = self.post(url, expected) - - self.assertData(expected, resp.json) - - def test_list_customers(self): - url = self.path % self.merchant['id'] - - resp = self.get(url) - self.assertLen(0, resp.json) - - self.create_customer(self.merchant['id']) - - resp = self.get(url) - self.assertLen(1, resp.json) - - def test_get_customer(self): - _, customer = self.create_customer(self.merchant['id']) - - expected = Customer.from_db(customer).as_dict() - - url = self.item_path(self.merchant['id'], customer['id']) - resp = self.get(url) - - self.assertData(expected, resp.json) - - def test_update_customer(self): - _, customer = self.create_customer(self.merchant['id']) - - expected = 
Customer.from_db(customer).as_dict() - - expected['name'] = 'test' - - url = self.item_path(self.merchant['id'], customer['id']) - resp = self.patch_(url, customer) - - self.assertData(resp.json, customer) - - def test_delete_customer(self): - _, customer = self.create_customer(self.merchant['id']) - - url = self.item_path(self.merchant['id'], customer['id']) - self.delete(url) - - self.assertLen(0, self.services.central.list_customers( - self.admin_ctxt)) diff --git a/billingstack/tests/api/v2/test_invoice_state.py b/billingstack/tests/api/v2/test_invoice_state.py deleted file mode 100644 index c1d3672..0000000 --- a/billingstack/tests/api/v2/test_invoice_state.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -Test InvoiceState -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestInvoiceState(V2Test): - __test__ = True - path = "invoice_states" - - def setUp(self): - super(TestInvoiceState, self).setUp() - self.start_storage('biller') - self.start_service('biller') - - def test_create_invoice_state(self): - fixture = self.get_fixture('invoice_state') - - resp = self.post(self.path, fixture) - - self.assertData(fixture, resp.json) - - def test_list_invoice_states(self): - self.create_invoice_state() - - resp = self.get(self.path) - - self.assertLen(1, resp.json) - - def test_get_invoice_state(self): - _, state = self.create_invoice_state() - - url = self.item_path(state['name']) - resp = self.get(url) - - self.assertData(resp.json, state) - - def test_update_invoice_state(self): - _, state = self.create_invoice_state() - - url = self.item_path(state['name']) - resp = self.patch_(url, state) - - self.assertData(resp.json, state) - - def test_delete_invoice_state(self): - _, state = self.create_invoice_state() - - url = self.item_path(state['name']) - self.delete(url) - - data = self.services.biller.list_invoice_states(self.admin_ctxt) - self.assertLen(0, data) diff --git a/billingstack/tests/api/v2/test_language.py b/billingstack/tests/api/v2/test_language.py deleted file mode 100644 index 6e60e7d..0000000 --- a/billingstack/tests/api/v2/test_language.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Test Language -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestLanguage(V2Test): - __test__ = True - path = "languages" - - def test_create_language(self): - fixture = self.get_fixture('language', fixture=1) - - resp = self.post(self.path, fixture) - - self.assertData(fixture, resp.json) - - def test_list_languages(self): - - resp = self.get(self.path) - - self.assertLen(1, resp.json) - - def test_get_language(self): - _, language = self.create_language(fixture=1) - - url = self.item_path(language['name']) - resp = self.get(url) - - self.assertData(resp.json, language) - - def test_update_language(self): - _, language = self.create_language(fixture=1) - - url = self.item_path(language['name']) - resp = self.patch_(url, language) - - self.assertData(resp.json, language) - - def test_delete_language(self): - _, language = self.create_language(fixture=1) - - url = self.item_path(language['name']) - self.delete(url) - - data = self.services.central.list_languages(self.admin_ctxt) - self.assertLen(1, data) diff --git a/billingstack/tests/api/v2/test_merchant.py b/billingstack/tests/api/v2/test_merchant.py deleted file mode 100644 index 419a65f..0000000 --- a/billingstack/tests/api/v2/test_merchant.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Test Merchants -""" - -from billingstack.tests.api.v2 import V2Test -from billingstack.api.v2.models import Merchant - - -class TestMerchant(V2Test): - __test__ = True - - def fixture(self): - fixture = self.get_fixture('merchant') - self._account_defaults(fixture) - expected = Merchant.from_db(fixture).as_dict() - return expected - - def test_create_merchant(self): - expected = self.fixture() - - resp = self.post('merchants', expected) - - self.assertData(expected, resp.json) - - def test_list_merchants(self): - resp = self.get('merchants') - self.assertLen(1, resp.json) - - def test_get_merchant(self): - expected = Merchant.from_db(self.merchant).as_dict() - - resp = self.get('merchants/' + self.merchant['id']) - - self.assertData(expected, resp.json) - - def test_update_merchant(self): - expected = Merchant.from_db(self.merchant).as_dict() - - resp = self.patch_('merchants/' + self.merchant['id'], expected) - - self.assertData(expected, resp.json) - - def test_delete_merchant(self): - self.delete('merchants/' + self.merchant['id']) - self.assertLen(0, self.services.central.list_merchants( - self.admin_ctxt)) diff --git a/billingstack/tests/api/v2/test_payment_method.py b/billingstack/tests/api/v2/test_payment_method.py deleted file mode 100644 index cf3849e..0000000 --- a/billingstack/tests/api/v2/test_payment_method.py +++ /dev/null @@ -1,105 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -""" -Test Products -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestPaymentMethod(V2Test): - __test__ = True - path = "merchants/%s/customers/%s/payment_methods" - - def setUp(self): - super(TestPaymentMethod, self).setUp() - self.start_storage('collector') - self.start_service('collector') - _, self.provider = self.pg_provider_register() - - _, self.customer = self.create_customer(self.merchant['id']) - - values = { - 'provider_id': self.provider['id'], - 'merchant_id': self.merchant['id']} - _, self.pg_config = self.create_pg_config(values=values) - - def test_create_payment_method(self): - fixture = self.get_fixture('payment_method') - fixture['provider_config_id'] = self.pg_config['id'] - - url = self.path % (self.merchant['id'], self.customer['id']) - - resp = self.post(url, fixture) - - self.assertData(fixture, resp.json) - - def test_list_payment_methods(self): - values = { - 'provider_config_id': self.pg_config['id'], - 'customer_id': self.customer['id'] - } - self.create_payment_method(values=values) - - url = self.path % (self.merchant['id'], self.customer['id']) - resp = self.get(url) - - self.assertLen(1, resp.json) - - def test_get_payment_method(self): - values = { - 'provider_config_id': self.pg_config['id'], - 'customer_id': self.customer['id'] - } - _, method = self.create_payment_method(values=values) - - url = self.item_path(self.merchant['id'], - self.customer['id'], method['id']) - - resp = self.get(url) - - self.assertData(resp.json, method) - - def test_update_payment_method(self): - values = { - 'provider_config_id': self.pg_config['id'], - 'customer_id': self.customer['id'] - } - fixture, method = self.create_payment_method(values=values) - - url = self.item_path(self.merchant['id'], - self.customer['id'], method['id']) - - expected = dict(fixture, name='test2') - 
resp = self.patch_(url, expected) - self.assertData(expected, resp.json) - - def test_delete_payment_method(self): - values = { - 'provider_config_id': self.pg_config['id'], - 'customer_id': self.customer['id'] - } - _, method = self.create_payment_method(values=values) - - url = self.item_path(self.merchant['id'], - self.customer['id'], method['id']) - self.delete(url) - - self.assertLen(0, self.services.central.list_products(self.admin_ctxt)) diff --git a/billingstack/tests/api/v2/test_plan.py b/billingstack/tests/api/v2/test_plan.py deleted file mode 100644 index 5cc0360..0000000 --- a/billingstack/tests/api/v2/test_plan.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -Test Plans -""" - -from billingstack.tests.api.v2 import V2Test - - -class TestPlan(V2Test): - __test__ = True - path = "merchants/%s/plans" - - def test_create_plan(self): - fixture = self.get_fixture('plan') - - url = self.path % self.merchant['id'] - - resp = self.post(url, fixture) - - self.assertData(fixture, resp.json) - - def test_list_plans(self): - self.create_plan(self.merchant['id']) - - url = self.path % self.merchant['id'] - resp = self.get(url) - - self.assertLen(1, resp.json) - - def test_get_plan(self): - _, plan = self.create_plan(self.merchant['id']) - - url = self.item_path(self.merchant['id'], plan['id']) - resp = self.get(url) - - self.assertData(resp.json, plan) - - def test_update_plan(self): - _, plan = self.create_plan(self.merchant['id']) - plan['name'] = 'test' - - url = self.item_path(self.merchant['id'], plan['id']) - resp = self.patch_(url, plan) - - self.assertData(resp.json, plan) - - def test_delete_plan(self): - _, plan = self.create_plan(self.merchant['id']) - - url = self.item_path(self.merchant['id'], plan['id']) - self.delete(url) - - self.assertLen(0, self.services.central.list_plans(self.admin_ctxt)) diff --git a/billingstack/tests/api/v2/test_product.py b/billingstack/tests/api/v2/test_product.py deleted file mode 100644 index 3c3ffab..0000000 --- a/billingstack/tests/api/v2/test_product.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -Test Products -""" - -import logging - -from billingstack.tests.api.v2 import V2Test - -LOG = logging.getLogger(__name__) - - -class TestProduct(V2Test): - __test__ = True - path = "merchants/%s/products" - - def test_create_product(self): - fixture = self.get_fixture('product') - - url = self.path % self.merchant['id'] - resp = self.post(url, fixture) - - self.assertData(fixture, resp.json) - - def test_list_products(self): - self.create_product(self.merchant['id']) - - url = self.path % self.merchant['id'] - resp = self.get(url) - - self.assertLen(1, resp.json) - - def test_get_product(self): - _, product = self.create_product(self.merchant['id']) - - url = self.item_path(self.merchant['id'], product['id']) - resp = self.get(url) - - self.assertData(resp.json, product) - - def test_update_product(self): - _, product = self.create_product(self.merchant['id']) - product['name'] = 'test' - - url = self.item_path(self.merchant['id'], product['id']) - resp = self.patch_(url, product) - - self.assertData(resp.json, product) - - def test_delete_product(self): - _, product = self.create_product(self.merchant['id']) - - url = self.item_path(self.merchant['id'], product['id']) - self.delete(url) - - self.assertLen(0, self.services.central.list_products(self.admin_ctxt)) diff --git a/billingstack/tests/base.py b/billingstack/tests/base.py deleted file mode 100644 index 71db82b..0000000 --- a/billingstack/tests/base.py +++ /dev/null @@ -1,488 +0,0 @@ -import copy -import os -import shutil -import uuid - -import fixtures -import mox -import stubout -import testtools - -from oslo.config import cfg -# NOTE: Currently disabled -# from billingstack.openstack.common import policy -from billingstack import exceptions -from billingstack import paths -from billingstack import samples -from billingstack.storage import utils as storage_utils -from billingstack.openstack.common.context import RequestContext, \ - get_admin_context -from billingstack.openstack.common import 
importutils - - -cfg.CONF.import_opt( - 'rpc_backend', - 'billingstack.openstack.common.rpc.impl_fake') - - -CONF = cfg.CONF -CONF.import_opt('host', 'billingstack.netconf') - - -STORAGE_CACHE = {} - - -# Config Methods -def set_config(**kwargs): - group = kwargs.pop('group', None) - - for k, v in kwargs.iteritems(): - cfg.CONF.set_override(k, v, group) - - -class ConfFixture(fixtures.Fixture): - """Fixture to manage global conf settings.""" - - def __init__(self, conf): - self.conf = conf - - def setUp(self): - super(ConfFixture, self).setUp() - self.conf.set_default('host', 'fake-mini') - self.conf.set_default('fake_rabbit', True) - self.conf.set_default('rpc_backend', - 'billingstack.openstack.common.rpc.impl_fake') - self.conf.set_default('rpc_cast_timeout', 5) - self.conf.set_default('rpc_response_timeout', 5) - self.conf.set_default('verbose', True) - self.addCleanup(self.conf.reset) - - -class FixtureHelper(object): - """Underlying helper object for a StorageFixture to hold driver methods""" - - def __init__(self, fixture): - """ - :param fixture: The fixture object - """ - self.fixture = fixture - - def setUp(self): - """Runs pr test, typically a db reset or similar""" - - def pre_migrate(self): - """Run before migrations""" - - def migrate(self): - """Migrate the storage""" - - def post_migrate(self): - """This is executed after migrations""" - - def post_init(self): - """Runs at the end of the object initialization""" - - -class SQLAlchemyHelper(FixtureHelper): - def __init__(self, fixture): - super(SQLAlchemyHelper, self).__init__(fixture) - - self.sqlite_db = fixture.kw.get('sqlite_db') - self.sqlite_clean_db = fixture.kw.get('sqlite_clean_db') - self.testdb = None - - def setUp(self): - if self.fixture.database_connection == "sqlite://": - conn = self.fixture.connection.engine.connect() - conn.connection.executescript(self._as_string) - self.fixture.addCleanup(self.fixture.connection.engine.dispose) - else: - 
shutil.copyfile(paths.state_path_rel(self.sqlite_clean_db), - paths.state_path_rel(self.sqlite_db)) - - def pre_migrate(self): - self.fixture.connection.engine.dispose() - self.fixture.connection.engine.connect() - if self.fixture.database_connection == "sqlite://": - #https://github.com/openstack/nova/blob/master/nova/test.py#L82-L84 - pass - else: - testdb = paths.state_path_rel(self.sqlite_db) - if os.path.exists(testdb): - return - - def migrate(self): - self.fixture.connection.setup_schema() - - def post_init(self): - if self.fixture.database_connection == "sqlite://": - conn = self.fixture.connection.engine.connect() - try: - self._as_string = "".join( - l for l in conn.connection.iterdump()) - except Exception: - print "".join(l for l in conn.connection.iterdump()) - raise - self.fixture.connection.engine.dispose() - else: - cleandb = paths.state_path_rel(self.sqlite_clean_db) - shutil.copyfile(self.testdb, cleandb) - - -class StorageFixture(fixtures.Fixture): - """ - Storage fixture that for now just supports SQLAlchemy - """ - def __init__(self, svc, **kw): - self.svc = svc - self.kw = kw - - self.driver = kw.get('storage_driver', 'sqlalchemy') - self.database_connection = kw.get('database_connection', 'sqlite://') - - self.svc_group = 'service:%s' % self.svc - self.driver_group = '%s:%s' % (self.svc, self.driver) - - cfg.CONF.import_opt('storage_driver', 'billingstack.%s' % self.svc, - group=self.svc_group) - set_config(storage_driver=self.driver, group=self.svc_group) - - # FIXME: Workout a way to support the different storage types - self.helper = SQLAlchemyHelper(self) - - cfg.CONF.import_opt( - 'database_connection', - 'billingstack.%s.storage.impl_%s' % (self.svc, self.driver), - group=self.driver_group) - - set_config(database_connection=self.database_connection, - group=self.driver_group) - - self.connection = self.get_storage_connection(**kw) - - self.helper.pre_migrate() - self.helper.migrate() - self.helper.post_migrate() - 
self.helper.post_init() - - for hook in kw.get('hooks', []): - hook() - - def setUp(self): - super(StorageFixture, self).setUp() - self.helper.setUp() - - def get_storage_connection(self, **kw): - """ - Import the storage module for the service that we are going to act on, - then return a connection object for that storage module. - """ - return storage_utils.get_connection(self.svc, self.driver) - - -class ServiceFixture(fixtures.Fixture): - """Run service as a test fixture, semi-copied from Nova""" - - def __init__(self, name, host=None, **kwargs): - host = host and host or uuid.uuid4().hex - kwargs.setdefault('host', host) - kwargs.setdefault('binary', 'billingstack-%s' % name) - self.name = name - self.kwargs = kwargs - - self.cls = self.get_service(self.name) - - @staticmethod - def get_service(svc): - """ - Return a service - - :param service: The service. - """ - return importutils.import_class('billingstack.%s.service.Service' % - svc) - - def setUp(self): - super(ServiceFixture, self).setUp() - self.service = self.cls() - self.service.start() - - -class MoxStubout(fixtures.Fixture): - """Deal with code around mox and stubout as a fixture.""" - - def setUp(self): - super(MoxStubout, self).setUp() - # emulate some of the mox stuff, we can't use the metaclass - # because it screws with our generators - self.mox = mox.Mox() - self.stubs = stubout.StubOutForTesting() - self.addCleanup(self.stubs.UnsetAll) - self.addCleanup(self.stubs.SmartUnsetAll) - self.addCleanup(self.mox.UnsetStubs) - self.addCleanup(self.mox.VerifyAll) - - -class AssertMixin(object): - """ - Mixin to hold assert helpers. 
- - """ - def assertLen(self, expected_length, obj): - """ - Assert a length of a object - - :param obj: The object ot run len() on - :param expected_length: The length in Int that's expected from len(obj) - """ - self.assertEqual(len(obj), expected_length) - - def assertData(self, expected_data, data): - """ - A simple helper to very that at least fixture data is the same - as returned - - :param expected_data: Data that's expected - :param data: Data to check expected_data against - """ - for key, value in expected_data.items(): - self.assertEqual(data[key], value) - - def assertDuplicate(self, func, *args, **kw): - exception = kw.pop('exception', exceptions.Duplicate) - with testtools.ExpectedException(exception): - func(*args, **kw) - - def assertMissing(self, func, *args, **kw): - exception = kw.pop('exception', exceptions.NotFound) - with testtools.ExpectedException(exception): - func(*args, **kw) - - -class BaseTestCase(testtools.TestCase, AssertMixin): - """ - A base test class to be used for typically non-service kind of things. - """ - def setUp(self): - super(BaseTestCase, self).setUp() - - test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0) - try: - test_timeout = int(test_timeout) - except ValueError: - # If timeout value is invalid do not set a timeout. 
- test_timeout = 0 - if test_timeout > 0: - self.useFixture(fixtures.Timeout(test_timeout, gentle=True)) - - if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or - os.environ.get('OS_STDOUT_CAPTURE') == '1'): - stdout = self.useFixture(fixtures.StringStream('stdout')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) - if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or - os.environ.get('OS_STDERR_CAPTURE') == '1'): - stderr = self.useFixture(fixtures.StringStream('stderr')).stream - self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) - - self.log_fixture = self.useFixture(fixtures.FakeLogger()) - self.useFixture(ConfFixture(cfg.CONF)) - - mox_fixture = self.useFixture(MoxStubout()) - self.mox = mox_fixture - self.stubs = mox_fixture.stubs - self.addCleanup(self._clear_attrs) - self.useFixture(fixtures.EnvironmentVariable('http_proxy')) - #self.policy = self.useFixture(policy_fixture.PolicyFixture()) - - def _clear_attrs(self): - # Delete attributes that don't start with _ so they don't pin - # memory around unnecessarily for the duration of the test - # suite - for key in [k for k in self.__dict__.keys() if k[0] != '_']: - del self.__dict__[key] - - def get_fixture(self, name, fixture=0, values={}): - """ - Get a fixture from self.samples and override values if necassary - """ - _values = copy.copy(self.samples[name][fixture]) - _values.update(values) - return _values - - def path_get(self, project_file=None): - root = os.path.abspath(os.path.join(os.path.dirname(__file__), - '..', - '..', - ) - ) - if project_file: - return os.path.join(root, project_file) - else: - return root - - -class Services(dict): - def __getattr__(self, name): - if name not in self: - raise AttributeError(name) - return self[name] - - def __setattr__(self, name, value): - self[name] = value - - -class TestCase(BaseTestCase): - """Base test case for services etc""" - def setUp(self): - super(TestCase, self).setUp() - - self.samples = samples.get_samples() - 
self.admin_ctxt = self.get_admin_context() - - # NOTE: No services up by default - self.services = Services() - - def get_admin_context(self, **kw): - return get_admin_context(**kw) - - def get_context(self, **kw): - return RequestContext(**kw) - - def start_service(self, name, host=None, **kwargs): - fixture = self.useFixture(ServiceFixture(name, host, **kwargs)) - self.services[name] = fixture.service - return fixture - - def start_storage(self, name, **kw): - fixture = StorageFixture(name, **kw) - global STORAGE_CACHE - if not name in STORAGE_CACHE: - STORAGE_CACHE[name] = fixture - self.useFixture(STORAGE_CACHE[name]) - return fixture - - def setSamples(self): - _, self.currency = self.create_currency() - _, self.language = self.create_language() - _, self.merchant = self.create_merchant() - - def _account_defaults(self, values): - # NOTE: Do defaults - if not 'currency_name' in values: - values['currency_name'] = self.currency['name'] - - if not 'language_name' in values: - values['language_name'] = self.language['name'] - - def create_language(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_currency(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def crealfte_invoice_state(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def pg_provider_register(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_merchant(self, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_pg_config(self, merchant_id, fixture=0, values={}, - **kw): - raise NotImplementedError - - def create_customer(self, merchant_id, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_payment_method(self, customer_id, fixture=0, values={}, **kw): - raise NotImplementedError - - def user_add(self, merchant_id, fixture=0, values={}, **kw): - raise NotImplementedError - - def create_product(self, merchant_id, fixture=0, values={}, **kw): - raise NotImplementedError - - def 
create_plan(self, merchant_id, fixture=0, values={}, **kw): - raise NotImplementedError - - -class ServiceTestCase(TestCase): - """Testcase with some base methods when running in Service ish mode""" - def create_language(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('language', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_language(ctxt, fixture, - **kw) - - def create_currency(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('currency', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_currency(ctxt, fixture, - **kw) - - def create_invoice_state(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('invoice_state', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.biller.create_invoice_state( - ctxt, fixture, **kw) - - def pg_provider_register(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('pg_provider', fixture, values) - if 'methods' not in fixture: - fixture['methods'] = [self.get_fixture('pg_method')] - ctxt = kw.pop('context', self.admin_ctxt) - - data = self.services.collector.storage_conn.pg_provider_register( - ctxt, fixture, **kw) - - return fixture, data - - def create_merchant(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('merchant', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - self._account_defaults(fixture) - - return fixture, self.services.central.create_merchant( - ctxt, fixture, **kw) - - def create_pg_config(self, fixture=0, values={}, - **kw): - fixture = self.get_fixture('pg_config', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.collector.create_pg_config( - ctxt, fixture, **kw) - - def create_customer(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('customer', fixture, values) - ctxt = kw.pop('context', 
self.admin_ctxt) - self._account_defaults(fixture) - return fixture, self.services.central.create_customer( - ctxt, merchant_id, fixture, **kw) - - def create_payment_method(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('payment_method', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.collector.create_payment_method( - ctxt, fixture, **kw) - - def user_add(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('user', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.user_add( - ctxt, merchant_id, fixture, **kw) - - def create_product(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('product', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_product( - ctxt, merchant_id, fixture, **kw) - - def create_plan(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('plan', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.services.central.create_plan( - ctxt, merchant_id, fixture, **kw) diff --git a/billingstack/tests/biller/__init__.py b/billingstack/tests/biller/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/biller/storage/__init__.py b/billingstack/tests/biller/storage/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/central/__init__.py b/billingstack/tests/central/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/central/storage/__init__.py b/billingstack/tests/central/storage/__init__.py deleted file mode 100644 index bb6ed54..0000000 --- a/billingstack/tests/central/storage/__init__.py +++ /dev/null @@ -1,249 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this 
file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from billingstack.openstack.common import log as logging -from billingstack.central.storage.impl_sqlalchemy import models - - -LOG = logging.getLogger(__name__) - - -UUID = 'caf771fc-6b05-4891-bee1-c2a48621f57b' - - -class DriverMixin(object): - def create_language(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('language', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_language(ctxt, fixture, **kw) - - def create_currency(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('currency', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_currency(ctxt, fixture, **kw) - - def create_merchant(self, fixture=0, values={}, **kw): - fixture = self.get_fixture('merchant', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - self._account_defaults(fixture) - - return fixture, self.storage_conn.create_merchant(ctxt, fixture, **kw) - - def create_customer(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('customer', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - self._account_defaults(fixture) - return fixture, self.storage_conn.create_customer( - ctxt, merchant_id, fixture, **kw) - - def create_product(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('product', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_product( - ctxt, merchant_id, fixture, **kw) 
- - def create_plan(self, merchant_id, fixture=0, values={}, **kw): - fixture = self.get_fixture('plan', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_plan( - ctxt, merchant_id, fixture, **kw) - - # Currencies - def test_create_currency(self): - self.assertDuplicate(self.create_currency) - - # Languages - def test_create_language(self): - self.assertDuplicate(self.create_language) - - def test_set_properties(self): - fixture, data = self.create_product(self.merchant['id']) - - metadata = {"random": True} - self.storage_conn.set_properties(data['id'], metadata, - cls=models.Product) - - metadata.update({'foo': 1, 'bar': 2}) - self.storage_conn.set_properties(data['id'], metadata, - cls=models.Product) - - actual = self.storage_conn.get_product(self.admin_ctxt, data['id']) - self.assertLen(6, actual['properties']) - - # Merchant - def test_create_merchant(self): - fixture, data = self.create_merchant() - self.assertData(fixture, data) - - def test_get_merchant(self): - _, expected = self.create_merchant() - actual = self.storage_conn.get_merchant( - self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_merchant_missing(self): - self.assertMissing(self.storage_conn.get_merchant, - self.admin_ctxt, UUID) - - def test_update_merchant(self): - fixture, data = self.create_merchant() - - fixture['name'] = 'test' - updated = self.storage_conn.update_merchant( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_merchant_missing(self): - self.assertMissing(self.storage_conn.update_merchant, - self.admin_ctxt, UUID, {}) - - def test_delete_merchant(self): - self.storage_conn.delete_merchant(self.admin_ctxt, self.merchant['id']) - self.assertMissing(self.storage_conn.get_merchant, - self.admin_ctxt, self.merchant['id']) - - def test_delete_merchant_missing(self): - self.assertMissing(self.storage_conn.delete_merchant, - self.admin_ctxt, UUID) - 
- # Customer - def test_create_customer(self): - fixture, data = self.create_customer(self.merchant['id']) - assert data['default_info'] == {} - assert data['contact_info'] == [] - self.assertData(fixture, data) - - def test_create_customer_with_contact_info(self): - contact_fixture = self.get_fixture('contact_info') - customer_fixture, data = self.create_customer( - self.merchant['id'], - values={'contact_info': contact_fixture}) - self.assertData(customer_fixture, data) - self.assertData(contact_fixture, data['default_info']) - self.assertData(contact_fixture, data['contact_info'][0]) - - def test_get_customer(self): - _, expected = self.create_customer(self.merchant['id']) - actual = self.storage_conn.get_customer( - self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def test_get_customer_missing(self): - self.assertMissing(self.storage_conn.get_customer, - self.admin_ctxt, UUID) - - def test_update_customer(self): - fixture, data = self.create_customer(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_customer( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_customer_missing(self): - self.assertMissing(self.storage_conn.update_customer, - self.admin_ctxt, UUID, {}) - - def test_delete_customer(self): - _, data = self.create_customer(self.merchant['id']) - self.storage_conn.delete_customer(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_customer, - self.admin_ctxt, data['id']) - - def test_delete_customer_missing(self): - self.assertMissing(self.storage_conn.delete_customer, - self.admin_ctxt, UUID) - - # Products - def test_create_product(self): - f, data = self.create_product(self.merchant['id']) - self.assertData(f, data) - - def test_get_product(self): - f, expected = self.create_product(self.merchant['id']) - actual = self.storage_conn.get_product(self.admin_ctxt, expected['id']) - self.assertData(expected, actual) - - def 
test_get_product_missing(self): - self.assertMissing(self.storage_conn.get_product, - self.admin_ctxt, UUID) - - def test_update_product(self): - fixture, data = self.create_product(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_product( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_product_missing(self): - self.assertMissing(self.storage_conn.update_product, - self.admin_ctxt, UUID, {}) - - def test_delete_product(self): - fixture, data = self.create_product(self.merchant['id']) - self.storage_conn.delete_product(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_product, - self.admin_ctxt, data['id']) - - def test_delete_product_missing(self): - self.assertMissing(self.storage_conn.delete_product, - self.admin_ctxt, UUID) - - # Plan - def test_create_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - self.assertData(fixture, data) - - def test_get_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - actual = self.storage_conn.get_plan(self.admin_ctxt, data['id']) - - # FIXME(ekarlso): This should test the actual items also? But atm - # there's am error that if the value is int when getting added it's - # string when returned... 
- self.assertEqual(data['name'], actual['name']) - self.assertEqual(data['title'], actual['title']) - self.assertEqual(data['description'], actual['description']) - - def test_get_plan_missing(self): - self.assertMissing(self.storage_conn.get_plan, self.admin_ctxt, UUID) - - def test_update_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - - fixture['name'] = 'test' - updated = self.storage_conn.update_plan( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_plan_missing(self): - self.assertMissing(self.storage_conn.update_plan, - self.admin_ctxt, UUID, {}) - - def test_delete_plan(self): - fixture, data = self.create_plan(self.merchant['id']) - self.storage_conn.delete_plan(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_plan, - self.admin_ctxt, data['id']) - - def test_delete_plan_missing(self): - self.assertMissing(self.storage_conn.delete_plan, - self.admin_ctxt, UUID) diff --git a/billingstack/tests/central/storage/test_sqlalchemy.py b/billingstack/tests/central/storage/test_sqlalchemy.py deleted file mode 100644 index 38b7653..0000000 --- a/billingstack/tests/central/storage/test_sqlalchemy.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: billingstack -from billingstack.openstack.common import log as logging -from billingstack.tests.base import TestCase -from billingstack.tests.central.storage import DriverMixin - -LOG = logging.getLogger(__name__) - - -class SqlalchemyStorageTest(DriverMixin, TestCase): - def setUp(self): - super(SqlalchemyStorageTest, self).setUp() - fixture = self.start_storage('central') - self.storage_conn = fixture.connection - self.setSamples() diff --git a/billingstack/tests/collector/__init__.py b/billingstack/tests/collector/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/collector/storage/__init__.py b/billingstack/tests/collector/storage/__init__.py deleted file mode 100644 index 88bf34d..0000000 --- a/billingstack/tests/collector/storage/__init__.py +++ /dev/null @@ -1,293 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from billingstack.openstack.common import log as logging -from billingstack.openstack.common.uuidutils import generate_uuid - - -LOG = logging.getLogger(__name__) - - -UUID = generate_uuid() -MERCHANT_UUID = generate_uuid() -CUSTOMER_UUID = generate_uuid() - - -class DriverMixin(object): - def pg_provider_register(self, fixture=0, values={}, methods=[], **kw): - methods = [self.get_fixture('pg_method')] or methods - if not 'methods' in values: - values['methods'] = methods - - fixture = self.get_fixture('pg_provider', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - - data = self.storage_conn.pg_provider_register( - ctxt, fixture.copy(), **kw) - - return fixture, data - - def create_pg_config(self, fixture=0, values={}, - **kw): - fixture = self.get_fixture('pg_config', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_pg_config( - ctxt, fixture, **kw) - - def create_payment_method(self, fixture=0, - values={}, **kw): - fixture = self.get_fixture('payment_method', fixture, values) - ctxt = kw.pop('context', self.admin_ctxt) - return fixture, self.storage_conn.create_payment_method( - ctxt, fixture, **kw) - - # Payment Gateways - def test_pg_provider_register(self): - fixture, actual = self.pg_provider_register() - self.assertEqual(fixture['name'], actual['name']) - self.assertEqual(fixture['title'], actual['title']) - self.assertEqual(fixture['description'], actual['description']) - self.assertData(fixture['methods'][0], actual['methods'][0]) - - def test_pg_provider_register_different_methods(self): - # Add a Global method - method1 = {'type': 'creditcard', 'name': 'mastercard'} - method2 = {'type': 'creditcard', 'name': 'amex'} - method3 = {'type': 'creditcard', 'name': 'visa'} - - provider = {'name': 'noop', 'methods': [method1, method2, method3]} - - provider = self.storage_conn.pg_provider_register( - self.admin_ctxt, provider) - - # TODO(ekarls): Make this more extensive? 
- self.assertLen(3, provider['methods']) - - def test_get_pg_provider(self): - _, expected = self.pg_provider_register() - actual = self.storage_conn.get_pg_provider(self.admin_ctxt, - expected['id']) - self.assertData(expected, actual) - - def test_get_pg_provider_missing(self): - self.assertMissing(self.storage_conn.get_pg_provider, - self.admin_ctxt, UUID) - - def test_pg_provider_deregister(self): - _, data = self.pg_provider_register() - self.storage_conn.pg_provider_deregister(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.pg_provider_deregister, - self.admin_ctxt, data['id']) - - def test_pg_provider_deregister_missing(self): - self.assertMissing(self.storage_conn.pg_provider_deregister, - self.admin_ctxt, UUID) - - # Payment Gateway Configuration - def test_create_pg_config(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - fixture, data = self.create_pg_config(values=values) - - self.assertData(fixture, data) - - def test_get_pg_config(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - - fixture, data = self.create_pg_config(values=values) - - def test_get_pg_config_missing(self): - self.assertMissing(self.storage_conn.get_pg_config, - self.admin_ctxt, UUID) - - def test_update_pg_config(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - fixture, data = self.create_pg_config(values=values) - - fixture['properties'] = {"api": 1} - updated = self.storage_conn.update_pg_config( - self.admin_ctxt, data['id'], fixture) - - self.assertData(fixture, updated) - - def test_update_pg_config_missing(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - - fixture, data = self.create_pg_config(values=values) - - 
self.assertMissing(self.storage_conn.update_pg_config, - self.admin_ctxt, UUID, {}) - - def test_delete_pg_config(self): - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id']} - - fixture, data = self.create_pg_config(values=values) - - self.storage_conn.delete_pg_config(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_pg_config, - self.admin_ctxt, data['id']) - - def test_delete_pg_config_missing(self): - self.assertMissing(self.storage_conn.delete_pg_config, - self.admin_ctxt, UUID) - - # PaymentMethod - def test_create_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = self.create_pg_config(values=values) - - # Setup PaymentMethod - values = { - 'customer_id': CUSTOMER_UUID, - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method(values=values) - self.assertData(fixture, data) - - def test_get_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = self.create_pg_config(values=values) - - # Setup PaymentMethod - values = { - 'customer_id': CUSTOMER_UUID, - 'provider_config_id': config['id']} - - _, expected = self.create_payment_method(values=values) - actual = self.storage_conn.get_payment_method(self.admin_ctxt, - expected['id']) - self.assertData(expected, actual) - - # TODO(ekarlso): Make this test more extensive? 
- def test_list_payment_methods(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = self.create_pg_config(values=values) - - # Add two Customers with some methods - customer1_id = generate_uuid() - values = { - 'customer_id': customer1_id, - 'provider_config_id': config['id']} - self.create_payment_method(values=values) - rows = self.storage_conn.list_payment_methods( - self.admin_ctxt, - criterion={'customer_id': customer1_id}) - self.assertLen(1, rows) - - customer2_id = generate_uuid() - values = { - 'customer_id': customer2_id, - 'provider_config_id': config['id']} - self.create_payment_method(values=values) - self.create_payment_method(values=values) - rows = self.storage_conn.list_payment_methods( - self.admin_ctxt, - criterion={'customer_id': customer2_id}) - self.assertLen(2, rows) - - def test_get_payment_method_missing(self): - self.assertMissing(self.storage_conn.get_payment_method, - self.admin_ctxt, UUID) - - def test_update_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = self.create_pg_config(values=values) - - # Setup PaymentMethod - values = { - 'customer_id': CUSTOMER_UUID, - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method(values=values) - - fixture['identifier'] = 1 - updated = self.storage_conn.update_payment_method( - self.admin_ctxt, - data['id'], - fixture) - - self.assertData(fixture, updated) - - def test_update_payment_method_missing(self): - self.assertMissing(self.storage_conn.update_payment_method, - self.admin_ctxt, UUID, {}) - - def test_delete_payment_method(self): - # Setup pgp / pgm / pgc - _, provider = self.pg_provider_register() - - values = { - 'merchant_id': MERCHANT_UUID, - 'provider_id': provider['id'] - } - _, config = 
self.create_pg_config(values=values) - - # Setup PaymentMethod - values = { - 'customer_id': CUSTOMER_UUID, - 'provider_config_id': config['id']} - - fixture, data = self.create_payment_method(values=values) - - self.storage_conn.delete_payment_method(self.admin_ctxt, data['id']) - self.assertMissing(self.storage_conn.get_payment_method, - self.admin_ctxt, data['id']) - - def test_delete_payment_method_missing(self): - self.assertMissing(self.storage_conn.delete_payment_method, - self.admin_ctxt, UUID) diff --git a/billingstack/tests/collector/storage/test_sqlalchemy.py b/billingstack/tests/collector/storage/test_sqlalchemy.py deleted file mode 100644 index df654d2..0000000 --- a/billingstack/tests/collector/storage/test_sqlalchemy.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: billingstack -from billingstack.openstack.common import log as logging -from billingstack.tests.base import TestCase -from billingstack.tests.collector.storage import DriverMixin - -LOG = logging.getLogger(__name__) - - -class SqlalchemyStorageTest(DriverMixin, TestCase): - def setUp(self): - super(SqlalchemyStorageTest, self).setUp() - fixture = self.start_storage('collector') - self.storage_conn = fixture.connection diff --git a/billingstack/tests/payment_gateway/__init__.py b/billingstack/tests/payment_gateway/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/payment_gateway/base.py b/billingstack/tests/payment_gateway/base.py deleted file mode 100644 index 23b3bf9..0000000 --- a/billingstack/tests/payment_gateway/base.py +++ /dev/null @@ -1,63 +0,0 @@ -from billingstack.openstack.common import log -from billingstack.tests.base import TestCase - - -LOG = log.getLogger(__name__) - - -class ProviderTestCase(TestCase): - """ - Common set of tests for the API that all Providers should implement - """ - __test__ = False - - def setUp(self): - super(ProviderTestCase, self).setUp() - - info = self.get_fixture('contact_info') - _, self.customer = self.create_customer( - self.merchant['id'], - contact_info=info) - - _, self.provider = self.pg_provider_register() - - def test_create_account(self): - expected = self.pgp.create_account(self.customer) - actual = self.pgp.get_account(self.customer['id']) - self.assertEqual(expected['id'], actual['id']) - - def test_list_accounts(self): - self.pgp.create_account(self.customer) - actual = self.pgp.list_accounts() - self.assertLen(0, actual) - - def test_get_account(self): - expected = self.pgp.create_account(self.customer) - actual = self.pgp.get_account(self.customer['id']) - self.assertEqual(expected['id'], actual['id']) - - def test_delete_account(self): - data = self.pgp.create_account(self.customer) - self.pgp.delete_account(data['id']) - - def pm_create(self): - """ 
- Create all the necassary things to make a card - """ - fixture, data = self.create_payment_method( - self.customer['id'], - self.provider['methods'][0]['id']) - - self.pgp.create_account(self.customer) - return fixture, self.pgp.create_payment_method(data) - - def test_create_payment_method(self): - fixture, pm = self.pm_create() - - def test_list_payment_methods(self): - fixture, pm = self.pm_create() - assert len(self.pgp.list_payment_method(self.customer['id'])) == 1 - - def test_get_payment_method(self): - fixture, pm = self.pm_create() - assert pm == self.pgp.get_payment_method(pm['id']) diff --git a/billingstack/tests/rater/__init__.py b/billingstack/tests/rater/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/rater/storage/__init__.py b/billingstack/tests/rater/storage/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/storage/__init__.py b/billingstack/tests/storage/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/billingstack/tests/test_utils.py b/billingstack/tests/test_utils.py deleted file mode 100644 index 308fe95..0000000 --- a/billingstack/tests/test_utils.py +++ /dev/null @@ -1,22 +0,0 @@ -import unittest2 - - -from billingstack import exceptions -from billingstack import utils - - -class UtilsTests(unittest2.TestCase): - def test_get_currency(self): - currency = utils.get_currency('nok') - expected = {'name': u'nok', 'title': u'Norwegian Krone'} - self.assertEqual(expected, currency) - - def test_get_language(self): - lang = utils.get_language('nor') - expected = {'title': u'Norwegian', 'name': u'nor'} - self.assertEqual(expected, lang) - - def test_invalid_raises(self): - with self.assertRaises(exceptions.InvalidObject) as cm: - utils.get_language('random') - self.assertEqual(cm.exception.errors, {'terminology': 'random'}) diff --git a/billingstack/utils.py b/billingstack/utils.py deleted file mode 100644 index ca429cb..0000000 --- 
a/billingstack/utils.py +++ /dev/null @@ -1,147 +0,0 @@ -# -*- encoding: utf-8 -*- -## -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import os -import pycountry -import re -import time - -from oslo.config import cfg - -from billingstack import exceptions -from billingstack.openstack.common import log - - -LOG = log.getLogger(__name__) - - -def find_config(config_path): - """ - Find a configuration file using the given hint. - - Code nabbed from cinder. - - :param config_path: Full or relative path to the config. 
- :returns: List of config paths - """ - possible_locations = [ - config_path, - os.path.join(cfg.CONF.state_path, "etc", "billingstack", config_path), - os.path.join(cfg.CONF.state_path, "etc", config_path), - os.path.join(cfg.CONF.state_path, config_path), - "/etc/billingstack/%s" % config_path, - ] - - found_locations = [] - - for path in possible_locations: - LOG.debug('Searching for configuration at path: %s' % path) - if os.path.exists(path): - LOG.debug('Found configuration at path: %s' % path) - found_locations.append(os.path.abspath(path)) - - return found_locations - - -def read_config(prog, argv): - config_files = find_config('%s.conf' % prog) - - cfg.CONF(argv[1:], project='billingstack', prog=prog, - default_config_files=config_files) - - -def capital_to_underscore(string): - return "_".join(l.lower() for l in re.findall('[A-Z][^A-Z]*', - string)) - - -def underscore_to_capital(string): - return ''.join(x.capitalize() or '_' for x in string.split('_')) - - -def get_country(country_obj, **kw): - try: - obj = country_obj.get(**kw) - except KeyError: - raise exceptions.InvalidObject(errors=kw) - return dict([(k, v) for k, v in obj.__dict__.items() - if not k.startswith('_')]) - - -def get_currency(name): - obj = get_country(pycountry.currencies, letter=name.upper()) - return { - 'name': obj['letter'].lower(), - 'title': obj['name']} - - -def get_language(name): - obj = get_country(pycountry.languages, terminology=name) - data = {'name': obj['terminology'].lower(), 'title': obj['name']} - return data - - -def get_item_properties(item, fields, mixed_case_fields=[], formatters={}): - """Return a tuple containing the item properties. - - :param item: a single item resource (e.g. 
Server, Tenant, etc) - :param fields: tuple of strings with the desired field names - :param mixed_case_fields: tuple of field names to preserve case - :param formatters: dictionary mapping field names to callables - to format the values - """ - row = [] - - for field in fields: - if field in formatters: - row.append(formatters[field](item)) - else: - if field in mixed_case_fields: - field_name = field.replace(' ', '_') - else: - field_name = field.lower().replace(' ', '_') - if not hasattr(item, field_name) and \ - (isinstance(item, dict) and field_name in item): - data = item[field_name] - else: - data = getattr(item, field_name, '') - if data is None: - data = '' - row.append(data) - return tuple(row) - - -def get_columns(data): - """ - Some row's might have variable count of columns, ensure that we have the - same. - - :param data: Results in [{}, {]}] - """ - columns = set() - - def _seen(col): - columns.add(str(col)) - - map(lambda item: map(_seen, item.keys()), data) - return list(columns) - - -def unixtime(dt_obj): - """Format datetime object as unix timestamp - - :param dt_obj: datetime.datetime object - :returns: float - - """ - return time.mktime(dt_obj.utctimetuple()) diff --git a/billingstack/version.py b/billingstack/version.py deleted file mode 100644 index 5341162..0000000 --- a/billingstack/version.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2012 Managed I.T. -# -# Author: Kiall Mac Innes -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: Moniker -import pbr.version -version_info = pbr.version.VersionInfo('billingstack') diff --git a/bin/billingstack-db-manage b/bin/billingstack-db-manage deleted file mode 100755 index 4dc66b0..0000000 --- a/bin/billingstack-db-manage +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2012 New Dream Network, LLC (DreamHost) -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os -import sys -sys.path.insert(0, os.getcwd()) - -from billingstack.storage.impl_sqlalchemy.migration.cli import main - - -main() diff --git a/bin/billingstack-manage b/bin/billingstack-manage deleted file mode 100755 index 484e6c5..0000000 --- a/bin/billingstack-manage +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python -# -# Author: Endre Karlson -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# -# Copied: billingstack -import sys - -from oslo.config import cfg - -from billingstack import utils -from billingstack.manage import Shell - -# TODO: Sypport passing --config-file and --config-dir to read_config -utils.read_config('billingstack', []) - -shell = Shell() -sys.exit(shell.run(sys.argv[1:])) - diff --git a/bin/billingstack-rpc-zmq-receiver b/bin/billingstack-rpc-zmq-receiver deleted file mode 100755 index 77f9fde..0000000 --- a/bin/billingstack-rpc-zmq-receiver +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import eventlet -eventlet.monkey_patch() - -import contextlib -import os -import sys - -# If ../billingstack/__init__.py exists, add ../ to Python search path, so that -# it will override what happens to be installed in /usr/(local/)lib/python... 
-POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), - os.pardir, - os.pardir)) -if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'billingstack', '__init__.py')): - sys.path.insert(0, POSSIBLE_TOPDIR) - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging -from billingstack.openstack.common import rpc -from billingstack.openstack.common.rpc import impl_zmq - -CONF = cfg.CONF -CONF.register_opts(rpc.rpc_opts) -CONF.register_opts(impl_zmq.zmq_opts) - - -def main(): - CONF(sys.argv[1:], project='billingstack') - logging.setup("billingstack") - - with contextlib.closing(impl_zmq.ZmqProxy(CONF)) as reactor: - reactor.consume_in_thread() - reactor.wait() - -if __name__ == '__main__': - main() diff --git a/doc/requirements.txt b/doc/requirements.txt deleted file mode 100644 index 6b9d614..0000000 --- a/doc/requirements.txt +++ /dev/null @@ -1,23 +0,0 @@ -WebOb>=1.2 -eventlet -#pecan --e git://github.com/ryanpetrello/pecan.git@next#egg=pecan -stevedore -argparse --e hg+https://bitbucket.org/cdevienne/wsme/#egg=wsme -anyjson>=0.2.4 -pycountry -iso8601 -cliff -http://tarballs.openstack.org/oslo-config/oslo-config-2013.1b4.tar.gz#egg=oslo-config -unittest2 -nose -openstack.nose_plugin -nosehtmloutput -coverage -mock -mox -Babel>=0.9.6 -sphinx -sphinxcontrib-httpdomain -docutils==0.9.1 # for bug 1091333, remove after sphinx >1.1.3 is released. diff --git a/doc/source/api.rst b/doc/source/api.rst deleted file mode 100644 index b4aaa35..0000000 --- a/doc/source/api.rst +++ /dev/null @@ -1,11 +0,0 @@ -API Documenation -================ - -Contents: - -.. toctree:: - :maxdepth: 2 - :glob: - - api/* - diff --git a/doc/source/architecture.rst b/doc/source/architecture.rst deleted file mode 100644 index 2352e12..0000000 --- a/doc/source/architecture.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. 
- Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _architecture: - - -============ -Architecture -============ - -.. index:: - double: architecture; brief - -Brief overview -++++++++++++++ - :term:`pgp` PaymentGatewayProvider - Typically a provider like Braintree. - :term:`pgm` PaymentGatewayMethod - A provider method typically like Visa or - similar. - :term:`api` standard OpenStack alike REST api services diff --git a/doc/source/conf.py b/doc/source/conf.py deleted file mode 100644 index 479cc25..0000000 --- a/doc/source/conf.py +++ /dev/null @@ -1,242 +0,0 @@ -# -*- coding: utf-8 -*- -# -# billingstackclient documentation build configuration file, created by -# sphinx-quickstart on Wed Oct 31 18:58:17 2012. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys, os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. 
-#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'billingstack' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -from billingstack.version import version_info -version = version_info.canonical_version_string() -# The full version, including alpha/beta/rc tags. -release = version_info.version_string_with_vcs() - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. 
-#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. 
-#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'billingstack-doc' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'billingstack.tex', u'BillingStack Documentation', - u'Bouvet ASA', 'manual') -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. 
-#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'billingstack', u'BillingStack Documentation', - [u'Bouvet ASA'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'billingstack', u'BillingStack Documentation', - u'Bouvet ASA', 'billingstack', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-#texinfo_show_urls = 'footnote' diff --git a/doc/source/database.yuml b/doc/source/database.yuml deleted file mode 100644 index bc4a78b..0000000 --- a/doc/source/database.yuml +++ /dev/null @@ -1,37 +0,0 @@ -[PGMethod{bg:green}]1owner-1>[PGProvider] -[PGProvider{bg:green}]1-*>[PGMethod{bg:green}] -[ContactInfo]^-[CustomerInfo] -[Customer]1-*>[CustomerInfo] -[PGConfig]*-1>[PGProvider] -[Merchant]1-*>[User] -[Merchant]1-*>[PGConfig] -[Merchant]1-*>[Customer] -[Merchant]1-*>[Plan] -[Merchant]1-*>[Product] -[Merchant]*-1>[Currency{bg:green}] -[Merchant]*->[Language{bg:green}] -[Customer]1-*>[User] -[Customer]1-*>[Invoice] -[Customer]*-1>[Currency{bg:green}] -[Customer]*-1>[Language{bg:green}] -[Customer]1-*>[PaymentMethod] -[PaymentMethod]1-1>[PGMethod] -[User]1-*>[MerchantRole] -[User]1-*>[CustomerRole] -[User]1-1[ContactInfo] -[MerchantRole]1-1>[Merchant] -[CustomerRole]1-1>[Customer] -[Invoice]1-*>[InvoiceItems] -[Invoice]*-1>[InvoiceState] -[Invoice]*-1>[Currency] -[Invoice]*-1>[Merchant] -[Plan]1-*>[PlanItem] -[PlanItem]*-1>[Merchant] -[PlanItem]1-1>[Product] -[PlanItem]1-*>[Pricing] -[Product]1-*>[Pricing] -[Subscription]1-1>[Plan] -[Subscription]1-1>[Customer] -[Subscription]1-1>[PaymentMethod] -[Usage]*-1>[Subscription] -[Usage]1-1>[Product] diff --git a/doc/source/developing.rst b/doc/source/developing.rst deleted file mode 100644 index 73ff1ff..0000000 --- a/doc/source/developing.rst +++ /dev/null @@ -1,66 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - License for the specific language governing permissions and limitations - under the License. - -.. _developing: - -======================= -Developing BillingStack -======================= - - -Setting up a development environment -==================================== - -.. index:: - double: development; env - -There are 2 ways to setting up a development environment -* :doc:install/manual - Manual setup for a more distributed / semi production env -* This: :ref:`development-env` - -1. Clone the repo - see :ref:`cloning-git` for generic information:: - - $ git clone http://github.com/stackforge/billingstack - -2. Change directory to the BS directory:: - - $ cd billingstack - -3. Setup a virtualenv with all deps included for the core:: - - $ python tools/install_venv.py - -Now wait for it to be ready ( Take a coffe break? ) - -3. Active the virtualenv:: - - $ source .venv/bin/activate - -4. You're ready to have fun! - - -Running tests -============= - -Using tox you can test towards multiple different isolated environments. - -For example if you want to test your PEP8 coverage that is needed to pass for -a change to merge:: - - $ tox -e pep8 - -Running the actualy in Python 2.7 tests:: - - $ tox -e py27 -v -- -v \ No newline at end of file diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst deleted file mode 100644 index 05b7c16..0000000 --- a/doc/source/glossary.rst +++ /dev/null @@ -1,38 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - License for the specific language governing permissions and limitations - under the License. - -.. _architecture: - - -============ -Glossary -============ - -.. glossary:: - pgp - PaymentGatewayProvider - A plugin for PaymentGateways - pgm - PaymentGatewayMethod - A supported payment method by the PGP - api - Web API - central - The Central service that does CRUD operations and more in BS. - customer - An entity underneath :term:`merchant` that holds different data that - resembles a Customer in an external system like a Tenant, Project etc. - merchant - An entity that holds one or more users, can configure integration with - third party services like OpenStack ceilometer, configure api - credentials for API access etc. \ No newline at end of file diff --git a/doc/source/index.rst b/doc/source/index.rst deleted file mode 100644 index 3e18706..0000000 --- a/doc/source/index.rst +++ /dev/null @@ -1,28 +0,0 @@ -.. billingstack documentation master file, created by - sphinx-quickstart on Wed Oct 31 18:58:17 2012. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to BillingStack's documentation! -========================================== - -Contents: - -.. toctree:: - :maxdepth: 2 - - architecture - api - developing - glossary - install/index - resources/index - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff --git a/doc/source/install/common.rst b/doc/source/install/common.rst deleted file mode 100644 index 95a2e75..0000000 --- a/doc/source/install/common.rst +++ /dev/null @@ -1,85 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - -.. _system-deps:: - -System dependencies -=================== - -.. index:: - double: installing; common_steps - -.. note:: - The below operations should take place underneath your folder. - -Install module dependencies - -Debian, Ubuntu:: - - $ apt-get install python-pip python-lxml - -Fedora, Centos, RHEL:: - - $ yum install pip-python python-lxml - - -.. _storage-deps:: - -Storage dependencies -==================== - -.. index:: installing; storage - -Depending on the datastore that is currently supported and your pick of them -you need to install the underlying server and client libraries as well as -python bindings. - -See `System dependencies`_ before continuing. - -Example for MySQL on Debian, Ubuntu:: - - $ apt-get install mysql-server mysql-client libmysqlclient-dev - -Using MySQL bindings:: - - $ pip install MySQL-python - -Using oursql bindings (use 'mysql+oursql://.....' instead of 'mysql://'):: - - $ pip install oursql - - -.. _cloning-git:: - - -Cloning git repo -================ -1. Install GIT. - - On ubuntu you do the following:: - - $ apt-get install git-core - - On Fedora / Centos / RHEL:: - - $ apt-get install git - -2. Clone a BS repo off of Github:: - - $ git clone https://github.com/billingstack/ - $ cd - -3. Now continue with whatever other thing needs to be setup. \ No newline at end of file diff --git a/doc/source/install/index.rst b/doc/source/install/index.rst deleted file mode 100644 index 29673b6..0000000 --- a/doc/source/install/index.rst +++ /dev/null @@ -1,28 +0,0 @@ -.. 
- Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _install: - -======================== - Installing Billingstack -======================== - -.. toctree:: - :maxdepth: 2 - - common - manual - macos - pgp diff --git a/doc/source/install/macos.rst b/doc/source/install/macos.rst deleted file mode 100644 index 23b98e8..0000000 --- a/doc/source/install/macos.rst +++ /dev/null @@ -1,167 +0,0 @@ -.. - Copyright 2013 Luis Gervaso - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - - -============================= - Installing Manually (Mac OS) -============================= - -Common Steps -============ - -.. index:: - double: installing; common_steps - -.. note:: - The below operations should take place underneath your /etc folder. - -0. Install Homebrew - - Please, follow the steps described `here `_ - -1. Install system package dependencies:: - - $ brew install python --framework - $ brew install rabbitmq - - .. 
note:: - - To have launchd start rabbitmq at login: - ln -sfv /usr/local/opt/rabbitmq/*.plist ~/Library/LaunchAgents - Then to load rabbitmq now: - launchctl load ~/Library/LaunchAgents/homebrew.mxcl.rabbitmq.plist - Or, if you don't want/need launchctl, you can just run: - rabbitmq-server - - Start RabbitMQ:: - - $ rabbitmq-server - - RabbitMQ 3.1.1. Copyright (C) 2007-2013 VMware, Inc. - - ## ## Licensed under the MPL. See http://www.rabbitmq.com/ - ## ## - ########## Logs: /usr/local/var/log/rabbitmq/rabbit@localhost.log - ###### ## /usr/local/var/log/rabbitmq/rabbit@localhost-sasl.log - ########## - - Starting broker... completed with 7 plugins. - -2. Clone the BillingStack repo off of Github:: - - $ git clone https://github.com/billingstack/billingstack.git - $ cd billingstack - -3. Setup virtualenv and Install BillingStack and it's dependencies - - .. note:: - - This is to not interfere with system packages etc. - - :: - - $ pip install virtualenv - $ python tools/install_venv.py - $ . .venv/bin/activate - $ python setup.py develop - - .. warning:: - - ValueError: unknown locale: UTF-8. - - To fix it you will have to set these environment variables in your ~/.profile or ~/.bashrc manually: - - export LANG=en_US.UTF-8 - export LC_ALL=en_US.UTF-8 - - Copy sample configs to usable ones, inside the `etc` folder do - - - :: - - $ sudo cp -r etc/billingstack /etc - $ cd /etc/billingstack - $ sudo ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done - - .. note:: - - Change the wanted configuration settings to match your environment, the file - is in the `/etc/billingstack` folder:: - - :: - - $ vi /etc/billingstack/billingstack.conf - - -Installing Central -================== - -.. index:: - double: installing; central - -.. note:: - This is needed because it is the service that the API and others uses to - communicate with to do stuff in the Database. - -1. See `Common Steps`_ before proceeding. - -2. 
Create the DB for :term:`central` - - :: - - $ python tools/resync_billingstack.py - -3. Now you might want to load sample data for the time being - - :: - - $ python tools/load_samples.py - -4. Start the central service - - :: - - $ billingstack-central - - ... - - 2013-06-09 03:51:22 DEBUG [amqp] Open OK! - 2013-06-09 03:51:22 DEBUG [amqp] using channel_id: 1 - 2013-06-09 03:51:22 DEBUG [amqp] Channel open - 2013-06-09 03:51:22 INFO [...] Connected to AMQP server on localhost:5672 - 2013-06-09 03:51:22 DEBUG [...] Creating Consumer connection for Service central - - -Installing the API -==================== - -.. index:: - double: installing; api - -.. note:: - The API Server needs to able to talk via MQ to other services. - -1. See `Common Steps`_ before proceeding. - -2. Start the API service - - :: - - $ billingstack-api - - ... - - 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ \ No newline at end of file diff --git a/doc/source/install/manual.rst b/doc/source/install/manual.rst deleted file mode 100644 index 1a7f283..0000000 --- a/doc/source/install/manual.rst +++ /dev/null @@ -1,134 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - - -===================== - Installing Manually -===================== - -Common Steps -============ - -.. index:: - double: installing; common_steps - -.. note:: - The below operations should take place underneath your /etc folder. - -1. 
Install system package dependencies (Ubuntu) - - :: - - $ apt-get install python-pip - $ apt-get install rabbitmq-server - -2. Clone the BillingStack repo off of Github - - :: - - $ git clone https://github.com/stackforge/billingstack.git - $ cd billingstack - -3. Setup virtualenv and Install BillingStack and it's dependencies - - .. note:: - - This is to not interfere with system packages etc. - :: - - $ pip install virtualenv - $ python tools/install_venv.py - $ . .venv/bin/activate - $ python setup.py develop - - - Copy sample configs to usable ones, inside the `etc` folder do - - :: - - $ sudo cp -r etc/billingstack /etc - $ cd /etc/billingstack - $ sudo ls *.sample | while read f; do cp $f $(echo $f | sed "s/.sample$//g"); done - - .. note:: - - Change the wanted configuration settings to match your environment, the file - is in the `/etc/billingstack` folder - - :: - - $ vi /etc/billingstack/billingstack.conf - - -Installing Central -================== - -.. index:: - double: installing; central - -.. note:: - This is needed because it is the service that the API and others uses to - communicate with to do stuff in the Database. - -1. See `Common Steps`_ before proceeding. - -2. Create the DB for :term:`central` - - :: - - $ python tools/resync_billingstack.py - -3. Now you might want to load sample data for the time being - - :: - - $ python tools/load_samples.py - -4. Start the central service - - :: - - $ billingstack-central - - ... - - 2013-06-09 03:51:22 DEBUG [amqp] Open OK! - 2013-06-09 03:51:22 DEBUG [amqp] using channel_id: 1 - 2013-06-09 03:51:22 DEBUG [amqp] Channel open - 2013-06-09 03:51:22 INFO [...] Connected to AMQP server on localhost:5672 - 2013-06-09 03:51:22 DEBUG [...] Creating Consumer connection for Service central - - -Installing the API -==================== - -.. index:: - double: installing; api - -.. note:: - The API Server needs to able to talk via MQ to other services. - -1. See `Common Steps`_ before proceeding. - -2. 
Start the API service - - :: - - $ billingstack-api - - ... - - 2013-06-09 03:52:31 INFO [eventlet.wsgi] (2223) wsgi starting up on http://0.0.0.0:9091/ diff --git a/doc/source/install/packages.rst b/doc/source/install/packages.rst deleted file mode 100644 index a408c2e..0000000 --- a/doc/source/install/packages.rst +++ /dev/null @@ -1,34 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - - -===================== - Installing Packages -===================== - -Common Steps -============ - -.. index:: - double: installing; common_steps - - -1. apt-get install python-software-properties -2. apt-add-repository ppa:openstack-ubuntu-testing/grizzly-trunk-testing -3. echo "deb http://cloudistic.me/packages precise main" > /etc/apt/sources.list.d/billingstack.list -4. wget -q http://cloudistic.me/packages/pubkey.gpg -O- | apt-key add - -5. apt-get update -6. apt-get install billingstack-central billingstack-api \ No newline at end of file diff --git a/doc/source/install/pgp.rst b/doc/source/install/pgp.rst deleted file mode 100644 index bca05c6..0000000 --- a/doc/source/install/pgp.rst +++ /dev/null @@ -1,61 +0,0 @@ -.. - Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - -Installing a PGP -================ - -.. index: - double: installing; pgp - -.. note:: - This is REQUIRED to be installed on the same machine that has access to - the database and that has the billingstack-manage command. - -.. note:: - A PGP Can be installed either inside a virtualenv where the bs core is - installed or in a system wide install. - - -Python modules -============== - -1. Clone a provider repo off of github:: - - $ git clone git@github.com:billingstack/billingstack-braintree.git - -2. Install it in the SAME environment / virtualenv as the main billingstack core:: - - $ pip install -rtools/setup-requires -rtools/pip-requires -rtools/pip-options - $ python setup.py develop - - -Registering the PGP -=================== - -.. note:: - So while the module is actually installed Python wise, it's needed to - load up some data into the database so the system knows of its existance. - -1. Install the PGP module using the process described above. - -2. Register :term:`pgp` with it's :term:`pgm`:: - - $ billingstack-manage pg-register - -3. 
Check the logs that the utility gives and list out registered pgp's:: - - $ billingstack-manage pg-list - diff --git a/doc/source/payment.yuml b/doc/source/payment.yuml deleted file mode 100644 index 187ed04..0000000 --- a/doc/source/payment.yuml +++ /dev/null @@ -1,8 +0,0 @@ -[plugin.Provider]1-1>[models.PGProvider] -[models.PGProvider]*-*>[models.PGMethod] -[models.PGMethod]*-1>[models.PGProvider] -[models.PGConfig]*-1>[models.PGProvider] -[models.Merchant]1-*>[models.PGConfig] -[models.Subscription]1-1>[models.PaymentMethod] -[models.Customer]1-*>[models.PaymentMethod] -[models.PaymentMethod]1-1>[models.PGMethod] diff --git a/doc/source/resources/api_filtering.rst b/doc/source/resources/api_filtering.rst deleted file mode 100644 index f7c2f93..0000000 --- a/doc/source/resources/api_filtering.rst +++ /dev/null @@ -1,104 +0,0 @@ -.. - Copyright 2013 Endre Karlson - Copyright 2013 Luis Gervaso - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _filtering: - - -========================================== -Filtering in the API (Internally and REST) -========================================== - -.. index:: - double: api_filtering; brief - - -Filtering Operators -+++++++++++++++++++ - -.. note:: Some storage plugins may not support all operatirs. 
- - -================= =========== -Name Operators -================= =========== -Equals eq, ==, == -Not Equals ne, != -Greater or equal le, >= -Less or equal le, <= -Greater than >, gt -Less than <, lt -Like like -Not Like nlike -================= =========== - - -Filtering in REST API -+++++++++++++++++++++ - -You can filter using "query" parameters in the URL which works very much like -doing it in other places. - -For example querying for Merchants with a name that starts with 'Cloud' you can do it like the below. - -.. code:: - - http://localhost:9091/v1/merchants?q.field=name&q.op=like&q.value=Cloud% - - -Results in a internal criteria of: - -.. code:: - - {'name': {'field': 'name', 'op': 'like', 'value': 'Cloud%'}} - - -You can also pass multi field / value queries (Same as above but also language) - -.. code:: - - http://localhost:9091/v1/merchants?q.field=lang&q.field=name&q.op=eq&q.op=like&q.value=nor&q.value=Cloud% - - -Results in a internal critera of: - -.. code:: - - { - 'name': { - 'field': 'name', 'op': 'like', 'value': 'Cloud%' - }, - 'language': { - 'field': 'language', 'op': 'eq', 'value': 'nor' - } - } - -The Params in the URL are parsed to something usable by each service that it's -sent to. - - -Filtering internally -++++++++++++++++++++ - -Filtering internally when for example doing a call directly on a api method -or towards a API method that is available over RPC you can pass Criterion dicts -like mentioned above in the "Results in internal criteria of....". - -Basically it boils down to something like: - -.. code:: - - {'fieldname': 'value'} - {'fieldname': {'op': 'eq', 'value': 'value'}} \ No newline at end of file diff --git a/doc/source/resources/index.rst b/doc/source/resources/index.rst deleted file mode 100644 index e9bcc0a..0000000 --- a/doc/source/resources/index.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. 
- Copyright 2013 Endre Karlson - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _install: - -========================= -Resources in Billingstack -========================= - -.. toctree:: - :maxdepth: 2 - - api_filtering - subscriptions \ No newline at end of file diff --git a/doc/source/resources/subscriptions.rst b/doc/source/resources/subscriptions.rst deleted file mode 100644 index aa0775d..0000000 --- a/doc/source/resources/subscriptions.rst +++ /dev/null @@ -1,96 +0,0 @@ -.. - Copyright 2013 Endre Karlson - Copyright 2013 Luis Gervaso - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -.. _subscription: - - -============ -Subscription -============ - -.. index:: - double: subscription; brief - -Prerequisites -+++++++++++++ - -.. note:: BillingStack does not store merchant customer users. Merchant should manage authorization. - -1. Merchant and Plan created in BillingStack - -2. bs-admin Role create in Merchant Identity Manager (e.g keystone) - -Process -+++++++ - -.. 
note:: Try to outline a sample subscription creation process. - -1. User registers in the merchant portal application using the merchant identity manager (e.g keystone) - - POST /v2.0/users - -2. User login in the merchant portal application using merchant identity manager (e.g keystone) - - POST /v2.0/tokens - - At this point user has an unscoped token - -3. User decides to subscribe in one of the merchant plans - - 3.1 Using the merchan API key & secret portal gathers all the available plans from BillingStack - - GET /merchants//plans - - 3.2 User select the desired plan to subscribe in - - 3.1 Since the current token is unscoped it's necessary to create customer in BillingStack - - POST /merchant//customers - - Using the customer_id obtained from BillingStack a new OpenStack tenant is created - this special tenant should be named as : bs-customer- - - POST /v2.0/tenants - - PUT /v2.0/tenants//users//role/ - - PUT /v2.0/tenants//users//role/ - - Now it is necessary exchange the unscoped token to a scoped one - - POST /v2.0/tokens - - 3.2 BillingStack subscription is created for the BillingStack customer - - 3.2.1 Create the BillingStack Subscription - - POST /merchants//subscriptions - - 3.2.2 Create a new OpenStack tenant - - POST /tenants - - This tenant should be named bs-subscription- - - 3.2.3 Add OpenStack user to the recently created tenant - - PUT /tenants//users//roles/ - - 3.2.4 Update subscription resource attribute with the tenant id from OpenStack - - PATCH /merchants//subscriptions/ - -4. 
Now the subscription can start receiving usage data from ceilometer tied by resource attribute diff --git a/etc/billingstack/billingstack.conf.sample b/etc/billingstack/billingstack.conf.sample deleted file mode 100644 index 30c9e3f..0000000 --- a/etc/billingstack/billingstack.conf.sample +++ /dev/null @@ -1,106 +0,0 @@ -[DEFAULT] -######################## -## General Configuration -######################## -# Show more verbose log output (sets INFO log level output) -verbose = True - -# Show debugging output in logs (sets DEBUG log level output) -debug = True - -# Top-level directory for maintaining billingstack's state -#state_path = /var/lib/billingstack - -# Log directory -#logdir = /var/log/billingstack - -allowed_rpc_exception_modules = billingstack.exceptions, billingstack.openstack.common.exception - -# Enabled API Version 1 extensions -# #enabled_extensions_v1 = none - -# CORS settings -# cors_allowed_origin = * -# cors_max_age = 3600 - -[service:api] -# Address to bind the API server -# api_host = 0.0.0.0 - -# Port the bind the API server to -#api_port = 9001 - -################################################# -# Central service -################################################# -#----------------------- -# SQLAlchemy Storage -#----------------------- -[central:sqlalchemy] -# Database connection string - to configure options for a given implementation -# like sqlalchemy or other see below -#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack -#connection_debug = 100 -#connection_trace = False -#sqlite_synchronous = True -#idle_timeout = 3600 -#max_retries = 10 -#retry_interval = 10 - - -################################################# -# Biller service -################################################# - -#----------------------- -# SQLAlchemy Storage -#----------------------- -[biller:sqlalchemy] -# Database connection string - to configure options for a given implementation -# like sqlalchemy or other see below 
-#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack -#connection_debug = 100 -#connection_trace = False -#sqlite_synchronous = True -#idle_timeout = 3600 -#max_retries = 10 -#retry_interval = 10 - - -################################################# -# Collector service -################################################# - -#----------------------- -# SQLAlchemy Storage -#----------------------- -[collector:sqlalchemy] -# Database connection string - to configure options for a given implementation -# like sqlalchemy or other see below -#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack -#connection_debug = 100 -#connection_trace = False -#sqlite_synchronous = True -#idle_timeout = 3600 -#max_retries = 10 -#retry_interval = 10 - - -################################################# -# Rater service -################################################# - -#----------------------- -# SQLAlchemy Storage -#----------------------- -[rater:sqlalchemy] -# Database connection string - to configure options for a given implementation -# like sqlalchemy or other see below -#database_connection = mysql://billingstack:billingstack@localhost:3306/billingstack -#connection_debug = 100 -#connection_trace = False -#sqlite_synchronous = True -#idle_timeout = 3600 -#max_retries = 10 -#retry_interval = 10 - diff --git a/etc/billingstack/policy.json b/etc/billingstack/policy.json deleted file mode 100644 index 0967ef4..0000000 --- a/etc/billingstack/policy.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/openstack.conf b/openstack.conf deleted file mode 100644 index 0c83a12..0000000 --- a/openstack.conf +++ /dev/null @@ -1,30 +0,0 @@ -[DEFAULT] - -# The list of modules to copy from oslo-incubator.git -module=context -module=db -module=eventlet_backdoor -module=exception -module=excutils -module=fileutils -module=gettextutils -module=importutils -module=iniparser -module=iso8601 -module=jsonutils -module=local 
-module=lockutils -module=log -module=loopingcall -module=network_utils -module=notifier -module=processutils -module=rpc -module=service -module=threadgroup -module=timeutils -module=utils -module=uuidutils - -# Base -base=billingstack diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 580c5ee..0000000 --- a/requirements.txt +++ /dev/null @@ -1,28 +0,0 @@ -Babel>=1.3 -pbr>=0.5.21,<1.0 -# This file is managed by openstack-depends -argparse -cliff>=1.4.3 -eventlet>=0.13.0 -extras -pecan>=0.2.0 -iso8601>=0.1.8 -netaddr>=0.7.6 -oslo.config>=1.2.0 -Paste -PasteDeploy>=1.5.0 -Routes>=1.12.3 -stevedore>=0.10 -WebOb>=1.2.3,<1.3 -WSME>=0.5b6 -# Optional Stuff that is used by default -alembic>=0.4.1 -SQLAlchemy>=0.7.8,<=0.7.99 -kombu>=2.4.8 - -# Identity -python-memcached>=1.48 -passlib - -pycountry -taskflow diff --git a/run_tests.sh b/run_tests.sh deleted file mode 100755 index 5f3d2eb..0000000 --- a/run_tests.sh +++ /dev/null @@ -1,237 +0,0 @@ -#!/bin/bash - -set -eu - -function usage { - echo "Usage: $0 [OPTION]..." - echo "Run Nova's test suite(s)" - echo "" - echo " -V, --virtual-env Always use virtualenv. Install automatically if not present" - echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local environment" - echo " -s, --no-site-packages Isolate the virtualenv from the global Python environment" - echo " -r, --recreate-db Recreate the test database (deprecated, as this is now the default)." - echo " -n, --no-recreate-db Don't recreate the test database." - echo " -f, --force Force a clean re-build of the virtual environment. Useful when dependencies have been added." - echo " -u, --update Update the virtual environment with any newer package versions" - echo " -p, --pep8 Just run PEP8 and HACKING compliance check" - echo " -P, --no-pep8 Don't run static code checks" - echo " -c, --coverage Generate coverage report" - echo " -d, --debug Run tests with testtools instead of testr. This allows you to use the debugger." 
- echo " -h, --help Print this usage message" - echo " --hide-elapsed Don't print the elapsed time for each test along with slow test list" - echo " --virtual-env-path Location of the virtualenv directory" - echo " Default: \$(pwd)" - echo " --virtual-env-name Name of the virtualenv directory" - echo " Default: .venv" - echo " --tools-path Location of the tools directory" - echo " Default: \$(pwd)" - echo "" - echo "Note: with no options specified, the script will try to run the tests in a virtual environment," - echo " If no virtualenv is found, the script will ask if you would like to create one. If you " - echo " prefer to run tests NOT in a virtual environment, simply pass the -N option." - exit -} - -function process_options { - i=1 - while [ $i -le $# ]; do - case "${!i}" in - -h|--help) usage;; - -V|--virtual-env) always_venv=1; never_venv=0;; - -N|--no-virtual-env) always_venv=0; never_venv=1;; - -s|--no-site-packages) no_site_packages=1;; - -r|--recreate-db) recreate_db=1;; - -n|--no-recreate-db) recreate_db=0;; - -f|--force) force=1;; - -u|--update) update=1;; - -p|--pep8) just_pep8=1;; - -P|--no-pep8) no_pep8=1;; - -c|--coverage) coverage=1;; - -d|--debug) debug=1;; - --virtual-env-path) - (( i++ )) - venv_path=${!i} - ;; - --virtual-env-name) - (( i++ )) - venv_dir=${!i} - ;; - --tools-path) - (( i++ )) - tools_path=${!i} - ;; - -*) testropts="$testropts ${!i}";; - *) testrargs="$testrargs ${!i}" - esac - (( i++ )) - done -} - -tool_path=${tools_path:-$(pwd)} -venv_path=${venv_path:-$(pwd)} -venv_dir=${venv_name:-.venv} -with_venv=tools/with_venv.sh -always_venv=0 -never_venv=0 -force=0 -no_site_packages=0 -installvenvopts= -testrargs= -testropts= -wrapper="" -just_pep8=0 -no_pep8=0 -coverage=0 -debug=0 -recreate_db=1 -update=0 - -LANG=en_US.UTF-8 -LANGUAGE=en_US:en -LC_ALL=C - -process_options $@ -# Make our paths available to other scripts we call -export venv_path -export venv_dir -export venv_name -export tools_dir -export 
venv=${venv_path}/${venv_dir} - -if [ $no_site_packages -eq 1 ]; then - installvenvopts="--no-site-packages" -fi - -function init_testr { - if [ ! -d .testrepository ]; then - ${wrapper} testr init - fi -} - -function run_tests { - # Cleanup *pyc - ${wrapper} find . -type f -name "*.pyc" -delete - - if [ $debug -eq 1 ]; then - if [ "$testropts" = "" ] && [ "$testrargs" = "" ]; then - # Default to running all tests if specific test is not - # provided. - testrargs="discover ./billingstack/tests" - fi - ${wrapper} python -m testtools.run $testropts $testrargs - - # Short circuit because all of the testr and coverage stuff - # below does not make sense when running testtools.run for - # debugging purposes. - return $? - fi - - if [ $coverage -eq 1 ]; then - TESTRTESTS="$TESTRTESTS --coverage" - else - TESTRTESTS="$TESTRTESTS" - fi - - # Just run the test suites in current environment - set +e - testrargs=`echo "$testrargs" | sed -e's/^\s*\(.*\)\s*$/\1/'` - TESTRTESTS="$TESTRTESTS --testr-args='--subunit $testropts $testrargs'" - if [ setup.cfg -nt billingstack.egg-info/entry_points.txt ] - then - ${wrapper} python setup.py egg_info - fi - echo "Running \`${wrapper} $TESTRTESTS\`" - if ${wrapper} which subunit-2to1 2>&1 > /dev/null - then - # subunit-2to1 is present, testr subunit stream should be in version 2 - # format. Convert to version one before colorizing. - bash -c "${wrapper} $TESTRTESTS | ${wrapper} subunit-2to1 | ${wrapper} tools/colorizer.py" - else - bash -c "${wrapper} $TESTRTESTS | ${wrapper} tools/colorizer.py" - fi - RESULT=$? 
- set -e - - copy_subunit_log - - if [ $coverage -eq 1 ]; then - echo "Generating coverage report in covhtml/" - # Don't compute coverage for common code, which is tested elsewhere - ${wrapper} coverage combine - ${wrapper} coverage html --include='billingstack/*' --omit='billingstack/openstack/common/*' -d covhtml -i - fi - - return $RESULT -} - -function copy_subunit_log { - LOGNAME=`cat .testrepository/next-stream` - LOGNAME=$(($LOGNAME - 1)) - LOGNAME=".testrepository/${LOGNAME}" - cp $LOGNAME subunit.log -} - -function run_pep8 { - echo "Running flake8 ..." - bash -c "${wrapper} flake8" -} - - -TESTRTESTS="python setup.py testr" - -if [ $never_venv -eq 0 ] -then - # Remove the virtual environment if --force used - if [ $force -eq 1 ]; then - echo "Cleaning virtualenv..." - rm -rf ${venv} - fi - if [ $update -eq 1 ]; then - echo "Updating virtualenv..." - python tools/install_venv.py $installvenvopts - fi - if [ -e ${venv} ]; then - wrapper="${with_venv}" - else - if [ $always_venv -eq 1 ]; then - # Automatically install the virtualenv - python tools/install_venv.py $installvenvopts - wrapper="${with_venv}" - else - echo -e "No virtual environment found...create one? (Y/n) \c" - read use_ve - if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then - # Install the virtualenv and run the test suite in it - python tools/install_venv.py $installvenvopts - wrapper=${with_venv} - fi - fi - fi -fi - -# Delete old coverage data from previous runs -if [ $coverage -eq 1 ]; then - ${wrapper} coverage erase -fi - -if [ $just_pep8 -eq 1 ]; then - run_pep8 - exit -fi - -if [ $recreate_db -eq 1 ]; then - rm -f tests.sqlite -fi - -init_testr -run_tests - -# NOTE(sirp): we only want to run pep8 when we're running the full-test suite, -# not when we're running tests individually. To handle this, we need to -# distinguish between options (testropts), which begin with a '-', and -# arguments (testrargs). 
-if [ -z "$testrargs" ]; then - if [ $no_pep8 -eq 0 ]; then - run_pep8 - fi -fi diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index e0938f0..0000000 --- a/setup.cfg +++ /dev/null @@ -1,74 +0,0 @@ -[metadata] -name = billingstack -summary = Subscription based Billing in Python -description-file = - README.rst -author = Endre Karlson -author-email = dev@billingstack.org -home-page = http://www.billingstack.org/ -classifier = - Environment :: Any - Intended Audience :: Information Technology - Intended Audience :: Financial People - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 - Programming Language :: Python :: 2.6 - -[global] -setup-hooks = - pbr.hooks.setup_hook - -[files] -packages = - billingstack -scripts = - bin/billingstack-db-manage - bin/billingstack-manage - -[entry_points] -console_scripts = - billingstack-api = billingstack.api.app:start - billingstack-biller = billingstack.biller.service:launch - billingstack-central = billingstack.central.service:launch - billingstack-collector = billingstack.collector.service:launch - billingstack-rater = billingstack.rater.service:launch - -billingstack.central.storage = - sqlalchemy = billingstack.central.storage.impl_sqlalchemy:SQLAlchemyEngine - -billingstack.collector.storage = - sqlalchemy = billingstack.collector.storage.impl_sqlalchemy:SQLAlchemyEngine - - -billingstack.biller.storage = - sqlalchemy = billingstack.biller.storage.impl_sqlalchemy:SQLAlchemyEngine - -billingstack.rater.storage = - sqlalchemy = billingstack.rater.storage.impl_sqlalchemy:SQLAlchemyEngine - -billingstack.payment_gateway = - dummy = billingstack.payment_gateway.dummy:DummyProvider - -billingstack.manage = - pg-register = billingstack.manage.provider:ProvidersRegister - pg-list = billingstack.manage.provider:ProvidersList - -[build_sphinx] -source-dir = doc/source 
-build-dir = doc/build -all_files = 1 - -[upload_docs] -upload-dir = doc/build/html - -[nosetests] -cover-package = billingstack -cover-html = true -cover-erase = true -cover-inclusive = true -verbosity=2 -detailed-errors=1 -where=billingstack.tests diff --git a/setup.py b/setup.py deleted file mode 100644 index 70c2b3f..0000000 --- a/setup.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT -import setuptools - -setuptools.setup( - setup_requires=['pbr'], - pbr=True) diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 05e23e8..0000000 --- a/test-requirements.txt +++ /dev/null @@ -1,15 +0,0 @@ -# This file is managed by openstack-depends -coverage>=3.6 -discover -docutils==0.9.1 -flake8==2.0 -mock>=1.0 -mox>=0.5.3 -nose -nosehtmloutput>=0.0.3 -openstack.nose_plugin>=0.7 -python-subunit -sphinx>=1.1.2 -sphinxcontrib-httpdomain -testrepository>=0.0.17 -unittest2 diff --git a/tools/control.sh b/tools/control.sh deleted file mode 100755 index 33c9bf7..0000000 --- a/tools/control.sh +++ /dev/null @@ -1,255 +0,0 @@ -#!/usr/bin/env bash - -# script to help with BS - -# Dependencies: -# - functions - -# Save trace setting -XTRACE=$(set +o | grep xtrace) -set -x - -# Keep track of this directory -SCRIPT_DIR=$(cd $(dirname "$0") && pwd) -BASE_DIR=${BASE_DIR:-$SCRIPT_DIR/..} -CONFIG=${CONFIG:-$BASE_DIR/etc/billingstack/billingstack.conf} - -SCREEN_NAME=${SCREEN_NAME:-billingstack} -SCREEN_LOGDIR=${SCREEN_LOGDIR:-$BASE_DIR/logs} -SCREENRC=$BASE_DIR/$SCREEN_NAME-screenrc -USE_SCREEN=$(trueorfalse True $USE_SCREEN) - -SERVICE_DIR=${SERVICE_DIR:-$BASE_DIR/status} - -SERVICES="api,central,rater,biller,collector" - -function ensure_dir() { - local dir=$1 - [ ! 
-d "$dir" ] && { - echo "Attempting to create $dir" - mkdir -p $dir - } -} - - -# Normalize config values to True or False -# Accepts as False: 0 no false False FALSE -# Accepts as True: 1 yes true True TRUE -# VAR=$(trueorfalse default-value test-value) -function trueorfalse() { - local default=$1 - local testval=$2 - - [[ -z "$testval" ]] && { echo "$default"; return; } - [[ "0 no false False FALSE" =~ "$testval" ]] && { echo "False"; return; } - [[ "1 yes true True TRUE" =~ "$testval" ]] && { echo "True"; return; } - echo "$default" -} - - -# _run_process() is designed to be backgrounded by run_process() to simulate a -# fork. It includes the dirty work of closing extra filehandles and preparing log -# files to produce the same logs as screen_it(). The log filename is derived -# from the service name and global-and-now-misnamed SCREEN_LOGDIR -# _run_process service "command-line" -function _run_process() { - local service=$1 - local command="$2" - - # Undo logging redirections and close the extra descriptors - exec 1>&3 - exec 2>&3 - exec 3>&- - exec 6>&- - - if [[ -n ${SCREEN_LOGDIR} ]]; then - exec 1>&${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log 2>&1 - ln -sf ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log ${SCREEN_LOGDIR}/screen-${1}.log - - # TODO(dtroyer): Hack to get stdout from the Python interpreter for the logs. - export PYTHONUNBUFFERED=1 - fi - - exec /bin/bash -c "$command" - die "$service exec failure: $command" -} - - -# run_process() launches a child process that closes all file descriptors and -# then exec's the passed in command. This is meant to duplicate the semantics -# of screen_it() without screen. PIDs are written to -# $SERVICE_DIR/$SCREEN_NAME/$service.pid -# run_process service "command-line" -function run_process() { - local service=$1 - local command="$2" - - # Spawn the child process - _run_process "$service" "$command" & - echo $! 
-} - - - -# Helper to launch a service in a named screen -# screen_it service "command-line" -function screen_it { - - if is_service_enabled $1; then - # Append the service to the screen rc file - screen_rc "$1" "$2" - - if [[ "$USE_SCREEN" = "True" ]]; then - screen -S $SCREEN_NAME -X screen -t $1 - - if [[ -n ${SCREEN_LOGDIR} ]]; then - screen -S $SCREEN_NAME -p $1 -X logfile ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log - screen -S $SCREEN_NAME -p $1 -X log on - ln -sf ${SCREEN_LOGDIR}/screen-${1}.${CURRENT_LOG_TIME}.log ${SCREEN_LOGDIR}/screen-${1}.log - fi - - # sleep to allow bash to be ready to be send the command - we are - # creating a new window in screen and then sends characters, so if - # bash isn't running by the time we send the command, nothing happens - sleep 1.5 - - NL=`echo -ne '\015'` - screen -S $SCREEN_NAME -p $1 -X stuff "$2 || touch \"$SERVICE_DIR/$SCREEN_NAME/$1.failure\"$NL" - else - # Spawn directly without screen - run_process "$1" "$2" >$SERVICE_DIR/$SCREEN_NAME/$service.pid - fi - fi -} - - -# Screen rc file builder -# screen_rc service "command-line" -function screen_rc { - if [[ ! -e $SCREENRC ]]; then - # Name the screen session - echo "sessionname $SCREEN_NAME" > $SCREENRC - # Set a reasonable statusbar - echo "hardstatus alwayslastline '$SCREEN_HARDSTATUS'" >> $SCREENRC - echo "screen -t shell bash" >> $SCREENRC - fi - # If this service doesn't already exist in the screenrc file - if ! grep $1 $SCREENRC 2>&1 > /dev/null; then - NL=`echo -ne '\015'` - echo "screen -t $1 bash" >> $SCREENRC - echo "stuff \"$2$NL\"" >> $SCREENRC - fi -} - -# Uses global ``ENABLED_SERVICES`` -# is_service_enabled service [service ...] 
-function is_service_enabled() { - services=$@ - return 0 -} - - -function screen_setup() { - - # Set up logging of screen windows - # Set ``SCREEN_LOGDIR`` to turn on logging of screen windows to the - # directory specified in ``SCREEN_LOGDIR``, we will log to the the file - # ``screen-$SERVICE_NAME-$TIMESTAMP.log`` in that dir and have a link - # ``screen-$SERVICE_NAME.log`` to the latest log file. - # Logs are kept for as long specified in ``LOGDAYS``. - if [[ -n "$SCREEN_LOGDIR" ]]; then - - # We make sure the directory is created. - if [[ -d "$SCREEN_LOGDIR" ]]; then - # We cleanup the old logs - find $SCREEN_LOGDIR -maxdepth 1 -name screen-\*.log -mtime +$LOGDAYS -exec rm {} \; - else - ensure_dir $SCREEN_LOGDIR - fi - fi - - if [[ ! -d "$SERVICE_DIR/$SCREEN_NAME" ]]; then - mkdir -p "$SERVICE_DIR/$SCREEN_NAME" - fi - - USE_SCREEN=$(trueorfalse True $USE_SCREEN) - if [[ "$USE_SCREEN" == "True" ]]; then - # Create a new named screen to run processes in - screen -d -m -S $SCREEN_NAME -t shell -s /bin/bash - sleep 1 - - # Set a reasonable status bar - if [ -z "$SCREEN_HARDSTATUS" ]; then - SCREEN_HARDSTATUS='%{= .} %-Lw%{= .}%> %n%f %t*%{= .}%+Lw%< %-=%{g}(%{d}%H/%l%{g})' - fi - screen -r $SCREEN_NAME -X hardstatus alwayslastline "$SCREEN_HARDSTATUS" - fi - - # Clear screen rc file - SCREENRC=$BASE_DIR/$SCREEN_NAME-screenrc - if [[ -e $SCREENRC ]]; then - echo -n > $SCREENRC - fi -} - - -screen_is_running() { - # Check to see if we are already running DevStack - # Note that this may fail if USE_SCREEN=False - if type -p screen >/dev/null && screen -ls | egrep -q "[0-9].$SCREEN_NAME"; then - echo "Already running a session." - echo "To rejoin this session type 'screen -x $SCREEN_NAME'." - echo "To destroy this session, type './$0 stop'." 
- exit 1 - fi -} - - -function screen_destroy() { - SCREEN=$(which screen) - if [[ -n "$SCREEN" ]]; then - SESSION=$(screen -ls | awk '/[0-9].billingstack/ { print $1 }') - if [[ -n "$SESSION" ]]; then - screen -X -S $SESSION quit - fi - fi - - rm -f "$SERVICE_DIR/$SCREEN_NAME"/*.failure -} - - -function start_svc() { - svc="$(echo "$1" | sed 's/bs-//')" - echo "Starting service: $svc" - screen_it bs-$svc "billingstack-$svc --config-file $CONFIG" -} - - - -function start() { - local svc=$1 - - [ "$svc" == 'all' ] && { - for s in $(echo "$SERVICES" | tr ',' ' '); do - start_svc $s - done - return - } - start_svc $svc -} - - -case $1 in - start) - screen_is_running - screen_setup - - svc=$2 - [ -z "$svc" ] && svc=all - echo "Starting service(s): $svc" - start $svc - ;; - stop) - screen_destroy - ;; -esac diff --git a/tools/load_samples.py b/tools/load_samples.py deleted file mode 100644 index 0d8be1e..0000000 --- a/tools/load_samples.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python - -import sys - -from oslo.config import cfg - -from billingstack import service -from billingstack.samples import get_samples -from billingstack.storage.utils import get_connection -from billingstack.openstack.common.context import get_admin_context - - -cfg.CONF.import_opt('storage_driver', 'billingstack.central', - group='service:central') - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -cfg.CONF.import_opt( - 'database_connection', - 'billingstack.central.storage.impl_sqlalchemy', - group='central:sqlalchemy') - - -SAMPLES = get_samples() - - -def get_fixture(name, fixture=0, values={}): - f = SAMPLES[name][fixture].copy() - f.update(values) - return f - - -if __name__ == '__main__': - service.prepare_service(sys.argv) - conn = get_connection('central') - - samples = get_samples() - - ctxt = get_admin_context() - - currencies = {} - for c in samples['currency']: - currencies[c['name']] = conn.create_currency(ctxt, c) - - languages = {} - for l in 
samples['language']: - languages[l['name']] = conn.create_language(ctxt, l) - - country_data = { - "currency_name": currencies['nok']['name'], - "language_name": languages['nor']['name']} - - merchant = conn.create_merchant( - ctxt, get_fixture('merchant', values=country_data)) - - customer = conn.create_customer( - ctxt, merchant['id'], get_fixture('customer', values=country_data)) - - #contact_info = get_fixture('contact_info') - - #merchant_user = get_fixture('user') - #merchant_user['username'] = 'demo_merchant' - #merchant_user['contact_info'] = contact_info - - #merchant_user = conn.user_add( - #ctxt, merchant['id'], merchant_user) - - #customer_user = get_fixture('user') - #customer_user['username'] = 'demo_customer' - #customer_user['contact_info'] = contact_info - #customer_user['customer_id'] = customer['id'] - - #customer_user = conn.user_add( - # ctxt, - # merchant['id'], - # customer_user) - - products = {} - for p in samples['product']: - products[p['name']] = conn.create_product(ctxt, merchant['id'], p) - - values = { - 'plan_items': [ - {'product_id': products['memory']}, - {'product_id': products['vcpus']}, - {'product_id': products['root_disk_size']}, - {'product_id': products['network.incoming.bytes']}, - {'product_id': products['network.outgoing.bytes']}]} - - plan = get_fixture('plan', values=values) - - conn.create_plan(ctxt, merchant['id'], get_fixture('plan')) diff --git a/tools/resync_storage.py b/tools/resync_storage.py deleted file mode 100644 index dc87337..0000000 --- a/tools/resync_storage.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python - -import sys - -from oslo.config import cfg - -from billingstack.openstack.common import log as logging -from billingstack import service -from billingstack.storage.utils import get_connection - -# NOTE: make this based on entrypoints ? 
-SERVICES = ['biller', 'central', 'collector', 'rater'] - -LOG = logging.getLogger(__name__) - -cfg.CONF.import_opt('state_path', 'billingstack.paths') - -cfg.CONF.register_cli_opt(cfg.StrOpt('services', default=SERVICES)) -cfg.CONF.register_cli_opt(cfg.BoolOpt('resync', default=False)) - - -def resync_service_storage(service, resync=False): - """ - Resync the storage for a service - """ - connection = get_connection(service) - if resync: - connection.teardown_schema() - connection.setup_schema() - - -if __name__ == '__main__': - service.prepare_service(sys.argv) - - services = cfg.CONF.services - for svc in services: - LOG.info("Doing storage for %s" % svc) - resync_service_storage(svc, resync=cfg.CONF.resync) diff --git a/tools/with_venv.sh b/tools/with_venv.sh deleted file mode 100755 index 63f5b98..0000000 --- a/tools/with_venv.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -TOOLS=`dirname $0` -VENV=$TOOLS/../.venv -source $VENV/bin/activate && "$@" diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 50462a0..0000000 --- a/tox.ini +++ /dev/null @@ -1,39 +0,0 @@ -[tox] -envlist = py26,py27,pep8 - -[testenv] -#usedevelop = True -install_command = pip install {opts} {packages} -setenv = VIRTUAL_ENV={envdir} -deps = -r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt - setuptools_git>=0.4 -commands = python setup.py testr --slowest --testr-args='{posargs}' - -[tox:jenkins] -downloadcache = ~/cache/pip - -[testenv:pep8] -deps = flake8 -commands = - flake8 - -[testenv:cover] -commands = - python setup.py testr --coverage --testr-args='{posargs}' - -[testenv:venv] -commands = {posargs} - -[flake8] -# E711/E712 comparison to False should be 'if cond is False:' or 'if not cond:' -# query = query.filter(Component.disabled == False) -# E125 continuation line does not distinguish itself from next logical line -# H301 one import per line -# H302 import only modules -# TODO(marun) H404 multi line docstring should start with a summary -# TODO(marun) H901,902 use the not operator inline for clarity -# TODO(markmcclain) H202 assertRaises Exception too broad -ignore = E711,E712,E125,H301,H302,H404,H901,H902,H202 -show-source = true -exclude = .venv,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,tests,build