diff --git a/.gitignore b/.gitignore index f8395e9..cc1d3f5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,17 +1,18 @@ -*.pyc -.DS_STORE -build -dist *.egg-info +*.pyc *.pyo -.coverage -coverage -reports -run_it.py *.sublime-project *.sublime-workspace +.coverage +.DS_STORE .idea -junit build +coverage dist +junit +reports +run_it.py syncano.egg-info +.tox/* +test +.python-version diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 0000000..4879d18 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,5 @@ +[settings] +line_length=120 +multi_line_output=3 +default_section=THIRDPARTY +skip=base.py,.tox,conf.py diff --git a/.pypirc.template b/.pypirc.template new file mode 100644 index 0000000..527026a --- /dev/null +++ b/.pypirc.template @@ -0,0 +1,7 @@ +[distutils] # this tells distutils what package indexes you can push to +index-servers = pypi + +[pypi] +repository: https://pypi.python.org/pypi +username: +password: diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..9561fb1 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +include README.rst diff --git a/README.md b/README.md deleted file mode 100644 index b769961..0000000 --- a/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Syncano v4.0 - -[Docs](http://syncano.github.io/syncano-python/) - - -## Backwards incompatible changes - -Version 4.0 is designed for new release of Syncano platform and -it's **not compatible** with any previous releases. - -Code from `0.6.x` release is avalable on [stable/0.6.x](https://github.com/Syncano/syncano-python/tree/stable/0.6.x) branch -and it can be installed directly from pip via: - -``` -pip install syncano==0.6.2 --pre -``` \ No newline at end of file diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..d81fd05 --- /dev/null +++ b/README.rst @@ -0,0 +1,26 @@ +Syncano +======= + +Build Status +------------ + +**Master** + +.. image:: https://circleci.com/gh/Syncano/syncano-python/tree/master.svg?style=svg&circle-token=738c379fd91cc16b82758e6be89d0c21926655e0 + :target: https://circleci.com/gh/Syncano/syncano-python/tree/master + +**Develop** + +.. image:: https://circleci.com/gh/Syncano/syncano-python/tree/develop.svg?style=svg&circle-token=738c379fd91cc16b82758e6be89d0c21926655e0 + :target: https://circleci.com/gh/Syncano/syncano-python/tree/develop + +Python QuickStart Guide +----------------------- + +You can find quick start on installing and using Syncano's Python library in our `documentation `_. + +For more detailed information on how to use Syncano and its features - our `Developer Manual `_ should be very helpful. + +In case you need help working with the library - email us at libraries@syncano.com - we will be happy to help! + +You can also find library reference hosted on GitHub pages `here `_. diff --git a/circle.yml b/circle.yml index b150713..dcfc0a2 100644 --- a/circle.yml +++ b/circle.yml @@ -4,17 +4,14 @@ machine: dependencies: pre: - - pip install coverage>=3.7.1 - - pip install mock>=1.0.1 - - pip install flake8 + - pip install -U setuptools + - pip install -r requirements-test.txt + post: + - pyenv local 3.4.3 2.7.6 test: override: - - flake8 . 
- - coverage run -m unittest discover -p 'test*.py' - - coverage html -d coverage/unittest - - coverage run -m unittest discover -p 'integration_test*.py' - - coverage html -d coverage/integration + - tox general: artifacts: @@ -30,4 +27,5 @@ deployment: - pip install -r requirements-docs.txt - git config --global user.email "ci@circleci.com" - git config --global user.name "CircleCI" - - "cd docs && make gh-pages" \ No newline at end of file + - "cd docs && make gh-pages" + - ./release.sh diff --git a/docs/source/conf.py b/docs/source/conf.py index ee4c172..a753817 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -19,6 +19,8 @@ import sphinx_rtd_theme sys.path.insert(1, dirname(dirname(dirname(abspath(__file__))))) +from syncano.models.fields import RelatedManagerField + needs_sphinx = '1.0' extensions = [ @@ -108,3 +110,5 @@ autodoc_member_order = 'bysource' highlight_language = 'python' + +RelatedManagerField.__get__ = lambda self, *args, **kwargs: self diff --git a/docs/source/custom_sockets.rst b/docs/source/custom_sockets.rst new file mode 100644 index 0000000..787e564 --- /dev/null +++ b/docs/source/custom_sockets.rst @@ -0,0 +1,303 @@ +.. _custom-sockets: + +========================= +Custom Sockets in Syncano +========================= + +``Syncano`` gives its users the ability to create Custom Sockets. What this means is that users can define very specific +endpoints in their Syncano application, and use them exactly like they would any other Syncano +module (Classes, Scripts, etc), using standard API calls. +Currently, Custom Sockets allow only one dependency - Scripts. Under the hood, +each API call executes a Script, and the result of this execution is returned as a result of the +API call. + +Creating a custom Socket +------------------------ + +To create a custom Socket follow these steps:: + + import syncano + from syncano.models import CustomSocket, Endpoint, ScriptCall, ScriptDependency, RuntimeChoices + from syncano.connection import Connection + + # 1. Initialize a custom Socket. + custom_socket = CustomSocket(name='my_custom_socket') # this will create an object in place (do API call) + + # 2. Define endpoints. + my_endpoint = Endpoint(name='my_endpoint') # no API call here + my_endpoint.add_call(ScriptCall(name='custom_script', methods=['GET'])) + my_endpoint.add_call(ScriptCall(name='another_custom_script', methods=['POST'])) + + # What happened here: + # - We defined a new endpoint that will be visible under the name `my_endpoint` + # - You will be able to call this endpoint (execute attached `call`), + # by sending a request, using any defined method to the following API route: + # :///instances//endpoints/sockets/my_endpoint/ + # - To get details for that endpoint, you need to send a GET request to following API route: + # :///instances//sockets/my_custom_socket/endpoints/my_endpoint/ + # + # Following the example above - we defined two calls on our endpoint with the `add_call` method + # The first one means that using a GET method will call the `custom_script` Script, + # and second one means that using a POST method will call the `another_custom_script` Script. + # At the moment, only Scripts are available as endpoint calls. + # + # As a general rule - to get endpoint details (but not call them), use following API route: + # :///instances//sockets/my_custom_socket/endpoints// + # and to run your endpoints (e.g. execute Script connected to them), use following API route: + # :///instances//endpoints/sockets// + + # 3. 
After creating the endpoint, add it to your custom_socket.
+    custom_socket.add_endpoint(my_endpoint)
+
+    # 4. Define dependencies.
+    # 4.1 Using a new Script - define new source code.
+    custom_socket.add_dependency(
+        ScriptDependency(
+            Script(
+                runtime_name=RuntimeChoices.PYTHON_V5_0,
+                source='print("custom_script")'
+            ),
+            name='custom_script'
+        )
+    )
+    # 4.2 Using an existing Script.
+    another_custom_script = Script.please.get(id=2)
+    custom_socket.add_dependency(
+        ScriptDependency(
+            another_custom_script,
+            name='another_custom_script',
+        )
+    )
+
+    # 4.3 Using an existing ScriptEndpoint.
+    script_endpoint = ScriptEndpoint.please.get(name='script_endpoint_name')
+    custom_socket.add_dependency(
+        script_endpoint
+    )
+
+    # 5. Install custom_socket.
+    custom_socket.install()  # this will make an API call and install the Socket;
+
+It may take some time to set up the Socket, so it is worth checking its status::
+
+    # Reload will refresh the object using the Syncano API.
+    custom_socket.reload()
+    print(custom_socket.status)
+    # and
+    print(custom_socket.status_info)
+
+Updating the custom Socket
+--------------------------
+
+To update a custom Socket, use::
+
+    custom_socket = CustomSocket.please.get(name='my_custom_socket')
+
+    # to remove an endpoint/dependency:
+
+    custom_socket.remove_endpoint(endpoint_name='my_endpoint')
+    custom_socket.remove_dependency(dependency_name='custom_script')
+
+    # or to add a new endpoint/dependency:
+
+    custom_socket.add_endpoint(new_endpoint)  # see above code for endpoint examples;
+    custom_socket.add_dependency(new_dependency)  # see above code for dependency examples;
+
+    # save changes on Syncano
+
+    custom_socket.update()
+
+
+Running a custom Socket
+-----------------------
+
+To run a custom Socket, use::
+
+    # this will run `my_endpoint` - and call `custom_script` using the GET method;
+    result = custom_socket.run(method='GET', endpoint_name='my_endpoint')
+
+
+Read all endpoints in a custom Socket
+-------------------------------------
+
+To get all the endpoints defined in a custom Socket, run::
+
+    endpoints = custom_socket.get_endpoints()
+
+    for endpoint in endpoints:
+        print(endpoint.name)
+        print(endpoint.calls)
+
+To run a particular endpoint::
+
+    endpoint.run(method='GET')
+    # or:
+    endpoint.run(method='POST', data={'name': 'test_name'})
+
+Data will be passed to the API call in the request body.
+
+Read all endpoints
+------------------
+
+To get all endpoints that are defined in all custom Sockets::
+
+    socket_endpoint_list = SocketEndpoint.get_all_endpoints()
+
+The above code will return a list of SocketEndpoint objects. To run an endpoint,
+choose one first, e.g.::
+
+    endpoint = socket_endpoint_list[0]
+
+and now run it::
+
+    endpoint.run(method='GET')
+    # or:
+    endpoint.run(method='POST', data={'custom_data': 1})
+
+Custom Sockets endpoints
+------------------------
+
+Each custom Socket requires defining at least one endpoint. An endpoint is defined by a name and
+a list of calls. Each call is defined by its name and a list of methods. `name` identifies the dependency,
+e.g. if `name` is equal to 'my_script', the ScriptEndpoint named 'my_script' will be used
+(if it exists and the Script source and passed runtime match) -- otherwise a new one will be created.
+There's a special wildcard method: `methods=['*']` - this allows you to execute the provided custom Socket
+with any request method (GET, POST, PATCH, etc.).
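+
+For example, a minimal sketch of a wildcard call (the endpoint name `wildcard_endpoint` is only
+illustrative; the `Endpoint` and `ScriptCall` usage follows the examples shown earlier in this guide)::
+
+    wildcard_endpoint = Endpoint(name='wildcard_endpoint')  # hypothetical endpoint name
+    # a single call that reuses the `custom_script` dependency and accepts any HTTP method
+    wildcard_endpoint.add_call(ScriptCall(name='custom_script', methods=['*']))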
+
+To add an endpoint to a chosen custom_socket, use::
+
+    my_endpoint = Endpoint(name='my_endpoint')  # no API call here
+    my_endpoint.add_call(ScriptCall(name='custom_script', methods=['GET']))
+    my_endpoint.add_call(ScriptCall(name='another_custom_script', methods=['POST']))
+
+    custom_socket.add_endpoint(my_endpoint)
+
+Custom Socket dependency
+------------------------
+
+Each custom Socket has dependencies -- meta information for an endpoint describing which resource
+should be used to produce the API call results. These dependencies are bound to the endpoint's call objects.
+Currently, Script and Class dependencies are supported; both are shown below.
+
+**Using a new Script**
+
+::
+
+    custom_socket.add_dependency(
+        ScriptDependency(
+            Script(
+                runtime_name=RuntimeChoices.PYTHON_V5_0,
+                source='print("custom_script")'
+            ),
+            name='custom_script'
+        )
+    )
+
+
+**Using an existing Script**
+
+::
+
+    another_custom_script = Script.please.get(id=2)
+    custom_socket.add_dependency(
+        ScriptDependency(
+            another_custom_script,
+            name='another_custom_script'
+        )
+    )
+
+**Using an existing ScriptEndpoint**
+
+::
+
+    script_endpoint = ScriptEndpoint.please.get(name='script_endpoint_name')
+    custom_socket.add_dependency(
+        script_endpoint
+    )
+
+You can override the Script name in the following way::
+
+    script_endpoint = ScriptEndpoint.please.get(name='script_endpoint_name')
+    custom_socket.add_dependency(
+        script_endpoint,
+        name='custom_name'
+    )
+
+**Class dependency**
+
+A custom Socket with this dependency will check whether the given Class is defined - if it is not, the Class
+will be created. This allows you to define which Classes are used to store data for this particular custom Socket.
+
+::
+
+    custom_socket.add_dependency(
+        ClassDependency(
+            Class(
+                name='class_dep_test',
+                schema=[
+                    {'name': 'test', 'type': 'string'}
+                ]
+            ),
+        )
+    )
+
+Using an existing Class::
+
+    class_instance = Class.please.get(name='user_profile')
+    custom_socket.add_dependency(
+        ClassDependency(
+            class_instance
+        )
+    )
+
+Custom Socket recheck
+---------------------
+
+The creation of a Socket can fail - this can happen, for example, when an endpoint name is already taken by another
+custom Socket. To check the creation status, use::
+
+    print(custom_socket.status)
+    print(custom_socket.status_info)
+
+You can also re-check a Socket. This means that all dependencies will be checked - if some of them are missing
+(e.g. some were deleted by mistake), they will be created again. If the endpoints and dependencies do not meet
+the criteria, an error will be returned in the status field.
+
+Custom Socket - install from URL
+--------------------------------
+
+To install a Socket from a URL, use::
+
+    CustomSocket(name='new_socket_name').install_from_url(url='https://...')
+
+If the instance name was not provided in the connection arguments, do::
+
+    CustomSocket(name='new_socket_name').install_from_url(url='https://...', instance_name='instance_name')
+
+Custom Socket - raw format
+--------------------------
+
+If you prefer the raw JSON format for creating Sockets, the Python library allows you to do so::
+
+    CustomSocket.please.create(
+        name='my_custom_socket_3',
+        endpoints={
+            "my_endpoint_3": {
+                "calls":
+                [
+                    {"type": "script", "name": "my_script_3", "methods": ["POST"]}
+                ]
+            }
+        },
+        dependencies=[
+            {
+                "type": "script",
+                "runtime_name": "python_library_v5.0",
+                "name": "my_script_3",
+                "source": "print(3)"
+            }
+        ]
+    )
+
+The disadvantage of this method is that the developer must know the internal structure of the JSON.
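+
+Once installed, a Socket created from raw JSON can be used like the other examples in this guide. A short
+sketch, reusing the names from the JSON above and assuming the installation has already finished::
+
+    # fetch the Socket defined above and call its endpoint
+    socket_3 = CustomSocket.please.get(name='my_custom_socket_3')
+    result = socket_3.run(method='POST', endpoint_name='my_endpoint_3')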
diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index 3eed7eb..823b1b7 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -72,16 +72,17 @@ Making Connections >>> import syncano >>> connection = syncano.connect(email='YOUR_EMAIL', password='YOUR_PASSWORD') -If you want to connect directly to chosen instance you can use :func:`~syncano.connect_instance` function:: +If you want to use instance in connection you can use :func:`~syncano.connect` function, +then you can omit the instance_name in other calls:: >>> import syncano - >>> connection = syncano.connect_instance('instance_name', email='YOUR_EMAIL', password='YOUR_PASSWORD') + >>> connection = syncano.connect(instance_name='instance_name', email='YOUR_EMAIL', password='YOUR_PASSWORD') If you have obtained your ``Account Key`` from the website you can omit ``email`` & ``password`` and pass ``Account Key`` directly to connection: >>> import syncano >>> connection = syncano.connect(api_key='YOUR_API_KEY') - >>> connection = syncano.connect_instance('instance_name', api_key='YOUR_API_KEY') + >>> connection = syncano.connect(instance_name='instance_name', api_key='YOUR_API_KEY') Troubleshooting Connections @@ -127,8 +128,8 @@ Each model has a different set of fields and commands. For more information chec Next Steps ---------- -If you'd like more information on interacting with Syncano, check out the :ref:`interacting tutorial` or if you -want to know what kind of models are available check out the :ref:`available models ` list. +If you'd like more information on interacting with Syncano, check out the :ref:`interacting tutorial` +or if you want to know what kind of models are available check out the :ref:`available models ` list. diff --git a/docs/source/index.rst b/docs/source/index.rst index c6ba193..1aaa32c 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -19,7 +19,7 @@ Contents: getting_started interacting - models + custom_sockets refs/syncano diff --git a/docs/source/interacting.rst b/docs/source/interacting.rst index e4dc680..aab3e4f 100644 --- a/docs/source/interacting.rst +++ b/docs/source/interacting.rst @@ -206,8 +206,6 @@ to :meth:`~syncano.models.manager.Manager.list` method:: >>> ApiKey.please.list(instance_name='test-one') [...] - >>> ApiKey.please.list('test-one') - [...] This performs a **GET** request to ``/v1/instances/test-one/api_keys/``. @@ -226,7 +224,7 @@ all :class:`~syncano.models.base.Instance` objects will have backward relation t >>> instance = Instance.please.get('test-one') >>> instance.api_keys.list() [...] - >>> instance.api_keys.get(1) + >>> instance.api_keys.get(id=1) .. note:: @@ -268,4 +266,4 @@ Some settings can be overwritten via environmental variables e.g: $ export SYNCANO_INSTANCE=test .. warning:: - **DEBUG** loglevel will **disbale** SSL cert check. + **DEBUG** loglevel will **disable** SSL cert check. diff --git a/docs/source/models.rst b/docs/source/models.rst deleted file mode 100644 index 8e6217c..0000000 --- a/docs/source/models.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. _models: - -================ -Available models -================ - - -.. automodule:: syncano.models.base - :members: - :noindex: diff --git a/docs/source/refs/syncano.models.accounts.rst b/docs/source/refs/syncano.models.accounts.rst new file mode 100644 index 0000000..b97b663 --- /dev/null +++ b/docs/source/refs/syncano.models.accounts.rst @@ -0,0 +1,7 @@ +syncano.models.accounts +======================= + +.. 
automodule:: syncano.models.accounts + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.archetypes.rst b/docs/source/refs/syncano.models.archetypes.rst new file mode 100644 index 0000000..498041a --- /dev/null +++ b/docs/source/refs/syncano.models.archetypes.rst @@ -0,0 +1,7 @@ +syncano.models.archetypes +========================= + +.. automodule:: syncano.models.archetypes + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.base.rst b/docs/source/refs/syncano.models.base.rst deleted file mode 100644 index bfc76bd..0000000 --- a/docs/source/refs/syncano.models.base.rst +++ /dev/null @@ -1,7 +0,0 @@ -syncano.models.base -=================== - -.. automodule:: syncano.models.base - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/refs/syncano.models.billing.rst b/docs/source/refs/syncano.models.billing.rst new file mode 100644 index 0000000..a89281c --- /dev/null +++ b/docs/source/refs/syncano.models.billing.rst @@ -0,0 +1,7 @@ +syncano.models.billing +====================== + +.. automodule:: syncano.models.billing + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.channels.rst b/docs/source/refs/syncano.models.channels.rst new file mode 100644 index 0000000..e1f93a0 --- /dev/null +++ b/docs/source/refs/syncano.models.channels.rst @@ -0,0 +1,7 @@ +syncano.models.channels +======================= + +.. automodule:: syncano.models.channels + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.classes.rst b/docs/source/refs/syncano.models.classes.rst new file mode 100644 index 0000000..6482438 --- /dev/null +++ b/docs/source/refs/syncano.models.classes.rst @@ -0,0 +1,7 @@ +syncano.models.classes +====================== + +.. automodule:: syncano.models.classes + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.custom_response.rst b/docs/source/refs/syncano.models.custom_response.rst new file mode 100644 index 0000000..4fcfc3c --- /dev/null +++ b/docs/source/refs/syncano.models.custom_response.rst @@ -0,0 +1,7 @@ +syncano.models.custom_response +============================== + +.. automodule:: syncano.models.custom_response + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.custom_sockets.rst b/docs/source/refs/syncano.models.custom_sockets.rst new file mode 100644 index 0000000..3cecb71 --- /dev/null +++ b/docs/source/refs/syncano.models.custom_sockets.rst @@ -0,0 +1,7 @@ +syncano.models.custom_sockets +============================= + +.. automodule:: syncano.models.custom_sockets + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.custom_sockets_utils.rst b/docs/source/refs/syncano.models.custom_sockets_utils.rst new file mode 100644 index 0000000..dec7aba --- /dev/null +++ b/docs/source/refs/syncano.models.custom_sockets_utils.rst @@ -0,0 +1,7 @@ +syncano.models.custom_sockets_utils +=================================== + +.. automodule:: syncano.models.custom_sockets_utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.data_views.rst b/docs/source/refs/syncano.models.data_views.rst new file mode 100644 index 0000000..f4d810a --- /dev/null +++ b/docs/source/refs/syncano.models.data_views.rst @@ -0,0 +1,7 @@ +syncano.models.data_views +========================= + +.. 
automodule:: syncano.models.data_views + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.geo.rst b/docs/source/refs/syncano.models.geo.rst new file mode 100644 index 0000000..d9eee0a --- /dev/null +++ b/docs/source/refs/syncano.models.geo.rst @@ -0,0 +1,7 @@ +syncano.models.geo +================== + +.. automodule:: syncano.models.geo + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.hosting.rst b/docs/source/refs/syncano.models.hosting.rst new file mode 100644 index 0000000..48a9639 --- /dev/null +++ b/docs/source/refs/syncano.models.hosting.rst @@ -0,0 +1,7 @@ +syncano.models.hosting +====================== + +.. automodule:: syncano.models.hosting + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.incentives.rst b/docs/source/refs/syncano.models.incentives.rst new file mode 100644 index 0000000..a387150 --- /dev/null +++ b/docs/source/refs/syncano.models.incentives.rst @@ -0,0 +1,7 @@ +syncano.models.incentives +========================= + +.. automodule:: syncano.models.incentives + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.instances.rst b/docs/source/refs/syncano.models.instances.rst new file mode 100644 index 0000000..725f813 --- /dev/null +++ b/docs/source/refs/syncano.models.instances.rst @@ -0,0 +1,7 @@ +syncano.models.instances +======================== + +.. automodule:: syncano.models.instances + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.push_notification.rst b/docs/source/refs/syncano.models.push_notification.rst new file mode 100644 index 0000000..ea58efe --- /dev/null +++ b/docs/source/refs/syncano.models.push_notification.rst @@ -0,0 +1,7 @@ +syncano.models.push_notification +================================ + +.. automodule:: syncano.models.push_notification + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/refs/syncano.models.rst b/docs/source/refs/syncano.models.rst index f89b391..e22a4b5 100644 --- a/docs/source/refs/syncano.models.rst +++ b/docs/source/refs/syncano.models.rst @@ -6,11 +6,21 @@ Submodules .. toctree:: - syncano.models.base - syncano.models.fields syncano.models.manager + syncano.models.accounts + syncano.models.archetypes + syncano.models.billing + syncano.models.channels + syncano.models.classes + syncano.models.custom_response + syncano.models.data_views + syncano.models.incentives + syncano.models.instances + syncano.models.fields syncano.models.options + syncano.models.push_notification syncano.models.registry + syncano.models.traces Module contents --------------- diff --git a/docs/source/refs/syncano.models.traces.rst b/docs/source/refs/syncano.models.traces.rst new file mode 100644 index 0000000..e530491 --- /dev/null +++ b/docs/source/refs/syncano.models.traces.rst @@ -0,0 +1,7 @@ +syncano.models.traces +===================== + +.. 
automodule:: syncano.models.traces + :members: + :undoc-members: + :show-inheritance: diff --git a/release.sh b/release.sh new file mode 100755 index 0000000..0682dbc --- /dev/null +++ b/release.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +sed "s//$PYPI_USER/;s//$PYPI_PASSWORD/" < ~/syncano-python/.pypirc.template > ~/.pypirc +python setup.py register -r pypi +python setup.py sdist upload -r pypi diff --git a/requirements-test.txt b/requirements-test.txt new file mode 100644 index 0000000..aff493c --- /dev/null +++ b/requirements-test.txt @@ -0,0 +1 @@ +tox==2.3.1 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..70cdd96 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,15 @@ +Unidecode==0.4.18 +coverage==3.7.1 +pep8==1.5.7 +flake8==2.4.1 +funcsigs==0.4 +isort==4.0.0 +mccabe==0.3.1 +mock==1.3.0 +nose==1.3.7 +pbr==1.6.0 +pyflakes==0.8.1 +python-slugify==0.1.0 +requests==2.7.0 +six==1.9.0 +validictory==1.0.0 diff --git a/run_tests.sh b/run_tests.sh new file mode 100755 index 0000000..ddc6f6b --- /dev/null +++ b/run_tests.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -e + +flake8 . +isort --recursive --check-only . + +coverage run -m unittest discover -p 'test*.py' +coverage html -d coverage/unittest +coverage run -m unittest discover -p 'integration_test*.py' +coverage html -d coverage/integration diff --git a/setup.py b/setup.py index 9e6815a..587588b 100644 --- a/setup.py +++ b/setup.py @@ -1,10 +1,9 @@ from setuptools import find_packages, setup - from syncano import __version__ def readme(): - with open('README.md') as f: + with open('README.rst') as f: return f.read() setup( @@ -12,23 +11,26 @@ def readme(): version=__version__, description='Python Library for syncano.com api', long_description=readme(), - author='Daniel Kopka', - author_email='daniel.kopka@syncano.com', + author='Syncano', + author_email='support@syncano.io', url='http://syncano.com', - packages=find_packages(), - test_suite='tests', + packages=find_packages(exclude=['tests']), zip_safe=False, classifiers=[ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.4', ], install_requires=[ 'requests==2.7.0', - 'certifi', - 'six==1.9.0', + 'certifi==2015.09.06.2', + 'ndg-httpsclient==0.4.0', + 'pyasn1==0.1.8', + 'pyOpenSSL==0.15.1', 'python-slugify==0.1.0', + 'six==1.9.0', 'validictory==1.0.0', ], tests_require=[ diff --git a/syncano/__init__.py b/syncano/__init__.py index 90d5f77..a50f4be 100644 --- a/syncano/__init__.py +++ b/syncano/__init__.py @@ -2,10 +2,14 @@ import os __title__ = 'Syncano Python' -__version__ = '4.0.0' -__author__ = 'Daniel Kopka' +__version__ = '5.4.6' +__author__ = "Daniel Kopka, Michal Kobus and Sebastian Opalczynski" +__credits__ = ["Daniel Kopka", + "Michal Kobus", + "Sebastian Opalczynski", + "Robert Kopaczewski"] +__copyright__ = 'Copyright 2016 Syncano' __license__ = 'MIT' -__copyright__ = 'Copyright 2015 Syncano' env_loglevel = os.getenv('SYNCANO_LOGLEVEL', 'INFO') loglevel = getattr(logging, env_loglevel.upper(), None) @@ -29,6 +33,7 @@ PASSWORD = os.getenv('SYNCANO_PASSWORD') APIKEY = os.getenv('SYNCANO_APIKEY') INSTANCE = os.getenv('SYNCANO_INSTANCE') +PUSH_ENV = os.getenv('SYNCANO_PUSH_ENV', 'production') def connect(*args, **kwargs): @@ -42,7 +47,16 @@ def connect(*args, **kwargs): :param password: Your Syncano password :type api_key: string - :param api_key: 
Your Syncano account key + :param api_key: Your Syncano account key or instance api_key + + :type username: string + :param username: Instance user name + + :type user_key: string + :param user_key: Instance user key + + :type instance_name: string + :param instance_name: Your Syncano instance_name :type verify_ssl: boolean :param verify_ssl: Verify SSL certificate @@ -52,45 +66,25 @@ def connect(*args, **kwargs): Usage:: + # Admin login connection = syncano.connect(email='', password='') + # OR connection = syncano.connect(api_key='') - """ - from syncano.connection import default_connection - from syncano.models import registry - - default_connection.open(*args, **kwargs) - if INSTANCE: - registry.set_default_instance(INSTANCE) - return registry + # OR + connection = syncano.connect(social_backend='github', token='sfdsdfsdf') - -def connect_instance(name=None, *args, **kwargs): + # User login + connection = syncano.connect(username='', password='', api_key='', instance_name='') + # OR + connection = syncano.connect(user_key='', api_key='', instance_name='') """ - Connects with Syncano API and tries to load instance with provided name. - - :type name: string - :param name: Chosen instance name - - :type email: string - :param email: Your Syncano account email address - - :type password: string - :param password: Your Syncano password - - :type api_key: string - :param api_key: Your Syncano account key - - :type verify_ssl: boolean - :param verify_ssl: Verify SSL certificate - - :rtype: :class:`syncano.models.base.Instance` - :return: Instance object + from syncano.connection import DefaultConnection + from syncano.models import registry - Usage:: + registry.set_default_connection(DefaultConnection()) + registry.connection.open(*args, **kwargs) + instance = kwargs.get('instance_name', INSTANCE) - my_instance = syncano.connect_instance('my_instance_name', email='', password='') - my_instance = syncano.connect_instance('my_instance_name', api_key='') - """ - name = name or INSTANCE - connection = connect(*args, **kwargs) - return connection.Instance.please.get(name) + if instance is not None: + registry.set_used_instance(instance) + return registry diff --git a/syncano/connection.py b/syncano/connection.py index 5093218..f7c54f9 100644 --- a/syncano/connection.py +++ b/syncano/connection.py @@ -1,14 +1,19 @@ import json +import time from copy import deepcopy -from urlparse import urljoin import requests import six - import syncano -from syncano.exceptions import SyncanoRequestError, SyncanoValueError +from syncano.exceptions import RevisionMismatchException, SyncanoRequestError, SyncanoValueError + +if six.PY3: + from urllib.parse import urljoin +else: + from urlparse import urljoin + -__all__ = ['default_connection', 'Connection', 'ConnectionMixin'] +__all__ = ['Connection', 'ConnectionMixin'] def is_success(code): @@ -44,34 +49,99 @@ def open(self, *args, **kwargs): return connection -default_connection = DefaultConnection() - - class Connection(object): """Base connection class. 
:ivar host: Syncano API host :ivar email: Your Syncano email address :ivar password: Your Syncano password - :ivar api_key: Your Syncano ``Account Key`` + :ivar api_key: Your Syncano ``Account Key`` or instance ``Api Key`` + :ivar user_key: Your Syncano ``User Key`` + :ivar instance_name: Your Syncano ``Instance Name`` :ivar logger: Python logger instance :ivar timeout: Default request timeout :ivar verify_ssl: Verify SSL certificate """ - AUTH_SUFFIX = 'v1/account/auth' CONTENT_TYPE = 'application/json' - def __init__(self, host=None, email=None, password=None, api_key=None, **kwargs): + AUTH_SUFFIX = 'v1.1/account/auth' + ACCOUNT_SUFFIX = 'v1.1/account/' + SOCIAL_AUTH_SUFFIX = AUTH_SUFFIX + '/{social_backend}/' + + USER_AUTH_SUFFIX = 'v1.1/instances/{name}/user/auth/' + USER_INFO_SUFFIX = 'v1.1/instances/{name}/user/' + + REGISTER_SUFFIX = 'v1.1/account/register/' + + LOGIN_PARAMS = {'email', + 'password'} + ALT_LOGIN_PARAMS = {'api_key'} + + USER_LOGIN_PARAMS = {'username', + 'password', + 'api_key', + 'instance_name'} + USER_ALT_LOGIN_PARAMS = {'user_key', + 'api_key', + 'instance_name'} + + SOCIAL_LOGIN_PARAMS = {'token', + 'social_backend'} + + def __init__(self, host=None, **kwargs): self.host = host or syncano.API_ROOT - self.email = email or syncano.EMAIL - self.password = password or syncano.PASSWORD - self.api_key = api_key or syncano.APIKEY - self.logger = kwargs.get('logger') or syncano.logger - self.timeout = kwargs.get('timeout') or 30 - self.session = requests.Session() + self.logger = kwargs.get('logger', syncano.logger) + self.timeout = kwargs.get('timeout', 30) + # We don't need to check SSL cert in DEBUG mode self.verify_ssl = kwargs.pop('verify_ssl', True) + self._init_login_params(kwargs) + + if self.is_user: + self.AUTH_SUFFIX = self.USER_AUTH_SUFFIX.format(name=self.instance_name) + self.auth_method = self.authenticate_user + else: + if self.is_social: + self.AUTH_SUFFIX = self.SOCIAL_AUTH_SUFFIX.format(social_backend=self.social_backend) + self.auth_method = self.authenticate_admin + + self.session = requests.Session() + + def _init_login_params(self, login_kwargs): + for param in self.LOGIN_PARAMS.union(self.ALT_LOGIN_PARAMS, + self.USER_LOGIN_PARAMS, + self.USER_ALT_LOGIN_PARAMS, + self.SOCIAL_LOGIN_PARAMS): + def_name = param.replace('_', '').upper() + value = login_kwargs.get(param, getattr(syncano, def_name, None)) + setattr(self, param, value) + + def _are_params_ok(self, params): + return all(getattr(self, p) for p in params) + + @property + def is_user(self): + login_params_ok = self._are_params_ok(self.USER_LOGIN_PARAMS) + alt_login_params_ok = self._are_params_ok(self.USER_ALT_LOGIN_PARAMS) + return login_params_ok or alt_login_params_ok + + @property + def is_social(self): + return self._are_params_ok(self.SOCIAL_LOGIN_PARAMS) + + @property + def is_alt_login(self): + if self.is_user: + return self._are_params_ok(self.USER_ALT_LOGIN_PARAMS) + return self._are_params_ok(self.ALT_LOGIN_PARAMS) + + @property + def auth_key(self): + if self.is_user: + return self.user_key + return self.api_key + def build_params(self, params): """ :type params: dict @@ -81,15 +151,20 @@ def build_params(self, params): :return: Request params """ params = deepcopy(params) - params['timeout'] = params.get('timeout') or self.timeout - params['headers'] = params.get('headers') or {} - params['verify'] = True + params['timeout'] = params.get('timeout', self.timeout) + params['headers'] = params.get('headers', {}) + params['verify'] = self.verify_ssl if 'content-type' not in 
params['headers']: params['headers']['content-type'] = self.CONTENT_TYPE - if self.api_key and 'Authorization' not in params['headers']: - params['headers']['Authorization'] = 'ApiKey %s' % self.api_key + if self.is_user: + params['headers'].update({ + 'X-USER-KEY': self.user_key, + 'X-API-KEY': self.api_key + }) + elif self.api_key and 'Authorization' not in params['headers']: + params['headers']['Authorization'] = 'token {}'.format(self.api_key) # We don't need to check SSL cert in DEBUG mode if syncano.DEBUG or not self.verify_ssl: @@ -141,10 +216,9 @@ def request(self, method_name, path, **kwargs): :rtype: dict :return: JSON response """ - - if not self.is_authenticated(): + is_auth = self.is_authenticated() + if not is_auth: self.authenticate() - return self.make_request(method_name, path, **kwargs) def make_request(self, method_name, path, **kwargs): @@ -161,13 +235,25 @@ def make_request(self, method_name, path, **kwargs): :raises SyncanoValueError: if invalid request method was chosen :raises SyncanoRequestError: if something went wrong during the request """ + data = kwargs.get('data', {}) + files = data.pop('files', None) + + self._check_batch_files(data) + + if files is None: + files = {k: v for k, v in six.iteritems(data) if hasattr(v, 'read')} + if data: + kwargs['data'] = {k: v for k, v in six.iteritems(data) if k not in files} + params = self.build_params(kwargs) method = getattr(self.session, method_name.lower(), None) # JSON dump can be expensive if syncano.DEBUG: + debug_params = params.copy() + debug_params.update({'files': [f for f in files]}) # show files in debug info; formatted_params = json.dumps( - params, + debug_params, sort_keys=True, indent=2, separators=(',', ': ') @@ -185,6 +271,29 @@ def make_request(self, method_name, path, **kwargs): url = self.build_url(path) response = method(url, **params) + while response.status_code == 429: # throttling; + retry_after = response.headers.get('retry-after', 1) + time.sleep(float(retry_after)) + response = method(url, **params) + content = self.get_response_content(url, response) + + if files: + # remove 'data' and 'content-type' to avoid "ValueError: Data must not be a string." 
+ params.pop('data') + params['headers'].pop('content-type') + params['files'] = self._process_apns_cert_files(files) + + if response.status_code == 201: + url = '{}{}/'.format(url, content['id']) + + patch = getattr(self.session, 'patch') + # second request is needed to upload a file + response = patch(url, **params) + content = self.get_response_content(url, response) + + return content + + def get_response_content(self, url, response): try: content = response.json() except ValueError: @@ -195,6 +304,8 @@ def make_request(self, method_name, path, **kwargs): # Validation error if is_client_error(response.status_code): + if response.status_code == 400 and 'expected_revision' in content: + raise RevisionMismatchException(response.status_code, content) raise SyncanoRequestError(response.status_code, content) # Other errors @@ -212,10 +323,11 @@ def is_authenticated(self): :rtype: boolean :return: Session authentication state """ - + if self.is_user: + return self.user_key is not None return self.api_key is not None - def authenticate(self, email=None, password=None): + def authenticate(self, **kwargs): """ :type email: string :param email: Your Syncano account email address @@ -223,32 +335,118 @@ def authenticate(self, email=None, password=None): :type password: string :param password: Your Syncano password + :type api_key: string + :param api_key: Your Syncano api_key for instance + :rtype: string :return: Your ``Account Key`` """ - - if self.is_authenticated(): - self.logger.debug('Connection already authenticated: %s', self.api_key) - return self.api_key - - email = email or self.email - password = password or self.password - - if not email: - raise SyncanoValueError('"email" is required.') - - if not password: - raise SyncanoValueError('"password" is required.') - - self.logger.debug('Authenticating: %s', email) - - data = {'email': email, 'password': password} - response = self.make_request('POST', self.AUTH_SUFFIX, data=data) - account_key = response.get('account_key') - self.api_key = account_key - - self.logger.debug('Authentication successful: %s', account_key) - return account_key + is_auth = self.is_authenticated() + + if is_auth: + msg = 'Connection already authenticated: {}' + else: + msg = 'Authentication successful: {}' + self.logger.debug('Authenticating') + self.auth_method(**kwargs) + key = self.auth_key + self.logger.debug(msg.format(key)) + return key + + def validate_params(self, kwargs, params): + for k in params: + kwargs[k] = kwargs.get(k, getattr(self, k)) + + if kwargs[k] is None: + raise SyncanoValueError('"{}" is required.'.format(k)) + return kwargs + + def authenticate_admin(self, **kwargs): + if self.is_alt_login: + request_args = self.validate_params(kwargs, + self.ALT_LOGIN_PARAMS) + else: + if self.is_social: + request_args = self.validate_params(kwargs, + self.SOCIAL_LOGIN_PARAMS) + request_args['access_token'] = request_args.pop('token') # core expects a access_token field; + else: + request_args = self.validate_params(kwargs, + self.LOGIN_PARAMS) + + response = self.make_request('POST', self.AUTH_SUFFIX, data=request_args) + self.api_key = response.get('account_key') + return self.api_key + + def authenticate_user(self, **kwargs): + if self.is_alt_login: + request_args = self.validate_params(kwargs, + self.USER_ALT_LOGIN_PARAMS) + else: + request_args = self.validate_params(kwargs, + self.USER_LOGIN_PARAMS) + headers = { + 'content-type': self.CONTENT_TYPE, + 'X-API-KEY': request_args.pop('api_key') + } + response = self.make_request('POST', 
self.AUTH_SUFFIX, data=request_args, headers=headers) + self.user_key = response.get('user_key') + return self.user_key + + def get_account_info(self, api_key=None): + self.api_key = api_key or self.api_key + + if not self.api_key: + raise SyncanoValueError('api_key is required.') + + return self.make_request('GET', self.ACCOUNT_SUFFIX, headers={'X-API-KEY': self.api_key}) + + def get_user_info(self, api_key=None, user_key=None): + self.api_key = api_key or self.api_key + self.user_key = user_key or self.user_key + + for attribute_name in ('api_key', 'user_key', 'instance_name'): + if not getattr(self, attribute_name, None): + raise SyncanoValueError('{attribute_name} is required.'.format(attribute_name=attribute_name)) + + return self.make_request('GET', self.USER_INFO_SUFFIX.format(name=self.instance_name), headers={ + 'X-API-KEY': self.api_key, 'X-USER-KEY': self.user_key}) + + @classmethod + def _check_batch_files(cls, data): + if 'requests' in data: # batch requests + for request in data['requests']: + per_request_files = request.get('body', {}).get('files', {}) + if per_request_files: + raise SyncanoValueError('Batch do not support files upload.') + + def _process_apns_cert_files(self, files): + files = files.copy() + for key in [file_name for file_name in files.keys()]: + # remove certificates files (which are bool - True if cert exist, False otherwise) + value = files[key] + if isinstance(value, bool): + files.pop(key) + continue + + if key in ['production_certificate', 'development_certificate']: + value = (value.name, value, 'application/x-pkcs12', {'Expires': '0'}) + files[key] = value + return files + + def register(self, email, password, first_name=None, last_name=None, invitation_key=None): + register_data = { + 'email': email, + 'password': password, + } + for name, value in zip(['first_name', 'last_name', 'invitation_key'], + [first_name, last_name, invitation_key]): + if value: + register_data.update({name: value}) + response = self.make_request('POST', self.REGISTER_SUFFIX, data=register_data) + + self.api_key = response['account_key'] + return self.api_key class ConnectionMixin(object): @@ -261,7 +459,8 @@ def __init__(self, *args, **kwargs): @property def connection(self): # Sometimes someone will not use super - return getattr(self, '_connection', None) or default_connection() + from syncano.models.registry import registry # TODO: refactor this; + return getattr(self, '_connection', None) or registry.connection() @connection.setter def connection(self, value): diff --git a/syncano/exceptions.py b/syncano/exceptions.py index 7ce09f4..1e2e7d8 100644 --- a/syncano/exceptions.py +++ b/syncano/exceptions.py @@ -32,7 +32,9 @@ def __init__(self, status_code, reason, *args): self.status_code = status_code if isinstance(reason, dict): - message = ''.join(reason.get(k, '') for k in ['detail', 'error', '__all__']) + joined_details = (''.join(reason.get(k, '')) for k in ['detail', 'error', '__all__']) + message = ''.join(joined_details) + if not message: for name, value in six.iteritems(reason): if isinstance(value, (list, dict)): @@ -69,3 +71,11 @@ def __str__(self): class SyncanoDoesNotExist(SyncanoException): """Syncano object doesn't exist error occurred.""" + + +class RevisionMismatchException(SyncanoRequestError): + """Revision do not match with expected one""" + + +class UserNotFound(SyncanoRequestError): + """Special error to handle user not found case.""" diff --git a/syncano/models/__init__.py b/syncano/models/__init__.py index a63b8ae..530d26c 100644 --- 
a/syncano/models/__init__.py +++ b/syncano/models/__init__.py @@ -1,3 +1 @@ from .base import * # NOQA -from .fields import * # NOQA -from .channels import * # NOQA diff --git a/syncano/models/accounts.py b/syncano/models/accounts.py new file mode 100644 index 0000000..1e79eca --- /dev/null +++ b/syncano/models/accounts.py @@ -0,0 +1,264 @@ +from syncano.exceptions import SyncanoRequestError, SyncanoValueError, UserNotFound + +from . import fields +from .base import Model +from .classes import Class, DataObjectMixin, Object +from .instances import Instance +from .manager import ObjectManager + + +class Admin(Model): + """ + OO wrapper around instance admins `link `_. + + :ivar first_name: :class:`~syncano.models.fields.StringField` + :ivar last_name: :class:`~syncano.models.fields.StringField` + :ivar email: :class:`~syncano.models.fields.EmailField` + :ivar role: :class:`~syncano.models.fields.ChoiceField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + """ + ROLE_CHOICES = ( + {'display_name': 'full', 'value': 'full'}, + {'display_name': 'write', 'value': 'write'}, + {'display_name': 'read', 'value': 'read'}, + ) + + first_name = fields.StringField(read_only=True, required=False) + last_name = fields.StringField(read_only=True, required=False) + email = fields.EmailField(read_only=True, required=False) + role = fields.ChoiceField(choices=ROLE_CHOICES) + links = fields.LinksField() + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['put', 'get', 'patch', 'delete'], + 'path': '/admins/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/admins/', + } + } + + +class Profile(DataObjectMixin, Object): + """ + """ + + PREDEFINED_CLASS_NAME = 'user_profile' + + PERMISSIONS_CHOICES = ( + {'display_name': 'None', 'value': 'none'}, + {'display_name': 'Read', 'value': 'read'}, + {'display_name': 'Write', 'value': 'write'}, + {'display_name': 'Full', 'value': 'full'}, + ) + + owner = fields.IntegerField(label='owner id', required=False, read_only=True) + owner_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') + group = fields.IntegerField(label='group id', required=False) + group_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') + other_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') + channel = fields.StringField(required=False) + channel_room = fields.StringField(required=False, max_length=64) + + links = fields.LinksField() + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + class Meta: + parent = Class + endpoints = { + 'detail': { + 'methods': ['delete', 'post', 'patch', 'get'], + 'path': '/objects/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/objects/', + } + } + + please = ObjectManager() + + +class User(Model): + """ + OO wrapper around users `link `_. 
+ + :ivar username: :class:`~syncano.models.fields.StringField` + :ivar password: :class:`~syncano.models.fields.StringField` + :ivar user_key: :class:`~syncano.models.fields.StringField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + """ + + username = fields.StringField(max_length=64, required=True) + password = fields.StringField(read_only=False, required=True) + user_key = fields.StringField(read_only=True, required=False) + + profile = fields.ModelField('Profile', read_only=False, default={}, + just_pk=False, is_data_object_mixin=True) + + links = fields.LinksField() + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['delete', 'patch', 'put', 'get'], + 'path': '/users/{id}/', + }, + 'reset_key': { + 'methods': ['post'], + 'path': '/users/{id}/reset_key/', + }, + 'auth': { + 'methods': ['post'], + 'path': '/user/auth/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/users/', + }, + 'groups': { + 'methods': ['get', 'post', 'delete'], + 'path': '/users/{id}/groups/', + } + } + + def reset_key(self): + properties = self.get_endpoint_data() + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('reset_key', properties, http_method) + connection = self._get_connection() + return connection.request(http_method, endpoint) + + def auth(self, username=None, password=None): + properties = self.get_endpoint_data() + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('auth', properties, http_method) + connection = self._get_connection() + + if not (username and password): + raise SyncanoValueError('You need provide username and password.') + + data = { + 'username': username, + 'password': password + } + + return connection.request(http_method, endpoint, data=data) + + def _user_groups_method(self, group_id=None, method='GET'): + properties = self.get_endpoint_data() + endpoint = self._meta.resolve_endpoint('groups', properties, method) + + if group_id is not None and method != 'POST': + endpoint += '{}/'.format(group_id) + connection = self._get_connection() + + data = {} + if method == 'POST': + data = {'group': group_id} + + response = connection.request(method, endpoint, data=data) + + if method == 'DELETE': # no response here; + return + + if 'objects' in response: + return [Group(**group_response['group']) for group_response in response['objects']] + + return Group(**response['group']) + + def add_to_group(self, group_id): + return self._user_groups_method(group_id, method='POST') + + def list_groups(self): + return self._user_groups_method() + + def group_details(self, group_id): + return self._user_groups_method(group_id) + + def remove_from_group(self, group_id): + return self._user_groups_method(group_id, method='DELETE') + + +class Group(Model): + """ + OO wrapper around groups `link `_. 
+ + :ivar label: :class:`~syncano.models.fields.StringField` + :ivar description: :class:`~syncano.models.fields.StringField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + """ + + label = fields.StringField(max_length=64, required=True) + description = fields.StringField(read_only=False, required=False) + + links = fields.LinksField() + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['delete', 'patch', 'put', 'get'], + 'path': '/groups/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/groups/', + }, + 'users': { + 'methods': ['get', 'post', 'delete'], + 'path': '/groups/{id}/users/', + } + } + + def _group_users_method(self, user_id=None, method='GET'): + properties = self.get_endpoint_data() + endpoint = self._meta.resolve_endpoint('users', properties, method) + if user_id is not None and method != 'POST': + endpoint += '{}/'.format(user_id) + connection = self._get_connection() + + data = {} + if method == 'POST': + data = {'user': user_id} + + try: + response = connection.request(method, endpoint, data=data) + except SyncanoRequestError as e: + if e.status_code == 404: + raise UserNotFound(e.status_code, 'User not found.') + raise + + if method == 'DELETE': + return + + if 'objects' in response: + return [User(**user_response['user']) for user_response in response['objects']] + + return User(**response['user']) + + def list_users(self): + return self._group_users_method() + + def add_user(self, user_id): + return self._group_users_method(user_id, method='POST') + + def user_details(self, user_id): + return self._group_users_method(user_id) + + def delete_user(self, user_id): + return self._group_users_method(user_id, method='DELETE') diff --git a/syncano/models/archetypes.py b/syncano/models/archetypes.py new file mode 100644 index 0000000..236da8b --- /dev/null +++ b/syncano/models/archetypes.py @@ -0,0 +1,279 @@ + + +import inspect + +import six +from syncano.exceptions import SyncanoDoesNotExist, SyncanoValidationError + +from . import fields +from .manager import Manager +from .options import Options +from .registry import registry + + +class ModelMetaclass(type): + """Metaclass for all models. 
+ """ + def __new__(cls, name, bases, attrs): + super_new = super(ModelMetaclass, cls).__new__ + + parents = [b for b in bases if isinstance(b, ModelMetaclass)] + abstracts = [b for b in bases if hasattr(b, 'Meta') and getattr(b.Meta, 'abstract', None)] + if not parents: + return super_new(cls, name, bases, attrs) + + module = attrs.pop('__module__', None) + new_class = super_new(cls, name, bases, {'__module__': module}) + + meta = attrs.pop('Meta', None) or getattr(new_class, 'Meta', None) + meta = Options(meta) + new_class.add_to_class('_meta', meta) + + manager = attrs.pop('please', Manager()) + new_class.add_to_class('please', manager) + + error_class = new_class.create_error_class() + new_class.add_to_class('DoesNotExist', error_class) + + for n, v in six.iteritems(attrs): + new_class.add_to_class(n, v) + + for abstract in abstracts: + for n, v in six.iteritems(abstract.__dict__): + if isinstance(v, fields.Field) or n in ['LINKS']: # extend this condition if required; + new_class.add_to_class(n, v) + + if not meta.pk: + pk_field = fields.IntegerField(primary_key=True, read_only=True, + required=False) + new_class.add_to_class('id', pk_field) + + for field_name in meta.endpoint_fields: + if field_name not in meta.field_names: + endpoint_field = fields.EndpointField() + new_class.add_to_class(field_name, endpoint_field) + + new_class.build_doc(name, meta) + registry.add(name, new_class) + return new_class + + def add_to_class(cls, name, value): + if not inspect.isclass(value) and hasattr(value, 'contribute_to_class'): + value.contribute_to_class(cls, name) + else: + setattr(cls, name, value) + + def create_error_class(cls): + return type( + str('{0}DoesNotExist'.format(cls.__name__)), + (SyncanoDoesNotExist, ), + {} + ) + + def build_doc(cls, name, meta): + """Give the class a docstring if it's not defined. + """ + if cls.__doc__ is not None: + return + + field_names = ['{0} = {1}'.format(f.name, f.__class__.__name__) for f in meta.fields] + cls.__doc__ = '{0}:\n\t{1}'.format(name, '\n\t'.join(field_names)) + + +class Model(six.with_metaclass(ModelMetaclass)): + """Base class for all models. + """ + + def __init__(self, **kwargs): + self.is_lazy = kwargs.pop('is_lazy', False) + self._raw_data = {} + self.to_python(kwargs) + + def __repr__(self): + """Displays current instance class name and pk. + """ + return '<{0}: {1}>'.format( + self.__class__.__name__, + self.pk + ) + + def __str__(self): + """Wrapper around ```repr`` method. + """ + return repr(self) + + def __unicode__(self): + """Wrapper around ```repr`` method with proper encoding. + """ + return six.u(repr(self)) + + def __eq__(self, other): + if isinstance(other, Model): + return self.pk == other.pk + return NotImplemented + + def _get_connection(self, **kwargs): + connection = kwargs.pop('connection', None) + return connection or self._meta.connection + + def save(self, **kwargs): + """ + Creates or updates the current instance. + Override this in a subclass if you want to control the saving process. 
+ """ + self.validate() + data = self.to_native() + connection = self._get_connection(**kwargs) + properties = self.get_endpoint_data() + endpoint_name = 'list' + method = 'POST' + + if not self.is_new(): + endpoint_name = 'detail' + methods = self._meta.get_endpoint_methods(endpoint_name) + if 'put' in methods: + method = 'PUT' + + endpoint = self._meta.resolve_endpoint(endpoint_name, properties, method) + if 'expected_revision' in kwargs: + data.update({'expected_revision': kwargs['expected_revision']}) + request = {'data': data} + + if not self.is_lazy: + response = connection.request(method, endpoint, **request) + self.to_python(response) + return self + + return self.batch_object(method=method, path=endpoint, body=request['data'], properties=data) + + @classmethod + def batch_object(cls, method, path, body, properties=None): + properties = properties if properties else {} + return { + 'body': { + 'method': method, + 'path': path, + 'body': body, + }, + 'meta': { + 'model': cls, + 'properties': properties + } + } + + def mark_for_batch(self): + self.is_lazy = True + + def delete(self, **kwargs): + """Removes the current instance. + """ + if self.is_new(): + raise SyncanoValidationError('Method allowed only on existing model.') + + properties = self.get_endpoint_data() + http_method = 'DELETE' + endpoint = self._meta.resolve_endpoint('detail', properties, http_method) + connection = self._get_connection(**kwargs) + connection.request(http_method, endpoint) + if self.__class__.__name__ == 'Instance': # avoid circular import; + registry.clear_used_instance() + self._raw_data = {} + + def reload(self, **kwargs): + """Reloads the current instance. + """ + if self.is_new(): + raise SyncanoValidationError('Method allowed only on existing model.') + + properties = self.get_endpoint_data() + http_method = 'GET' + endpoint = self._meta.resolve_endpoint('detail', properties, http_method) + connection = self._get_connection(**kwargs) + response = connection.request(http_method, endpoint) + self.to_python(response) + + def validate(self): + """ + Validates the current instance. + + :raises: SyncanoValidationError, SyncanoFieldError + """ + for field in self._meta.fields: + if not field.read_only: + value = getattr(self, field.name) + field.validate(value, self) + + def is_valid(self): + try: + self.validate() + except SyncanoValidationError: + return False + else: + return True + + def is_new(self): + if 'links' in self._meta.field_names: + return not self.links + + if self._meta.pk.read_only and not self.pk: + return True + + return False + + def to_python(self, data): + """ + Converts raw data to python types and built-in objects. + + :type data: dict + :param data: Raw data + """ + + for field in self._meta.fields: + field_name = field.name + + # some explanation needed here: + # When data comes from Syncano Platform the 'class' field is there + # so to map correctly the 'class' value to the 'class_name' field + # the mapping is required. + # But. 
When DataEndpoint (and probably others models with mapping) is created from + # syncano LIB directly: DataEndpoint(class_name='some_class') + # the data dict has only 'class_name' key - not the 'class', + # later the transition between class_name and class is made in to_native on model; + if field.mapping is not None and field.mapping in data and self.is_new(): + field_name = field.mapping + + if field_name in data: + value = data[field_name] + setattr(self, field.name, value) + + if isinstance(field, fields.RelationField): + setattr(self, "{}_set".format(field_name), field(instance=self, field_name=field_name)) + + def to_native(self): + """Converts the current instance to raw data which + can be serialized to JSON and send to API. + """ + data = {} + for field in self._meta.fields: + if not field.read_only and field.has_data: + value = getattr(self, field.name) + if value is None and field.blank: + continue + + if field.mapping: + data[field.mapping] = field.to_native(value) + else: + + param_name = getattr(field, 'param_name', field.name) + if param_name == 'files' and param_name in data: + data[param_name].update(field.to_native(value)) + else: + data[param_name] = field.to_native(value) + return data + + def get_endpoint_data(self): + properties = {} + for field in self._meta.fields: + if field.has_endpoint_data: + properties[field.name] = getattr(self, field.name) + return properties diff --git a/syncano/models/backups.py b/syncano/models/backups.py new file mode 100644 index 0000000..bf77353 --- /dev/null +++ b/syncano/models/backups.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +from . import fields +from .base import Model +from .instances import Instance + + +class Backup(Model): + """ + OO wrapper around backups `link `_. + + :ivar label: :class:`~syncano.models.fields.StringField` + :ivar description: :class:`~syncano.models.fields.StringField` + :ivar instance: :class:`~syncano.models.fields.StringField` + :ivar size: :class:`~syncano.models.fields.IntegerField` + :ivar status: :class:`~syncano.models.fields.StringField` + :ivar status_info: :class:`~syncano.models.fields.StringField` + :ivar author: :class:`~syncano.models.fields.ModelField` + :ivar details: :class:`~syncano.models.fields.JSONField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + """ + + label = fields.StringField(read_only=True) + description = fields.StringField(read_only=True) + + instance = fields.StringField(read_only=True) + size = fields.IntegerField(read_only=True) + status = fields.StringField(read_only=True) + status_info = fields.StringField(read_only=True) + author = fields.ModelField('Admin') + details = fields.JSONField(read_only=True) + + updated_at = fields.DateTimeField(read_only=True, required=False) + created_at = fields.DateTimeField(read_only=True, required=False) + links = fields.LinksField() + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['get', 'delete'], + 'path': '/backups/full/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/backups/full/', + }, + } + + def schedule_restore(self): + restore = Restore(backup=self.id).save() + return restore + + +class Restore(Model): + + author = fields.ModelField('Admin') + status = fields.StringField(read_only=True) + status_info = fields.StringField(read_only=True) + updated_at = fields.DateTimeField(read_only=True, required=False) + created_at = 
fields.DateTimeField(read_only=True, required=False) + links = fields.LinksField() + backup = fields.StringField() + archive = fields.StringField(read_only=True) + + class Meta: + parent = Instance + endpoints = { + 'list': { + 'methods': ['get', 'post'], + 'path': '/restores/', + }, + 'detail': { + 'methods': ['get'], + 'path': '/restores/{id}/', + } + } diff --git a/syncano/models/base.py b/syncano/models/base.py index eb23fcc..2f4eeeb 100644 --- a/syncano/models/base.py +++ b/syncano/models/base.py @@ -1,993 +1,17 @@ -from __future__ import unicode_literals - -import inspect -import json -from copy import deepcopy -from datetime import datetime - -import six - -from syncano.exceptions import SyncanoDoesNotExist, SyncanoValidationError -from syncano.utils import get_class_name - -from . import fields -from .manager import CodeBoxManager, Manager, ObjectManager, WebhookManager -from .options import Options -from .registry import registry - - -class ModelMetaclass(type): - """Metaclass for all models.""" - - def __new__(cls, name, bases, attrs): - super_new = super(ModelMetaclass, cls).__new__ - - parents = [b for b in bases if isinstance(b, ModelMetaclass)] - if not parents: - return super_new(cls, name, bases, attrs) - - module = attrs.pop('__module__', None) - new_class = super_new(cls, name, bases, {'__module__': module}) - - meta = attrs.pop('Meta', None) or getattr(new_class, 'Meta', None) - meta = Options(meta) - new_class.add_to_class('_meta', meta) - - manager = attrs.pop('please', Manager()) - new_class.add_to_class('please', manager) - - error_class = new_class.create_error_class() - new_class.add_to_class('DoesNotExist', error_class) - - for n, v in six.iteritems(attrs): - new_class.add_to_class(n, v) - - if not meta.pk: - pk_field = fields.IntegerField(primary_key=True, read_only=True, - required=False) - new_class.add_to_class('id', pk_field) - - for field_name in meta.endpoint_fields: - if field_name not in meta.field_names: - endpoint_field = fields.EndpointField() - new_class.add_to_class(field_name, endpoint_field) - - new_class.build_doc(name, meta) - registry.add(name, new_class) - return new_class - - def add_to_class(cls, name, value): - if not inspect.isclass(value) and hasattr(value, 'contribute_to_class'): - value.contribute_to_class(cls, name) - else: - setattr(cls, name, value) - - def create_error_class(cls): - return type( - str('{0}DoesNotExist'.format(cls.__name__)), - (SyncanoDoesNotExist, ), - {} - ) - - def build_doc(cls, name, meta): - """Give the class a docstring if it's not defined.""" - if cls.__doc__ is not None: - return - - field_names = ['{0} = {1}'.format(f.name, f.__class__.__name__) for f in meta.fields] - cls.__doc__ = '{0}:\n\t{1}'.format(name, '\n\t'.join(field_names)) - - -class Model(six.with_metaclass(ModelMetaclass)): - """Base class for all models.""" - - def __init__(self, **kwargs): - self._raw_data = {} - self.to_python(kwargs) - - def __repr__(self): - """Displays current instance class name and pk.""" - return '<{0}: {1}>'.format( - self.__class__.__name__, - self.pk - ) - - def __str__(self): - """Wrapper around ```repr`` method.""" - return repr(self) - - def __unicode__(self): - """Wrapper around ```repr`` method with proper encoding.""" - return six.u(repr(self)) - - def __eq__(self, other): - if isinstance(other, Model): - return self.pk == other.pk - return NotImplemented - - def _get_connection(self, **kwargs): - connection = kwargs.pop('connection', None) - return connection or self._meta.connection - - def save(self, 
**kwargs): - """ - Creates or updates the current instance. - Override this in a subclass if you want to control the saving process. - """ - self.validate() - data = self.to_native() - connection = self._get_connection(**kwargs) - properties = self.get_endpoint_data() - endpoint_name = 'list' - method = 'POST' - - if not self.is_new(): - endpoint_name = 'detail' - methods = self._meta.get_endpoint_methods(endpoint_name) - if 'put' in methods: - method = 'PUT' - - endpoint = self._meta.resolve_endpoint(endpoint_name, properties) - request = {'data': data} - response = connection.request(method, endpoint, **request) - - self.to_python(response) - return self - - def delete(self, **kwargs): - """Removes the current instance.""" - if self.is_new(): - raise SyncanoValidationError('Method allowed only on existing model.') - - properties = self.get_endpoint_data() - endpoint = self._meta.resolve_endpoint('detail', properties) - connection = self._get_connection(**kwargs) - connection.request('DELETE', endpoint) - self._raw_data = {} - - def reload(self, **kwargs): - """Reloads the current instance.""" - if self.is_new(): - raise SyncanoValidationError('Method allowed only on existing model.') - - properties = self.get_endpoint_data() - endpoint = self._meta.resolve_endpoint('detail', properties) - connection = self._get_connection(**kwargs) - response = connection.request('GET', endpoint) - self.to_python(response) - - def validate(self): - """ - Validates the current instance. - - :raises: SyncanoValidationError, SyncanoFieldError - """ - for field in self._meta.fields: - if not field.read_only: - value = getattr(self, field.name) - field.validate(value, self) - - def is_valid(self): - try: - self.validate() - except SyncanoValidationError: - return False - else: - return True - - def is_new(self): - if 'links' in self._meta.field_names: - return not self.links - - if self._meta.pk.read_only and not self.pk: - return True - - return False - - def to_python(self, data): - """ - Converts raw data to python types and built-in objects. - - :type data: dict - :param data: Raw data - """ - for field in self._meta.fields: - if field.name in data: - value = data[field.name] - setattr(self, field.name, value) - - def to_native(self): - """Converts the current instance to raw data which - can be serialized to JSON and send to API.""" - data = {} - for field in self._meta.fields: - if not field.read_only and field.has_data: - value = getattr(self, field.name) - if not value and field.blank: - continue - data[field.name] = field.to_native(value) - return data - - def get_endpoint_data(self): - properties = {} - for field in self._meta.fields: - if field.has_endpoint_data: - properties[field.name] = getattr(self, field.name) - return properties - - -class Coupon(Model): - """ - OO wrapper around coupons `endpoint `_. 
- - :ivar name: :class:`~syncano.models.fields.StringField` - :ivar redeem_by: :class:`~syncano.models.fields.DateField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - :ivar percent_off: :class:`~syncano.models.fields.IntegerField` - :ivar amount_off: :class:`~syncano.models.fields.FloatField` - :ivar currency: :class:`~syncano.models.fields.ChoiceField` - :ivar duration: :class:`~syncano.models.fields.IntegerField` - """ - - LINKS = ( - {'type': 'detail', 'name': 'self'}, - {'type': 'list', 'name': 'redeem'}, - ) - CURRENCY_CHOICES = ( - {'display_name': 'USD', 'value': 'usd'}, - ) - - name = fields.StringField(max_length=32, primary_key=True) - redeem_by = fields.DateField() - links = fields.HyperlinkedField(links=LINKS) - percent_off = fields.IntegerField(required=False) - amount_off = fields.FloatField(required=False) - currency = fields.ChoiceField(choices=CURRENCY_CHOICES) - duration = fields.IntegerField(default=0) - - class Meta: - endpoints = { - 'detail': { - 'methods': ['get', 'delete'], - 'path': '/v1/billing/coupons/{name}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/v1/billing/coupons/', - } - } - - -class Discount(Model): - """ - OO wrapper around discounts `endpoint `_. - - :ivar instance: :class:`~syncano.models.fields.ModelField` - :ivar coupon: :class:`~syncano.models.fields.ModelField` - :ivar start: :class:`~syncano.models.fields.DateField` - :ivar end: :class:`~syncano.models.fields.DateField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - """ - - LINKS = ( - {'type': 'detail', 'name': 'self'}, - ) - - instance = fields.ModelField('Instance') - coupon = fields.ModelField('Coupon') - start = fields.DateField(read_only=True, required=False) - end = fields.DateField(read_only=True, required=False) - links = fields.HyperlinkedField(links=LINKS) - - class Meta: - endpoints = { - 'detail': { - 'methods': ['get'], - 'path': '/v1/billing/discounts/{id}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/v1/billing/discounts/', - } - } - - -class Instance(Model): - """ - OO wrapper around instances `endpoint `_. 
- - :ivar name: :class:`~syncano.models.fields.StringField` - :ivar description: :class:`~syncano.models.fields.StringField` - :ivar role: :class:`~syncano.models.fields.Field` - :ivar owner: :class:`~syncano.models.fields.ModelField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - :ivar metadata: :class:`~syncano.models.fields.JSONField` - :ivar created_at: :class:`~syncano.models.fields.DateTimeField` - :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` - """ - - LINKS = ( - {'type': 'detail', 'name': 'self'}, - {'type': 'list', 'name': 'admins'}, - {'type': 'list', 'name': 'classes'}, - {'type': 'list', 'name': 'codeboxes'}, - {'type': 'list', 'name': 'invitations'}, - {'type': 'list', 'name': 'runtimes'}, - {'type': 'list', 'name': 'api_keys'}, - {'type': 'list', 'name': 'triggers'}, - {'type': 'list', 'name': 'webhooks'}, - {'type': 'list', 'name': 'schedules'}, - ) - - name = fields.StringField(max_length=64, primary_key=True) - description = fields.StringField(read_only=False, required=False) - role = fields.Field(read_only=True, required=False) - owner = fields.ModelField('Admin', read_only=True) - links = fields.HyperlinkedField(links=LINKS) - metadata = fields.JSONField(read_only=False, required=False) - created_at = fields.DateTimeField(read_only=True, required=False) - updated_at = fields.DateTimeField(read_only=True, required=False) - - class Meta: - endpoints = { - 'detail': { - 'methods': ['delete', 'patch', 'put', 'get'], - 'path': '/v1/instances/{name}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/v1/instances/', - } - } - - -class ApiKey(Model): - """ - OO wrapper around instance api keys `endpoint `_. - - :ivar api_key: :class:`~syncano.models.fields.StringField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - """ - LINKS = [ - {'type': 'detail', 'name': 'self'}, - ] - - api_key = fields.StringField(read_only=True, required=False) - links = fields.HyperlinkedField(links=LINKS) - - class Meta: - parent = Instance - endpoints = { - 'detail': { - 'methods': ['get', 'delete'], - 'path': '/api_keys/{id}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/api_keys/', - } - } - - -class Class(Model): - """ - OO wrapper around instance classes `endpoint `_. - - :ivar name: :class:`~syncano.models.fields.StringField` - :ivar description: :class:`~syncano.models.fields.StringField` - :ivar objects_count: :class:`~syncano.models.fields.Field` - :ivar schema: :class:`~syncano.models.fields.SchemaField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - :ivar status: :class:`~syncano.models.fields.Field` - :ivar metadata: :class:`~syncano.models.fields.JSONField` - :ivar revision: :class:`~syncano.models.fields.IntegerField` - :ivar expected_revision: :class:`~syncano.models.fields.IntegerField` - :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` - :ivar created_at: :class:`~syncano.models.fields.DateTimeField` - :ivar group: :class:`~syncano.models.fields.IntegerField` - :ivar group_permissions: :class:`~syncano.models.fields.ChoiceField` - :ivar other_permissions: :class:`~syncano.models.fields.ChoiceField` - - .. note:: - This model is special because each related :class:`~syncano.models.base.Object` will be - **dynamically populated** with fields defined in schema attribute. 
- """ - - LINKS = [ - {'type': 'detail', 'name': 'self'}, - {'type': 'list', 'name': 'objects'}, - ] - - PERMISSIONS_CHOICES = ( - {'display_name': 'None', 'value': 'none'}, - {'display_name': 'Read', 'value': 'read'}, - {'display_name': 'Create objects', 'value': 'create_objects'}, - ) - - name = fields.StringField(max_length=64, primary_key=True) - description = fields.StringField(read_only=False, required=False) - objects_count = fields.Field(read_only=True, required=False) - - schema = fields.SchemaField(read_only=False, required=True) - links = fields.HyperlinkedField(links=LINKS) - status = fields.Field() - metadata = fields.JSONField(read_only=False, required=False) - - revision = fields.IntegerField(read_only=True, required=False) - expected_revision = fields.IntegerField(read_only=False, required=False) - updated_at = fields.DateTimeField(read_only=True, required=False) - created_at = fields.DateTimeField(read_only=True, required=False) - - group = fields.IntegerField(label='group id', required=False) - group_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') - other_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') - - class Meta: - parent = Instance - plural_name = 'Classes' - endpoints = { - 'detail': { - 'methods': ['get', 'put', 'patch', 'delete'], - 'path': '/classes/{name}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/classes/', - } - } - - -class CodeBox(Model): - """ - OO wrapper around codeboxes `endpoint `_. - - :ivar description: :class:`~syncano.models.fields.StringField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - :ivar source: :class:`~syncano.models.fields.StringField` - :ivar runtime_name: :class:`~syncano.models.fields.ChoiceField` - :ivar config: :class:`~syncano.models.fields.Field` - :ivar name: :class:`~syncano.models.fields.StringField` - :ivar created_at: :class:`~syncano.models.fields.DateTimeField` - :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` - - .. note:: - **CodeBox** has special method called ``run`` which will execute attached source code:: - - >>> CodeBox.please.run('instance-name', 1234) - >>> CodeBox.please.run('instance-name', 1234, payload={'variable_one': 1, 'variable_two': 2}) - >>> CodeBox.please.run('instance-name', 1234, payload="{\"variable_one\": 1, \"variable_two\": 2}") - - or via instance:: - - >>> cb = CodeBox.please.get('instance-name', 1234) - >>> cb.run() - >>> cb.run(variable_one=1, variable_two=2) - """ - - LINKS = ( - {'type': 'detail', 'name': 'self'}, - {'type': 'list', 'name': 'runtimes'}, - # This will cause name collision between model run method - # and HyperlinkedField dynamic methods. 
- # {'type': 'detail', 'name': 'run'}, - {'type': 'detail', 'name': 'traces'}, - ) - RUNTIME_CHOICES = ( - {'display_name': 'nodejs', 'value': 'nodejs'}, - {'display_name': 'python', 'value': 'python'}, - {'display_name': 'ruby', 'value': 'ruby'}, - ) - - name = fields.StringField(max_length=80) - description = fields.StringField(required=False) - source = fields.StringField() - runtime_name = fields.ChoiceField(choices=RUNTIME_CHOICES) - config = fields.Field(required=False) - links = fields.HyperlinkedField(links=LINKS) - created_at = fields.DateTimeField(read_only=True, required=False) - updated_at = fields.DateTimeField(read_only=True, required=False) - - please = CodeBoxManager() - - class Meta: - parent = Instance - name = 'Codebox' - plural_name = 'Codeboxes' - endpoints = { - 'detail': { - 'methods': ['put', 'get', 'patch', 'delete'], - 'path': '/codeboxes/{id}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/codeboxes/', - }, - 'run': { - 'methods': ['post'], - 'path': '/codeboxes/{id}/run/', - }, - } - - def run(self, **payload): - """ - Usage:: - - >>> cb = CodeBox.please.get('instance-name', 1234) - >>> cb.run() - >>> cb.run(variable_one=1, variable_two=2) - """ - if self.is_new(): - raise SyncanoValidationError('Method allowed only on existing model.') - - properties = self.get_endpoint_data() - endpoint = self._meta.resolve_endpoint('run', properties) - connection = self._get_connection(**payload) - request = { - 'data': { - 'payload': json.dumps(payload) - } - } - response = connection.request('POST', endpoint, **request) - response.update({'instance_name': self.instance_name, 'codebox_id': self.id}) - return CodeBoxTrace(**response) - - -class CodeBoxTrace(Model): - STATUS_CHOICES = ( - {'display_name': 'Success', 'value': 'success'}, - {'display_name': 'Failure', 'value': 'failure'}, - {'display_name': 'Timeout', 'value': 'timeout'}, - {'display_name': 'Pending', 'value': 'pending'}, - ) - LINKS = ( - {'type': 'detail', 'name': 'self'}, - ) - - status = fields.ChoiceField(choices=STATUS_CHOICES, read_only=True, required=False) - links = fields.HyperlinkedField(links=LINKS) - executed_at = fields.DateTimeField(read_only=True, required=False) - result = fields.StringField(read_only=True, required=False) - duration = fields.IntegerField(read_only=True, required=False) - - class Meta: - parent = CodeBox - endpoints = { - 'detail': { - 'methods': ['get'], - 'path': '/traces/{id}/', - }, - 'list': { - 'methods': ['get'], - 'path': '/traces/', - } - } - - -class Schedule(Model): - """ - OO wrapper around codebox schedules `endpoint `_. 
- - :ivar description: :class:`~syncano.models.fields.StringField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - :ivar source: :class:`~syncano.models.fields.StringField` - :ivar runtime_name: :class:`~syncano.models.fields.ChoiceField` - :ivar config: :class:`~syncano.models.fields.Field` - :ivar name: :class:`~syncano.models.fields.StringField` - :ivar created_at: :class:`~syncano.models.fields.DateTimeField` - :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` - """ - - LINKS = [ - {'type': 'detail', 'name': 'self'}, - {'type': 'list', 'name': 'traces'}, - {'type': 'list', 'name': 'codebox'}, - ] - - interval_sec = fields.IntegerField(read_only=False, required=False) - crontab = fields.StringField(max_length=40, required=False) - payload = fields.StringField(required=False) - created_at = fields.DateTimeField(read_only=True, required=False) - scheduled_next = fields.DateTimeField(read_only=True, required=False) - links = fields.HyperlinkedField(links=LINKS) - - class Meta: - parent = Instance - endpoints = { - 'detail': { - 'methods': ['get', 'delete'], - 'path': '/schedules/{id}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/schedules/', - } - } - - -class Trace(Model): - """ - OO wrapper around codebox schedules traces `endpoint `_. - - :ivar status: :class:`~syncano.models.fields.ChoiceField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - :ivar executed_at: :class:`~syncano.models.fields.DateTimeField` - :ivar result: :class:`~syncano.models.fields.StringField` - :ivar duration: :class:`~syncano.models.fields.IntegerField` - """ - - STATUS_CHOICES = ( - {'display_name': 'Success', 'value': 'success'}, - {'display_name': 'Failure', 'value': 'failure'}, - {'display_name': 'Timeout', 'value': 'timeout'}, - {'display_name': 'Pending', 'value': 'pending'}, - ) - LINKS = ( - {'type': 'detail', 'name': 'self'}, - ) - - status = fields.ChoiceField(choices=STATUS_CHOICES, read_only=True, required=False) - links = fields.HyperlinkedField(links=LINKS) - executed_at = fields.DateTimeField(read_only=True, required=False) - result = fields.StringField(read_only=True, required=False) - duration = fields.IntegerField(read_only=True, required=False) - - class Meta: - parent = Schedule - endpoints = { - 'detail': { - 'methods': ['get'], - 'path': '/traces/{id}/', - }, - 'list': { - 'methods': ['get'], - 'path': '/traces/', - } - } - - -class Admin(Model): - """ - OO wrapper around instance admins `endpoint `_. 
- - :ivar first_name: :class:`~syncano.models.fields.StringField` - :ivar last_name: :class:`~syncano.models.fields.StringField` - :ivar email: :class:`~syncano.models.fields.EmailField` - :ivar role: :class:`~syncano.models.fields.ChoiceField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - """ - - LINKS = ( - {'type': 'detail', 'name': 'self'}, - ) - ROLE_CHOICES = ( - {'display_name': 'full', 'value': 'full'}, - {'display_name': 'write', 'value': 'write'}, - {'display_name': 'read', 'value': 'read'}, - ) - - first_name = fields.StringField(read_only=True, required=False) - last_name = fields.StringField(read_only=True, required=False) - email = fields.EmailField(read_only=True, required=False) - role = fields.ChoiceField(choices=ROLE_CHOICES) - links = fields.HyperlinkedField(links=LINKS) - - class Meta: - parent = Instance - endpoints = { - 'detail': { - 'methods': ['put', 'get', 'patch', 'delete'], - 'path': '/admins/{id}/', - }, - 'list': { - 'methods': ['get'], - 'path': '/admins/', - } - } - - -class InstanceInvitation(Model): - """ - OO wrapper around instance invitations - `endpoint `_. - - :ivar email: :class:`~syncano.models.fields.EmailField` - :ivar role: :class:`~syncano.models.fields.ChoiceField` - :ivar key: :class:`~syncano.models.fields.StringField` - :ivar state: :class:`~syncano.models.fields.StringField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - :ivar created_at: :class:`~syncano.models.fields.DateTimeField` - :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` - """ - - LINKS = ( - {'type': 'detail', 'name': 'self'}, - ) - - email = fields.EmailField(max_length=254) - role = fields.ChoiceField(choices=Admin.ROLE_CHOICES) - key = fields.StringField(read_only=True, required=False) - state = fields.StringField(read_only=True, required=False) - links = fields.HyperlinkedField(links=LINKS) - created_at = fields.DateTimeField(read_only=True, required=False) - updated_at = fields.DateTimeField(read_only=True, required=False) - - class Meta: - parent = Instance - name = 'Invitation' - endpoints = { - 'detail': { - 'methods': ['get', 'delete'], - 'path': '/invitations/{id}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/invitations/', - } - } - - -class Object(Model): - """ - OO wrapper around data objects `endpoint `_. - - :ivar revision: :class:`~syncano.models.fields.IntegerField` - :ivar created_at: :class:`~syncano.models.fields.DateTimeField` - :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` - :ivar owner: :class:`~syncano.models.fields.IntegerField` - :ivar owner_permissions: :class:`~syncano.models.fields.ChoiceField` - :ivar group: :class:`~syncano.models.fields.IntegerField` - :ivar group_permissions: :class:`~syncano.models.fields.ChoiceField` - :ivar other_permissions: :class:`~syncano.models.fields.ChoiceField` - :ivar channel: :class:`~syncano.models.fields.StringField` - :ivar channel_room: :class:`~syncano.models.fields.StringField` - - .. note:: - This model is special because each instance will be **dynamically populated** - with fields defined in related :class:`~syncano.models.base.Class` schema attribute. 
- """ - - PERMISSIONS_CHOICES = ( - {'display_name': 'None', 'value': 'none'}, - {'display_name': 'Read', 'value': 'read'}, - {'display_name': 'Write', 'value': 'write'}, - {'display_name': 'Full', 'value': 'full'}, - ) - - revision = fields.IntegerField(read_only=True, required=False) - created_at = fields.DateTimeField(read_only=True, required=False) - updated_at = fields.DateTimeField(read_only=True, required=False) - - owner = fields.IntegerField(label='owner id', required=False, read_only=True) - owner_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') - group = fields.IntegerField(label='group id', required=False) - group_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') - other_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') - channel = fields.StringField(required=False) - channel_room = fields.StringField(required=False, max_length=64) - - please = ObjectManager() - - class Meta: - parent = Class - endpoints = { - 'detail': { - 'methods': ['delete', 'post', 'patch', 'get'], - 'path': '/objects/{id}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/objects/', - } - } - - @staticmethod - def __new__(cls, **kwargs): - instance_name = kwargs.get('instance_name') - class_name = kwargs.get('class_name') - - if not instance_name: - raise SyncanoValidationError('Field "instance_name" is required.') - - if not class_name: - raise SyncanoValidationError('Field "class_name" is required.') - - model = cls.get_subclass_model(instance_name, class_name) - return model(**kwargs) - - @classmethod - def create_subclass(cls, name, schema): - attrs = { - 'Meta': deepcopy(Object._meta), - '__new__': Model.__new__, # We don't want to have maximum recursion depth exceeded error - } - - for field in schema: - field_type = field.get('type') - field_class = fields.MAPPING[field_type] - query_allowed = ('order_index' in field or 'filter_index' in field) - attrs[field['name']] = field_class(required=False, read_only=False, - query_allowed=query_allowed) - - return type(str(name), (Object, ), attrs) - - @classmethod - def get_or_create_subclass(cls, name, schema): - try: - subclass = registry.get_model_by_name(name) - except LookupError: - subclass = cls.create_subclass(name, schema) - registry.add(name, subclass) - - return subclass - - @classmethod - def get_subclass_name(cls, instance_name, class_name): - return get_class_name(instance_name, class_name, 'object') - - @classmethod - def get_class_schema(cls, instance_name, class_name): - parent = cls._meta.parent - class_ = parent.please.get(instance_name, class_name) - return class_.schema - - @classmethod - def get_subclass_model(cls, instance_name, class_name, **kwargs): - """ - Creates custom :class:`~syncano.models.base.Object` sub-class definition based - on passed **instance_name** and **class_name**. - """ - model_name = cls.get_subclass_name(instance_name, class_name) - - if cls.__name__ == model_name: - return cls - - try: - model = registry.get_model_by_name(model_name) - except LookupError: - schema = cls.get_class_schema(instance_name, class_name) - model = cls.create_subclass(model_name, schema) - registry.add(model_name, model) - - return model - - -class Trigger(Model): - """ - OO wrapper around triggers `endpoint `_. 
- - :ivar codebox: :class:`~syncano.models.fields.IntegerField` - :ivar klass: :class:`~syncano.models.fields.StringField` - :ivar signal: :class:`~syncano.models.fields.ChoiceField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - :ivar created_at: :class:`~syncano.models.fields.DateTimeField` - :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` - """ - - LINKS = ( - {'type': 'detail', 'name': 'self'}, - {'type': 'detail', 'name': 'codebox'}, - {'type': 'detail', 'name': 'klass'}, - {'type': 'detail', 'name': 'traces'}, - ) - SIGNAL_CHOICES = ( - {'display_name': 'post_update', 'value': 'post_update'}, - {'display_name': 'post_create', 'value': 'post_create'}, - {'display_name': 'post_delete', 'value': 'post_delete'}, - ) - - codebox = fields.IntegerField(label='codebox id') - klass = fields.StringField(label='class name') - signal = fields.ChoiceField(choices=SIGNAL_CHOICES) - links = fields.HyperlinkedField(links=LINKS) - created_at = fields.DateTimeField(read_only=True, required=False) - updated_at = fields.DateTimeField(read_only=True, required=False) - - class Meta: - parent = Instance - endpoints = { - 'detail': { - 'methods': ['put', 'get', 'patch', 'delete'], - 'path': '/triggers/{id}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/triggers/', - } - } - - -class WebhookResult(object): - """ - OO wrapper around result of :meth:`~syncano.models.base.Webhook.run` method. - """ - def __init__(self, status, duration, result, executed_at): - self.status = status - self.duration = duration - self.result = result - self.executed_at = executed_at - - if isinstance(executed_at, six.string_types): - executed_at = executed_at.split('Z')[0] - self.executed_at = datetime.strptime(executed_at, '%Y-%m-%dT%H:%M:%S.%f') - - -class Webhook(Model): - """ - OO wrapper around webhooks `endpoint `_. - - :ivar slug: :class:`~syncano.models.fields.SlugField` - :ivar codebox: :class:`~syncano.models.fields.IntegerField` - :ivar links: :class:`~syncano.models.fields.HyperlinkedField` - - .. 
note:: - **WebHook** has special method called ``run`` which will execute related codebox:: - - >>> Webhook.please.run('instance-name', 'webhook-slug') - >>> Webhook.please.run('instance-name', 'webhook-slug', payload={'variable_one': 1, 'variable_two': 2}) - >>> Webhook.please.run('instance-name', 'webhook-slug', - payload="{\"variable_one\": 1, \"variable_two\": 2}") - - or via instance:: - - >>> wh = Webhook.please.get('instance-name', 'webhook-slug') - >>> wh.run() - >>> wh.run(variable_one=1, variable_two=2) - - """ - RESULT_CLASS = WebhookResult - - LINKS = ( - {'type': 'detail', 'name': 'self'}, - {'type': 'detail', 'name': 'codebox'}, - ) - - slug = fields.SlugField(max_length=50, primary_key=True) - codebox = fields.IntegerField(label='codebox id') - public = fields.BooleanField(required=False, default=False) - public_link = fields.ChoiceField(required=False, read_only=True) - links = fields.HyperlinkedField(links=LINKS) - - please = WebhookManager() - - class Meta: - parent = Instance - endpoints = { - 'detail': { - 'methods': ['put', 'get', 'patch', 'delete'], - 'path': '/webhooks/{slug}/', - }, - 'list': { - 'methods': ['post', 'get'], - 'path': '/webhooks/', - }, - 'run': { - 'methods': ['post'], - 'path': '/webhooks/{slug}/run/', - }, - 'public': { - 'methods': ['get'], - 'path': 'webhooks/p/{public_link}/', - } - } - - def run(self, **payload): - """ - Usage:: - - >>> wh = Webhook.please.get('instance-name', 'webhook-slug') - >>> wh.run() - >>> wh.run(variable_one=1, variable_two=2) - """ - if self.is_new(): - raise SyncanoValidationError('Method allowed only on existing model.') - - properties = self.get_endpoint_data() - endpoint = self._meta.resolve_endpoint('run', properties) - connection = self._get_connection(**payload) - request = { - 'data': { - 'payload': json.dumps(payload) - } - } - response = connection.request('POST', endpoint, **request) - return self.RESULT_CLASS(**response) +from .archetypes import * # NOQA +from .fields import * # NOQA +from .instances import * # NOQA +from .accounts import * # NOQA +from .billing import * # NOQA +from .channels import * # NOQA +from .classes import * # NOQA +from .data_views import * # NOQA +from .incentives import * # NOQA +from .traces import * # NOQA +from .push_notification import * # NOQA +from .geo import * # NOQA +from .backups import * # NOQA +from .hosting import Hosting, HostingFile # NOQA +from .data_views import DataEndpoint as EndpointData # NOQA +from .custom_sockets import * # NOQA +from .custom_sockets_utils import Endpoint, ScriptCall, ScriptDependency, ClassDependency # NOQA diff --git a/syncano/models/billing.py b/syncano/models/billing.py new file mode 100644 index 0000000..89ee8ab --- /dev/null +++ b/syncano/models/billing.py @@ -0,0 +1,72 @@ + + +from . import fields +from .base import Model + + +class Coupon(Model): + """ + OO wrapper around coupons `link `_. 
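# --- Illustrative sketch of the billing models defined in this hunk; the coupon
# values are placeholders and a configured connection is assumed.
from datetime import date

from syncano.models.billing import Coupon

coupon = Coupon(
    name='SPRING20',
    redeem_by=date(2016, 6, 1),
    percent_off=20,
    currency='usd',
    duration=1,
)
coupon.save()    # POST /v1.1/billing/coupons/
coupon.delete()  # DELETE /v1.1/billing/coupons/SPRING20/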
+ + :ivar name: :class:`~syncano.models.fields.StringField` + :ivar redeem_by: :class:`~syncano.models.fields.DateField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar percent_off: :class:`~syncano.models.fields.IntegerField` + :ivar amount_off: :class:`~syncano.models.fields.FloatField` + :ivar currency: :class:`~syncano.models.fields.ChoiceField` + :ivar duration: :class:`~syncano.models.fields.IntegerField` + """ + + CURRENCY_CHOICES = ( + {'display_name': 'USD', 'value': 'usd'}, + ) + + name = fields.StringField(max_length=32, primary_key=True) + redeem_by = fields.DateField() + links = fields.LinksField() + percent_off = fields.IntegerField(required=False) + amount_off = fields.FloatField(required=False) + currency = fields.ChoiceField(choices=CURRENCY_CHOICES) + duration = fields.IntegerField(default=0) + + class Meta: + endpoints = { + 'detail': { + 'methods': ['get', 'delete'], + 'path': '/v1.1/billing/coupons/{name}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/v1.1/billing/coupons/', + } + } + + +class Discount(Model): + """ + OO wrapper around discounts `link `_. + + :ivar instance: :class:`~syncano.models.fields.ModelField` + :ivar coupon: :class:`~syncano.models.fields.ModelField` + :ivar start: :class:`~syncano.models.fields.DateField` + :ivar end: :class:`~syncano.models.fields.DateField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + """ + + instance = fields.ModelField('Instance') + coupon = fields.ModelField('Coupon') + start = fields.DateField(read_only=True, required=False) + end = fields.DateField(read_only=True, required=False) + links = fields.LinksField() + + class Meta: + endpoints = { + 'detail': { + 'methods': ['get'], + 'path': '/v1.1/billing/discounts/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/v1.1/billing/discounts/', + } + } diff --git a/syncano/models/bulk.py b/syncano/models/bulk.py new file mode 100644 index 0000000..4786ff1 --- /dev/null +++ b/syncano/models/bulk.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +from abc import ABCMeta, abstractmethod + +import six +from syncano.exceptions import SyncanoValidationError, SyncanoValueError + + +class BaseBulkCreate(six.with_metaclass(ABCMeta)): + """ + Helper class for making bulk create; + + Usage: + instances = ObjectBulkCreate(objects, manager).process() + """ + MAX_BATCH_SIZE = 50 + + @abstractmethod + def __init__(self, objects, manager): + self.objects = objects + self.manager = manager + self.response = None + self.validated = False + + def validate(self): + if len(self.objects) > self.MAX_BATCH_SIZE: + raise SyncanoValueError('Only 50 objects can be created at once.') + + def make_batch_request(self): + if not self.validated: + raise SyncanoValueError('Bulk create not validated') + self.response = self.manager.batch(*[o.save() for o in self.objects]) + + def update_response(self, content_reponse): + content_reponse.update(self.manager.properties) + + def process(self): + self.validate() + self.make_batch_request() + return self.response + + +class ObjectBulkCreate(BaseBulkCreate): + + def __init__(self, objects, manager): + super(ObjectBulkCreate, self).__init__(objects, manager) + + def validate(self): + super(ObjectBulkCreate, self).validate() + + class_names = [] + instance_names = [] + # mark objects as lazy & make some check btw; + for o in self.objects: + class_names.append(o.class_name) + instance_names.append(o.instance_name) + o.mark_for_batch() + + if len(set(class_names)) != 1: + raise 
SyncanoValidationError('Bulk create can handle only objects of the same type.') + + if len(set(instance_names)) != 1: + raise SyncanoValidationError('Bulk create can handle only one instance.') + self.validated = True + + def update_response(self, content_reponse): + super(ObjectBulkCreate, self).update_response(content_reponse) + content_reponse.update( + { + 'class_name': self.objects[0].class_name, + 'instance_name': self.objects[0].instance_name + } + ) + + +class ModelBulkCreate(BaseBulkCreate): + + def __init__(self, objects, manager): + super(ModelBulkCreate, self).__init__(objects, manager) + + def validate(self): + super(ModelBulkCreate, self).validate() + + class_names = [] + # mark objects as lazy & make some check btw; + for o in self.objects: + class_names.append(type(o)) + o.mark_for_batch() + + if len(set(class_names)) != 1: + raise SyncanoValidationError('Bulk create can handle only objects of the same type.') + + self.validated = True diff --git a/syncano/models/channels.py b/syncano/models/channels.py index ae03155..65bdbd3 100644 --- a/syncano/models/channels.py +++ b/syncano/models/channels.py @@ -2,10 +2,11 @@ import six from requests import Timeout - from syncano import logger -from .base import Instance, Model, fields +from . import fields +from .base import Model +from .instances import Instance class PollThread(Thread): @@ -65,7 +66,7 @@ class Channel(Model): """ .. _long polling: http://en.wikipedia.org/wiki/Push_technology#Long_polling - OO wrapper around channels `endpoint `_. + OO wrapper around channels `link http://docs.syncano.io/docs/realtime-communication`_. :ivar name: :class:`~syncano.models.fields.StringField` :ivar type: :class:`~syncano.models.fields.ChoiceField` @@ -103,11 +104,12 @@ class Channel(Model): ) name = fields.StringField(max_length=64, primary_key=True) - type = fields.ChoiceField(choices=TYPE_CHOICES, required=False) + type = fields.ChoiceField(choices=TYPE_CHOICES, required=False, default='default') group = fields.IntegerField(label='group id', required=False) group_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') other_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') - custom_publish = fields.BooleanField(default=False) + custom_publish = fields.BooleanField(default=False, required=False) + links = fields.LinksField() class Meta: parent = Instance @@ -136,7 +138,7 @@ class Meta: def poll(self, room=None, last_id=None, callback=None, error=None, timeout=None): properties = self.get_endpoint_data() - endpoint = self._meta.resolve_endpoint('poll', properties) + endpoint = self._meta.resolve_endpoint('poll', properties, http_method='GET') connection = self._get_connection() thread = PollThread(connection, endpoint, callback, error, timeout=timeout, @@ -146,16 +148,17 @@ def poll(self, room=None, last_id=None, callback=None, error=None, timeout=None) def publish(self, payload, room=None): properties = self.get_endpoint_data() - endpoint = self._meta.resolve_endpoint('publish', properties) + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('publish', properties, http_method) connection = self._get_connection() request = {'data': Message(payload=payload, room=room).to_native()} - response = connection.request('POST', endpoint, **request) + response = connection.request(http_method, endpoint, **request) return Message(**response) class Message(Model): """ - OO wrapper around channel hisotry `endpoint `_. 
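# --- Illustrative sketch of the Channel methods touched in the hunk above
# (poll/publish); 'my-instance' and 'notifications' are placeholder names, a
# configured connection is assumed, and the dict payload is an assumption
# about the Message payload field.
from syncano.models.channels import Channel

channel = Channel.please.get('my-instance', 'notifications')

# publish() resolves the 'publish' endpoint for POST and returns a Message
message = channel.publish({'text': 'hello'})


# poll() resolves the 'poll' endpoint for GET and hands it to a PollThread,
# which keeps long-polling and invokes the callback for every new message
def on_message(msg):
    print(msg.payload)


channel.poll(callback=on_message)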
+ OO wrapper around channel hisotry `link http://docs.syncano.io/docs/realtime-communication`_. :ivar room: :class:`~syncano.models.fields.StringField` :ivar action: :class:`~syncano.models.fields.ChoiceField` @@ -187,7 +190,7 @@ class Meta: 'path': '/history/{pk}/', }, 'list': { - 'methods': ['get'], + 'methods': ['get', 'post'], 'path': '/history/', }, } diff --git a/syncano/models/classes.py b/syncano/models/classes.py new file mode 100644 index 0000000..0027c04 --- /dev/null +++ b/syncano/models/classes.py @@ -0,0 +1,270 @@ + + +from copy import deepcopy + +from syncano.exceptions import SyncanoValidationError +from syncano.utils import get_class_name + +from . import fields +from .base import Model +from .instances import Instance +from .manager import ObjectManager +from .registry import registry + + +class Class(Model): + """ + OO wrapper around instance classes `link `_. + + :ivar name: :class:`~syncano.models.fields.StringField` + :ivar description: :class:`~syncano.models.fields.StringField` + :ivar objects_count: :class:`~syncano.models.fields.Field` + :ivar schema: :class:`~syncano.models.fields.SchemaField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar status: :class:`~syncano.models.fields.Field` + :ivar metadata: :class:`~syncano.models.fields.JSONField` + :ivar revision: :class:`~syncano.models.fields.IntegerField` + :ivar expected_revision: :class:`~syncano.models.fields.IntegerField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar group: :class:`~syncano.models.fields.IntegerField` + :ivar group_permissions: :class:`~syncano.models.fields.ChoiceField` + :ivar other_permissions: :class:`~syncano.models.fields.ChoiceField` + :ivar objects: :class:`~syncano.models.fields.RelatedManagerField` + + .. note:: + This model is special because each related :class:`~syncano.models.base.Object` will be + **dynamically populated** with fields defined in schema attribute. 
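# --- Illustrative sketch: a Class schema drives the dynamically generated
# Object model ('my-instance' and 'book' are placeholder names; a configured
# connection is assumed).
from syncano.models.classes import Class, Object

book_class = Class(
    instance_name='my-instance',
    name='book',
    schema=[
        {'name': 'title', 'type': 'string', 'filter_index': True},
        {'name': 'pages', 'type': 'integer'},
    ],
)
book_class.save()  # Class.save() also records the schema in the local registry

# 'title' and 'pages' become regular fields on the generated Object subclass
book = Object(instance_name='my-instance', class_name='book',
              title='Dune', pages=412)
book.save()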
+ """ + + PERMISSIONS_CHOICES = ( + {'display_name': 'None', 'value': 'none'}, + {'display_name': 'Read', 'value': 'read'}, + {'display_name': 'Create objects', 'value': 'create_objects'}, + ) + + name = fields.StringField(max_length=64, primary_key=True) + description = fields.StringField(read_only=False, required=False) + objects_count = fields.Field(read_only=True, required=False) + + schema = fields.SchemaField(read_only=False) + links = fields.LinksField() + status = fields.Field() + metadata = fields.JSONField(read_only=False, required=False) + + revision = fields.IntegerField(read_only=True, required=False) + expected_revision = fields.IntegerField(read_only=False, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + created_at = fields.DateTimeField(read_only=True, required=False) + + group = fields.IntegerField(label='group id', required=False) + group_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') + other_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, default='none') + + objects = fields.RelatedManagerField('Object') + + class Meta: + parent = Instance + plural_name = 'Classes' + endpoints = { + 'detail': { + 'methods': ['get', 'put', 'patch', 'delete'], + 'path': '/classes/{name}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/classes/', + } + } + + def save(self, **kwargs): + if self.schema: # do not allow add empty schema to registry; + registry.set_schema(self.name, self.schema.schema) # update the registry schema here; + return super(Class, self).save(**kwargs) + + +class Object(Model): + """ + OO wrapper around data objects `link `_. + + :ivar revision: :class:`~syncano.models.fields.IntegerField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + :ivar owner: :class:`~syncano.models.fields.IntegerField` + :ivar owner_permissions: :class:`~syncano.models.fields.ChoiceField` + :ivar group: :class:`~syncano.models.fields.IntegerField` + :ivar group_permissions: :class:`~syncano.models.fields.ChoiceField` + :ivar other_permissions: :class:`~syncano.models.fields.ChoiceField` + :ivar channel: :class:`~syncano.models.fields.StringField` + :ivar channel_room: :class:`~syncano.models.fields.StringField` + + .. note:: + This model is special because each instance will be **dynamically populated** + with fields defined in related :class:`~syncano.models.base.Class` schema attribute. 
+ """ + + PERMISSIONS_CHOICES = ( + {'display_name': 'None', 'value': 'none'}, + {'display_name': 'Read', 'value': 'read'}, + {'display_name': 'Write', 'value': 'write'}, + {'display_name': 'Full', 'value': 'full'}, + ) + + revision = fields.IntegerField(read_only=True, required=False) + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + owner = fields.IntegerField(label='owner id', required=False) + owner_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, required=False) + group = fields.IntegerField(label='group id', required=False) + group_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, required=False) + other_permissions = fields.ChoiceField(choices=PERMISSIONS_CHOICES, required=False) + channel = fields.StringField(required=False) + channel_room = fields.StringField(required=False, max_length=64) + + please = ObjectManager() + + class Meta: + parent = Class + endpoints = { + 'detail': { + 'methods': ['delete', 'post', 'patch', 'get'], + 'path': '/objects/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/objects/', + } + } + + @staticmethod + def __new__(cls, **kwargs): + instance_name = cls._get_instance_name(kwargs) + class_name = cls._get_class_name(kwargs) + if not instance_name: + raise SyncanoValidationError('Field "instance_name" is required.') + + if not class_name: + raise SyncanoValidationError('Field "class_name" is required.') + + model = cls.get_subclass_model(instance_name, class_name) + return model(**kwargs) + + @classmethod + def _set_up_object_class(cls, model): + pass + + @classmethod + def _get_instance_name(cls, kwargs): + return kwargs.get('instance_name') or registry.instance_name + + @classmethod + def _get_class_name(cls, kwargs): + return kwargs.get('class_name') + + @classmethod + def create_subclass(cls, name, schema): + meta = deepcopy(Object._meta) + attrs = { + 'Meta': meta, + '__new__': Model.__new__, # We don't want to have maximum recursion depth exceeded error + 'please': ObjectManager() + } + + model = type(str(name), (Model, ), attrs) + + for field in schema: + field_type = field.get('type') + field_class = fields.MAPPING[field_type] + query_allowed = ('order_index' in field or 'filter_index' in field) + field_class(required=False, read_only=False, query_allowed=query_allowed).contribute_to_class( + model, field.get('name') + ) + + for field in meta.fields: + if field.primary_key: + setattr(model, 'pk', field) + setattr(model, field.name, field) + + cls._set_up_object_class(model) + return model + + @classmethod + def get_or_create_subclass(cls, name, schema): + try: + subclass = registry.get_model_by_name(name) + except LookupError: + subclass = cls.create_subclass(name, schema) + registry.add(name, subclass) + return subclass + + @classmethod + def get_subclass_name(cls, instance_name, class_name): + return get_class_name(instance_name, class_name, 'object') + + @classmethod + def get_class_schema(cls, instance_name, class_name): + schema = registry.get_schema(class_name) + if not schema: + parent = cls._meta.parent + schema = parent.please.get(instance_name, class_name).schema + if schema: # do not allow to add to registry empty schema; + registry.set_schema(class_name, schema) + return schema + + @classmethod + def get_subclass_model(cls, instance_name, class_name, **kwargs): + """ + Creates custom :class:`~syncano.models.base.Object` sub-class definition based + on passed **instance_name** and **class_name**. 
+ """ + model_name = cls.get_subclass_name(instance_name, class_name) + + if cls.__name__ == model_name: + return cls + + try: + model = registry.get_model_by_name(model_name) + except LookupError: + parent = cls._meta.parent + schema = parent.please.get(instance_name, class_name).schema + model = cls.create_subclass(model_name, schema) + registry.add(model_name, model) + + schema = cls.get_class_schema(instance_name, class_name) + + for field in schema: + try: + getattr(model, field['name']) + except AttributeError: + # schema changed, update the registry; + model = cls.create_subclass(model_name, schema) + registry.update(model_name, model) + break + + return model + + +class DataObjectMixin(object): + + @classmethod + def _get_instance_name(cls, kwargs): + return cls.please.properties.get('instance_name') or kwargs.get('instance_name') + + @classmethod + def _get_class_name(cls, kwargs): + return cls.PREDEFINED_CLASS_NAME + + @classmethod + def get_class_object(cls): + return Class.please.get(name=cls.PREDEFINED_CLASS_NAME) + + @classmethod + def _set_up_object_class(cls, model): + for field in model._meta.fields: + if field.has_endpoint_data and field.name == 'class_name': + if not getattr(model, field.name, None): + setattr(model, field.name, getattr(cls, 'PREDEFINED_CLASS_NAME', None)) + setattr(model, 'get_class_object', cls.get_class_object) + setattr(model, '_get_instance_name', cls._get_instance_name) + setattr(model, '_get_class_name', cls._get_class_name) diff --git a/syncano/models/custom_response.py b/syncano/models/custom_response.py new file mode 100644 index 0000000..3881b6d --- /dev/null +++ b/syncano/models/custom_response.py @@ -0,0 +1,125 @@ +import json + +from syncano.exceptions import SyncanoException + + +class CustomResponseHandler(object): + """ + A helper class which allows to define and maintain custom response handlers. + + Consider an example: + Script code:: + + set_response(HttpResponse(status_code=200, content='{"one": 1}', content_type='application/json')) + + When suitable ScriptTrace is used:: + + trace = ScriptTrace.please.get(id=, script=) + + Then trace object will have a content attribute, which will be a dict created from json (simple: json.loads under + the hood); + + So this is possible:: + + trace.content['one'] + + And the trace.content is equal to:: + + {'one': 1} + + The handler can be easily overwrite:: + + def custom_handler(response): + return json.loads(response['response']['content'])['one'] + + trace.response_handler.overwrite_handler('application/json', custom_handler) + + or globally:: + + ScriptTrace.response_handler.overwrite_handler('application/json', custom_handler) + + Then trace.content is equal to:: + 1 + + Currently supported content_types (but any handler can be defined): + * application/json + * text/plain + + """ + def __init__(self): + self.handlers = {} + self.register_handler('application/json', self.json_handler) + self.register_handler('plain/text', self.plain_handler) + + def register_handler(self, content_type, handler): + if content_type in self.handlers: + raise SyncanoException('Handler "{}" already defined. User overwrite_handler instead.'.format(content_type)) + self.handlers[content_type] = handler + + def overwrite_handler(self, content_type, handler): + if content_type not in self.handlers: + raise SyncanoException('Handler "{}" not defined. 
User register_handler instead.'.format(content_type)) + self.handlers[content_type] = handler + + def process_response(self, response): + content_type = self._find_content_type(response) + try: + return self.handlers[content_type](response) + except KeyError: + return self._default_handler(response) + + @staticmethod + def _find_content_type(response): + if not response: + return None + return response.get('response', {}).get('content_type') + + @staticmethod + def _default_handler(response): + if not response: + return None + + if 'response' in response: + return response['response'] + if 'stdout' in response: + return response['stdout'] + + return response + + @staticmethod + def json_handler(response): + return json.loads(response['response']['content']) + + @staticmethod + def plain_handler(response): + return response['response']['content'] + + +class CustomResponseMixin(object): + """ + A mixin which extends the Script and ScriptEndpoint traces (and any other Model - if used) with following fields: + * content - This is the response data if set_response is used in Script code, otherwise it is the 'stdout' field; + * content_type - The content_type specified by the user in Script code; + * status_code - The status_code specified by the user in Script code; + * error - An error which can occur when code is executed: the stderr response field; + + To process the content based on content_type this Mixin uses the CustomResponseHandler - see the docs there. + """ + + response_handler = CustomResponseHandler() + + @property + def content(self): + return self.response_handler.process_response(self.result) + + @property + def status_code(self): + return self.result.get('response', {}).get('status') if self.result else None + + @property + def error(self): + return self.result.get('stderr') if self.result else None + + @property + def content_type(self): + return self.result.get('response', {}).get('content_type') if self.result else None diff --git a/syncano/models/custom_sockets.py b/syncano/models/custom_sockets.py new file mode 100644 index 0000000..3e791ec --- /dev/null +++ b/syncano/models/custom_sockets.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +from syncano.exceptions import SyncanoValueError +from syncano.models.custom_sockets_utils import DependencyMetadataMixin, EndpointMetadataMixin + +from . import fields +from .base import Instance, Model + + +class CustomSocket(EndpointMetadataMixin, DependencyMetadataMixin, Model): + """ + OO wrapper around instance custom sockets. + Look at the custom socket documentation for more details. 
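# --- Illustrative sketch of the CustomResponseMixin properties exposed on
# trace models such as ScriptTrace (the id/script values are placeholders and
# a configured connection is assumed).
from syncano.models.traces import ScriptTrace

trace = ScriptTrace.please.get(id=123, script=456)
trace.content       # result parsed by the handler registered for its content_type
trace.content_type  # e.g. 'application/json'
trace.status_code   # status set via set_response() in the Script, else None
trace.error         # contents of 'stderr', if the Script failed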
+ + :ivar name: :class:`~syncano.models.fields.StringField` + :ivar endpoints: :class:`~syncano.models.fields.JSONField` + :ivar dependencies: :class:`~syncano.models.fields.JSONField` + :ivar metadata: :class:`~syncano.models.fields.JSONField` + :ivar status: :class:`~syncano.models.fields.StringField` + :ivar status_info: :class:`~syncano.models.fields.StringField` + :ivar links: :class:`~syncano.models.fields.LinksField` + """ + + name = fields.StringField(max_length=64, primary_key=True) + description = fields.StringField(required=False) + endpoints = fields.JSONField() + dependencies = fields.JSONField() + metadata = fields.JSONField(required=False) + config = fields.JSONField(required=False) + status = fields.StringField(read_only=True, required=False) + status_info = fields.StringField(read_only=True, required=False) + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + links = fields.LinksField() + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['get', 'put', 'patch', 'delete'], + 'path': '/sockets/{name}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/sockets/', + } + } + + def get_endpoints(self): + return SocketEndpoint.get_all_endpoints(instance_name=self.instance_name) + + def run(self, endpoint_name, method='GET', data=None): + endpoint = self._find_endpoint(endpoint_name) + return endpoint.run(method=method, data=data or {}) + + def _find_endpoint(self, endpoint_name): + endpoints = self.get_endpoints() + for endpoint in endpoints: + if '{}/{}'.format(self.name, endpoint_name) == endpoint.name: + return endpoint + raise SyncanoValueError('Endpoint {} not found.'.format(endpoint_name)) + + def install_from_url(self, url, instance_name=None, config=None): + instance_name = self.__class__.please.properties.get('instance_name') or instance_name + instance = Instance.please.get(name=instance_name) + + install_path = instance.links.sockets_install + connection = self._get_connection() + config = config or {} + response = connection.request('POST', install_path, data={ + 'name': self.name, + 'install_url': url, + 'config': config + }) + + return response + + def install(self): + if not self.is_new(): + raise SyncanoValueError('Custom socket already installed.') + + created_socket = self.__class__.please.create( + name=self.name, + endpoints=self.endpoints_data, + dependencies=self.dependencies_data + ) + + created_socket._raw_data['links'] = created_socket._raw_data['links'].links_dict + self.to_python(created_socket._raw_data) + return self + + def update(self): + if self.is_new(): + raise SyncanoValueError('Install socket first.') + + update_socket = self.__class__.please.update( + name=self.name, + endpoints=self.endpoints_data, + dependencies=self.dependencies_data + ) + + update_socket._raw_data['links'] = update_socket._raw_data['links'].links_dict + self.to_python(update_socket._raw_data) + return self + + def recheck(self): + recheck_path = self.links.recheck + connection = self._get_connection() + rechecked_socket = connection.request('POST', recheck_path) + self.to_python(rechecked_socket) + return self + + +class SocketEndpoint(Model): + """ + OO wrapper around endpoints defined in CustomSocket instance. + Look at the custom socket documentation for more details. 
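# --- Illustrative sketch: calling an endpoint of an already installed socket
# through CustomSocket.run(), which looks the endpoint up by name and delegates
# to SocketEndpoint.run(). 'my-instance', 'chat_socket' and 'send' are
# placeholders; a configured connection is assumed.
from syncano.models.custom_sockets import CustomSocket

socket = CustomSocket.please.get('my-instance', 'chat_socket')

result = socket.run('send', method='POST', data={'text': 'hi'})
# run() raises SyncanoValueError for unknown endpoints or for methods the
# endpoint does not allow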
+ + :ivar name: :class:`~syncano.models.fields.StringField` + :ivar calls: :class:`~syncano.models.fields.JSONField` + :ivar links: :class:`~syncano.models.fields.LinksField` + """ + name = fields.StringField(max_length=64, primary_key=True) + allowed_methods = fields.JSONField() + links = fields.LinksField() + + class Meta: + parent = CustomSocket + endpoints = { + 'detail': { + 'methods': ['get'], + 'path': '/endpoints/{name}/' + }, + 'list': { + 'methods': ['get'], + 'path': '/endpoints/' + } + } + + def run(self, method='GET', data=None): + endpoint_path = self.links.self + connection = self._get_connection() + if not self._validate_method(method): + raise SyncanoValueError('Method: {} not specified in calls for this custom socket.'.format(method)) + method = method.lower() + if method in ['get', 'delete']: + response = connection.request(method, endpoint_path) + elif method in ['post', 'put', 'patch']: + response = connection.request(method, endpoint_path, data=data or {}) + else: + raise SyncanoValueError('Method: {} not supported.'.format(method)) + return response + + @classmethod + def get_all_endpoints(cls, instance_name=None): + connection = cls._meta.connection + all_endpoints_path = Instance._meta.resolve_endpoint( + 'endpoints', + {'name': cls.please.properties.get('instance_name') or instance_name} + ) + response = connection.request('GET', all_endpoints_path) + return [cls(**endpoint) for endpoint in response['objects']] + + def _validate_method(self, method): + if '*' in self.allowed_methods or method in self.allowed_methods: + return True + return False diff --git a/syncano/models/custom_sockets_utils.py b/syncano/models/custom_sockets_utils.py new file mode 100644 index 0000000..45d1196 --- /dev/null +++ b/syncano/models/custom_sockets_utils.py @@ -0,0 +1,291 @@ +# -*- coding: utf-8 -*- +import six +from syncano.exceptions import SyncanoValueError + +from .classes import Class +from .incentives import Script, ScriptEndpoint + + +class CallType(object): + """ + The type of the call object used in the custom socket; + """ + SCRIPT = 'script' + + +class DependencyType(object): + """ + The type of the dependency object used in the custom socket; + """ + SCRIPT = 'script' + CLASS = 'class' + + +class BaseCall(object): + """ + Base class for call object. + """ + + call_type = None + + def __init__(self, name, methods): + self.name = name + self.methods = methods + + def to_dict(self): + if self.call_type is None: + raise SyncanoValueError('call_type not set.') + return { + 'type': self.call_type, + 'name': self.name, + 'methods': self.methods + } + + +class ScriptCall(BaseCall): + """ + Script call object. + + The JSON format is as follows (to_dict in the base class):: + + { + 'type': 'script', + 'name': ', + 'methods': [], + } + + methods can be as follows: + * ['GET'] + * ['*'] - which will do a call on every request method; + """ + call_type = CallType.SCRIPT + + +class Endpoint(object): + """ + The object which stores metadata about endpoints in custom socket; + + The JSON format is as follows:: + + { + '': { + 'calls': [ + + ] + } + } + + """ + def __init__(self, name): + self.name = name + self.calls = [] + + def add_call(self, call): + self.calls.append(call) + + def to_endpoint_data(self): + return { + self.name: { + 'calls': [call.to_dict() for call in self.calls] + } + } + + +class BaseDependency(object): + """ + Base dependency object; + + On the base of the fields attribute - the JSON format of the dependency is returned. 
+ The fields are taken from the dependency object - which can be Script (supported now). + """ + + fields = [] + dependency_type = None + name = None + + def to_dependency_data(self): + if self.dependency_type is None: + raise SyncanoValueError('dependency_type not set.') + dependency_data = {'type': self.dependency_type} + dependency_data.update(self.get_dependency_data()) + return dependency_data + + def get_name(self): + if self.name is not None: + return {'name': self.name} + return {'name': self.dependency_object.name} + + def get_dependency_data(self): + raise NotImplementedError() + + def create_from_raw_data(self, raw_data): + raise NotImplementedError() + + def _build_dict(self, instance): + return {field_name: getattr(instance, field_name) for field_name in self.fields} + + +class ScriptDependency(BaseDependency): + """ + Script dependency object; + + The JSON format is as follows:: + { + 'type': 'script', + 'runtime_name': '', + 'source': '', + 'name': '' + } + """ + + dependency_type = DependencyType.SCRIPT + fields = [ + 'runtime_name', + 'source' + ] + + def __init__(self, script_or_script_endpoint, name=None): + if not isinstance(script_or_script_endpoint, (Script, ScriptEndpoint)): + raise SyncanoValueError('Script or ScriptEndpoint expected.') + + if isinstance(script_or_script_endpoint, Script) and not name: + raise SyncanoValueError('Name should be provided.') + + self.dependency_object = script_or_script_endpoint + self.name = name + + def get_dependency_data(self): + + if isinstance(self.dependency_object, ScriptEndpoint): + script = Script.please.get(id=self.dependency_object.script, + instance_name=self.dependency_object.instance_name) + else: + script = self.dependency_object + + dependency_data = self.get_name() + dependency_data.update(self._build_dict(script)) + return dependency_data + + @classmethod + def create_from_raw_data(cls, raw_data): + return cls(**{ + 'script_or_script_endpoint': Script(source=raw_data['source'], runtime_name=raw_data['runtime_name']), + 'name': raw_data['name'], + }) + + +class ClassDependency(BaseDependency): + """ + Class dependency object; + + The JSON format is as follows:: + { + 'type': 'class', + 'name': '', + 'schema': [ + {"name": "f1", "type": "string"}, + {"name": "f2", "type": "string"}, + {"name": "f3", "type": "integer"} + ], + } + """ + dependency_type = DependencyType.CLASS + fields = [ + 'name', + 'schema' + ] + + def __init__(self, class_instance): + self.dependency_object = class_instance + self.name = class_instance.name + + def get_dependency_data(self): + data_dict = self._build_dict(self.dependency_object) + data_dict['schema'] = data_dict['schema'].schema + return data_dict + + @classmethod + def create_from_raw_data(cls, raw_data): + return cls(**{'class_instance': Class(**raw_data)}) + + +class EndpointMetadataMixin(object): + """ + A mixin which allows to collect Endpoints objects and transform them to the appropriate JSON format. 
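+
+    A rough sketch of the transformation, assuming ``socket`` is a ``CustomSocket``
+    instance (the endpoint and script names below are illustrative)::
+
+        >>> endpoint = Endpoint(name='hello')
+        >>> endpoint.add_call(ScriptCall(name='hello_script', methods=['GET']))
+        >>> socket.add_endpoint(endpoint)
+        >>> socket.endpoints_data
+        {'hello': {'calls': [{'type': 'script', 'name': 'hello_script', 'methods': ['GET']}]}}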
+ """ + + def __init__(self, *args, **kwargs): + self._endpoints = [] + super(EndpointMetadataMixin, self).__init__(*args, **kwargs) + if self.endpoints: + self.update_endpoints() + + def update_endpoints(self): + for raw_endpoint_name, raw_endpoint in six.iteritems(self.endpoints): + endpoint = Endpoint( + name=raw_endpoint_name, + ) + for call in raw_endpoint['calls']: + call_class = self._get_call_class(call['type']) + call_instance = call_class(name=call['name'], methods=call['methods']) + endpoint.add_call(call_instance) + + self.add_endpoint(endpoint) + + @classmethod + def _get_call_class(cls, call_type): + if call_type == CallType.SCRIPT: + return ScriptCall + + def add_endpoint(self, endpoint): + self._endpoints.append(endpoint) + + def remove_endpoint(self, endpoint_name): + for index, endpoint in enumerate(self._endpoints): + if endpoint.name == endpoint_name: + self._endpoints.pop(index) + break + + @property + def endpoints_data(self): + endpoints = {} + for endpoint in self._endpoints: + endpoints.update(endpoint.to_endpoint_data()) + return endpoints + + +class DependencyMetadataMixin(object): + """ + A mixin which allows to collect Dependencies objects and transform them to the appropriate JSON format. + """ + + def __init__(self, *args, **kwargs): + self._dependencies = [] + super(DependencyMetadataMixin, self).__init__(*args, **kwargs) + if self.dependencies: + self.update_dependencies() + + def update_dependencies(self): + for raw_depedency in self.dependencies: + depedency_class = self._get_depedency_klass(raw_depedency['type']) + self.add_dependency(depedency_class.create_from_raw_data(raw_depedency)) + + @classmethod + def _get_depedency_klass(cls, depedency_type): + if depedency_type == DependencyType.SCRIPT: + return ScriptDependency + elif depedency_type == DependencyType.CLASS: + return ClassDependency + + def add_dependency(self, depedency): + self._dependencies.append(depedency) + + def remove_dependency(self, dependency_name): + for index, dependency in enumerate(self._dependencies): + if dependency_name == getattr(dependency.dependency_object, dependency.id_name, None): + self._dependencies.pop(index) + break + + @property + def dependencies_data(self): + return [dependency.to_dependency_data() for dependency in self._dependencies] diff --git a/syncano/models/data_views.py b/syncano/models/data_views.py new file mode 100644 index 0000000..69dd488 --- /dev/null +++ b/syncano/models/data_views.py @@ -0,0 +1,132 @@ +import json + +import six +from syncano.exceptions import SyncanoValueError +from syncano.models.incentives import ResponseTemplate + +from . 
import fields +from .base import Model, Object +from .instances import Instance + + +class DataEndpoint(Model): + """ + :ivar name: :class:`~syncano.models.fields.StringField` + :ivar description: :class:`~syncano.models.fields.StringField` + :ivar query: :class:`~syncano.models.fields.SchemaField` + :ivar class_name: :class:`~syncano.models.fields.StringField` + :ivar excluded_fields: :class:`~syncano.models.fields.StringField` + :ivar expand: :class:`~syncano.models.fields.StringField` + :ivar order_by: :class:`~syncano.models.fields.StringField` + :ivar page_size: :class:`~syncano.models.fields.IntegerField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + """ + + PERMISSIONS_CHOICES = ( + {'display_name': 'None', 'value': 'none'}, + {'display_name': 'Read', 'value': 'read'}, + {'display_name': 'Write', 'value': 'write'}, + {'display_name': 'Full', 'value': 'full'}, + ) + + name = fields.StringField(max_length=64, primary_key=True) + description = fields.StringField(required=False) + + query = fields.JSONField(read_only=False, required=False) + + class_name = fields.StringField(label='class name', mapping='class') + + excluded_fields = fields.StringField(required=False) + expand = fields.StringField(required=False) + order_by = fields.StringField(required=False) + page_size = fields.IntegerField(required=False) + + links = fields.LinksField() + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['get', 'put', 'patch', 'delete'], + 'path': '/endpoints/data/{name}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/endpoints/data/', + }, + 'get': { + 'methods': ['get'], + 'path': '/endpoints/data/{name}/get/', + }, + 'rename': { + 'methods': ['post'], + 'path': '/endpoints/data/{name}/rename/', + }, + 'clear_cache': { + 'methods': ['post'], + 'path': '/endpoints/data/{name}/clear_cache/', + } + } + + def rename(self, new_name): + properties = self.get_endpoint_data() + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('rename', properties, http_method) + connection = self._get_connection() + return connection.request(http_method, + endpoint, + data={'new_name': new_name}) + + def clear_cache(self): + properties = self.get_endpoint_data() + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('clear_cache', properties, http_method) + connection = self._get_connection() + return connection.request(http_method, endpoint) + + def get(self, cache_key=None, response_template=None, **kwargs): + connection = self._get_connection() + properties = self.get_endpoint_data() + query = Object.please._build_query(query_data=kwargs, class_name=self.class_name) + + http_method = 'GET' + endpoint = self._meta.resolve_endpoint('get', properties, http_method) + + kwargs = {} + params = {} + params.update({'query': json.dumps(query)}) + + if cache_key is not None: + params = {'cache_key': cache_key} + + if params: + kwargs = {'params': params} + + if response_template: + template_name = self._get_response_template_name(response_template) + kwargs['headers'] = { + 'X-TEMPLATE-RESPONSE': template_name + } + + while endpoint is not None: + response = connection.request(http_method, endpoint, **kwargs) + if isinstance(response, six.string_types): + endpoint = None + yield response + else: + endpoint = response.get('next') + for obj in response['objects']: + yield obj + + def _get_response_template_name(self, response_template): + name = response_template + if isinstance(response_template, ResponseTemplate): + name = response_template.name + 
if not isinstance(name, six.string_types): + raise SyncanoValueError( + 'Invalid response_template. Must be template\'s name or ResponseTemplate object.' + ) + return name + + def add_object(self, **kwargs): + return Object(instance_name=self.instance_name, class_name=self.class_name, **kwargs).save() diff --git a/syncano/models/fields.py b/syncano/models/fields.py index e36e94d..967fd64 100644 --- a/syncano/models/fields.py +++ b/syncano/models/fields.py @@ -4,13 +4,28 @@ import six import validictory - -from syncano import logger +from syncano import PUSH_ENV, logger from syncano.exceptions import SyncanoFieldError, SyncanoValueError from syncano.utils import force_text -from .manager import RelatedManagerDescriptor, SchemaManager +from .geo import Distance, GeoPoint +from .manager import SchemaManager from .registry import registry +from .relations import RelationManager, RelationValidatorMixin + + +class JSONToPythonMixin(object): + + def to_python(self, value): + if value is None: + return + + if isinstance(value, six.string_types): + try: + value = json.loads(value) + except (ValueError, TypeError): + raise SyncanoValueError('Invalid value: can not be parsed') + return value class Field(object): @@ -26,8 +41,10 @@ class Field(object): has_endpoint_data = False query_allowed = True + allow_increment = False creation_counter = 0 + field_lookups = [] def __init__(self, name=None, **kwargs): self.name = name @@ -37,6 +54,7 @@ def __init__(self, name=None, **kwargs): self.read_only = kwargs.pop('read_only', self.read_only) self.blank = kwargs.pop('blank', self.blank) self.label = kwargs.pop('label', None) + self.mapping = kwargs.pop('mapping', None) self.max_length = kwargs.pop('max_length', None) self.min_length = kwargs.pop('min_length', None) self.query_allowed = kwargs.pop('query_allowed', self.query_allowed) @@ -60,6 +78,8 @@ def __eq__(self, other): def __lt__(self, other): if isinstance(other, Field): return self.creation_counter < other.creation_counter + if isinstance(other, int): + return self.creation_counter < other return NotImplemented def __hash__(self): # pragma: no cover @@ -74,12 +94,13 @@ def __unicode__(self): return six.u(repr(self)) def __get__(self, instance, owner): - return instance._raw_data.get(self.name, self.default) + if instance is not None: + return instance._raw_data.get(self.name, self.default) def __set__(self, instance, value): if self.read_only and value and instance._raw_data.get(self.name): - logger.warning('Field "{0}"" is read only, ' - 'your changes will not be saved.'.format(self.name)) + logger.debug('Field "{0}"" is read only, ' + 'your changes will not be saved.'.format(self.name)) instance._raw_data[self.name] = self.to_python(value) @@ -118,7 +139,7 @@ def to_native(self, value): """ return value - def to_query(self, value, lookup_type): + def to_query(self, value, lookup_type, **kwargs): """ Returns field's value prepared for usage in HTTP request query. 
""" @@ -157,6 +178,27 @@ def contribute_to_class(self, cls, name): setattr(self, 'ValidationError', error_class) +class RelatedManagerField(Field): + + def __init__(self, model_name, endpoint='list', *args, **kwargs): + super(RelatedManagerField, self).__init__(*args, **kwargs) + self.model_name = model_name + self.endpoint = endpoint + + def __get__(self, instance, owner=None): + if instance is None: + raise AttributeError("RelatedManager is accessible only via {0} instances.".format(owner.__name__)) + + Model = registry.get_model_by_name(self.model_name) + method = getattr(Model.please, self.endpoint, Model.please.all) + properties = instance._meta.get_endpoint_properties('detail') + properties = [getattr(instance, prop) for prop in properties] + return method(*properties) + + def contribute_to_class(self, cls, name): + setattr(cls, name, self) + + class PrimaryKeyField(Field): primary_key = True @@ -173,6 +215,16 @@ class EndpointField(WritableField): class StringField(WritableField): + field_lookups = [ + 'startswith', + 'endswith', + 'contains', + 'istartswith', + 'iendswith', + 'icontains', + 'ieq', + ] + def to_python(self, value): value = super(StringField, self).to_python(value) @@ -182,12 +234,13 @@ def to_python(self, value): class IntegerField(WritableField): + allow_increment = True def to_python(self, value): value = super(IntegerField, self).to_python(value) if value is None: - return value + return try: return int(value) except (TypeError, ValueError): @@ -207,12 +260,13 @@ def to_python(self, value): class FloatField(WritableField): + allow_increment = True def to_python(self, value): value = super(FloatField, self).to_python(value) if value is None: - return value + return try: return float(value) except (TypeError, ValueError): @@ -224,13 +278,13 @@ class BooleanField(WritableField): def to_python(self, value): value = super(BooleanField, self).to_python(value) - if value in (True, False): - return bool(value) + if value is None: + return - if value in ('t', 'True', '1'): + if value in (True, 't', 'true', 'True', '1'): return True - if value in ('f', 'False', '0'): + if value in (False, 'f', 'false', 'False', '0'): return False raise self.ValidationError('Invalid value. 
Value should be a boolean.') @@ -275,7 +329,7 @@ def __init__(self, *args, **kwargs): def validate(self, value, model_instance): super(ChoiceField, self).validate(value, model_instance) - if self.choices and value not in self.allowed_values: + if self.choices and value is not None and value not in self.allowed_values: raise self.ValidationError("Value '{0}' is not a valid choice.".format(value)) @@ -288,7 +342,7 @@ def to_python(self, value): value = super(DateField, self).to_python(value) if value is None: - return value + return if isinstance(value, datetime): return value.date() @@ -327,7 +381,7 @@ class DateTimeField(DateField): def to_python(self, value): if value is None: - return value + return if isinstance(value, dict) and 'type' in value and 'value' in value: value = value['value'] @@ -373,8 +427,8 @@ def parse_from_date(self, value): def to_native(self, value): if value is None: - return value - ret = value.isoformat() + return + ret = value.strftime(self.FORMAT) if ret.endswith('+00:00'): ret = ret[:-6] + 'Z' @@ -384,25 +438,42 @@ def to_native(self, value): return ret -class HyperlinkedField(Field): +class LinksWrapper(object): + + def __init__(self, links_dict, ignored_links): + self.links_dict = links_dict + self.ignored_links = ignored_links + + def __getattribute__(self, item): + try: + return super(LinksWrapper, self).__getattribute__(item) + except AttributeError: + value = self.links_dict.get(item) + if not value: + item = item.replace('_', '-') + value = self.links_dict.get(item) + + if not value: + raise + + return value + + def to_native(self): + return self.links_dict + + +class LinksField(Field): query_allowed = False IGNORED_LINKS = ('self', ) def __init__(self, *args, **kwargs): - self.links = kwargs.pop('links', []) - super(HyperlinkedField, self).__init__(*args, **kwargs) - - def contribute_to_class(self, cls, name): - super(HyperlinkedField, self).contribute_to_class(cls, name) + super(LinksField, self).__init__(*args, **kwargs) - for link in self.links: - name = link['name'] - endpoint = link['type'] - - if name in self.IGNORED_LINKS: - continue + def to_python(self, value): + return LinksWrapper(value, self.IGNORED_LINKS) - setattr(cls, name, RelatedManagerDescriptor(self, name, endpoint)) + def to_native(self, value): + return value.to_native() class ModelField(Field): @@ -410,6 +481,7 @@ class ModelField(Field): def __init__(self, rel, *args, **kwargs): self.rel = rel self.just_pk = kwargs.pop('just_pk', True) + self.is_data_object_mixin = kwargs.pop('is_data_object_mixin', False) super(ModelField, self).__init__(*args, **kwargs) def contribute_to_class(self, cls, name): @@ -433,15 +505,17 @@ def validate(self, value, model_instance): super(ModelField, self).validate(value, model_instance) if not isinstance(value, (self.rel, dict)): - raise self.ValidationError('Value needs to be a {0} instance.'.format(self.rel.__name__)) + if not isinstance(value, (self.rel, dict)) and not self.is_data_object_mixin: + raise self.ValidationError('Value needs to be a {0} instance.'.format(self.rel.__name__)) - if self.required and isinstance(value, self.rel): + if (self.required and isinstance(value, self.rel)) or \ + (self.is_data_object_mixin and hasattr(value, 'validate')): value.validate() def to_python(self, value): if value is None: - return value + return if isinstance(value, self.rel): return value @@ -453,7 +527,7 @@ def to_python(self, value): def to_native(self, value): if value is None: - return value + return if isinstance(value, self.rel): if not 
self.just_pk: @@ -463,10 +537,22 @@ def to_native(self, value): pk_value = getattr(value, pk_field.name) return pk_field.to_native(pk_value) + if self.is_data_object_mixin and not self.just_pk and hasattr(value, 'to_native'): + return value.to_native() + return value -class JSONField(WritableField): +class FileField(WritableField): + param_name = 'files' + + def to_native(self, value): + if isinstance(value, six.string_types): + return None + return {self.name: value} + + +class JSONField(JSONToPythonMixin, WritableField): query_allowed = False schema = None @@ -482,24 +568,64 @@ def validate(self, value, model_instance): except ValueError as e: raise self.ValidationError(e) - def to_python(self, value): - if value is None: - return value - - if isinstance(value, six.string_types): - value = json.loads(value) - return value - def to_native(self, value): if value is None: - return value + return if not isinstance(value, six.string_types): value = json.dumps(value) return value +class ArrayField(JSONToPythonMixin, WritableField): + + def validate(self, value, model_instance): + super(ArrayField, self).validate(value, model_instance) + + if not self.required and not value: + return + + if isinstance(value, six.string_types): + try: + value = json.loads(value) + except (ValueError, TypeError): + raise SyncanoValueError('Expected an array') + + if isinstance(value, dict): + if len(value) != 1 or len(set(value.keys()).intersection(['_add', '_remove', '_addunique'])) != 1: + raise SyncanoValueError('Wrong value: one operation at the time.') + + elif not isinstance(value, list): + raise SyncanoValueError('Expected an array') + + value_to_check = value if isinstance(value, list) else value.values()[0] + + for element in value_to_check: + if not isinstance(element, six.string_types + (bool, int, float)): + raise SyncanoValueError( + 'Currently supported types for array items are: string types, bool, float and int') + + +class ObjectField(JSONToPythonMixin, WritableField): + + def validate(self, value, model_instance): + super(ObjectField, self).validate(value, model_instance) + + if not self.required and not value: + return + + if isinstance(value, six.string_types): + try: + value = json.loads(value) + except (ValueError, TypeError): + raise SyncanoValueError('Expected an object') + + if not isinstance(value, dict): + raise SyncanoValueError('Expected an object') + + class SchemaField(JSONField): + required = False query_allowed = False not_indexable_types = ['text', 'file'] schema = { @@ -522,7 +648,11 @@ class SchemaField(JSONField): 'boolean', 'datetime', 'file', - 'reference' + 'reference', + 'relation', + 'array', + 'object', + 'geopoint', ], }, 'order_index': { @@ -542,6 +672,9 @@ class SchemaField(JSONField): } def validate(self, value, model_instance): + if value is None: + return + if isinstance(value, SchemaManager): value = value.schema @@ -571,16 +704,209 @@ def to_native(self, value): return super(SchemaField, self).to_native(value) +class PushJSONField(JSONField): + def to_native(self, value): + if value is None: + return + + if not isinstance(value, six.string_types): + if 'environment' not in value: + value.update({ + 'environment': PUSH_ENV, + }) + value = json.dumps(value) + return value + + +class ListField(WritableField): + + def validate(self, value, model_instance): + if value is None: + return + + if not isinstance(value, list): + raise self.ValidationError('List expected.') + + +class GeoPointField(Field): + + field_lookups = ['near', 'exists'] + + def validate(self, value, 
model_instance): + super(GeoPointField, self).validate(value, model_instance) + + if not self.required and not value: + return + + if isinstance(value, six.string_types): + try: + value = json.loads(value) + except (ValueError, TypeError): + raise SyncanoValueError('Expected an object') + + if not isinstance(value, GeoPoint): + raise SyncanoValueError('Expected a GeoPoint') + + def to_native(self, value): + if value is None: + return + + if isinstance(value, bool): + return value # exists lookup + + if isinstance(value, dict): + value = GeoPoint(latitude=value['latitude'], longitude=value['longitude']) + + if isinstance(value, tuple): + geo_struct = value[0].to_native() + else: + geo_struct = value.to_native() + + geo_struct = json.dumps(geo_struct) + + return geo_struct + + def to_query(self, value, lookup_type, **kwargs): + """ + Returns field's value prepared for usage in HTTP request query. + """ + super(GeoPointField, self).to_query(value, lookup_type, **kwargs) + + if lookup_type not in self.field_lookups: + raise SyncanoValueError('Lookup {} not supported for geopoint field'.format(lookup_type)) + + if lookup_type in ['exists']: + if isinstance(value, bool): + return value + else: + raise SyncanoValueError('Bool expected in {} lookup.'.format(lookup_type)) + + if isinstance(value, dict): + value = ( + GeoPoint(latitude=value.pop('latitude'), longitude=value.pop('longitude')), + Distance(**value) + ) + + if len(value) != 2 or not isinstance(value[0], GeoPoint) or not isinstance(value[1], Distance): + raise SyncanoValueError('This lookup should be a tuple with GeoPoint and Distance: ' + '__near=(GeoPoint(52.12, 22.12), Distance(kilometers=100))') + + query_dict = value[0].to_native() + query_dict.update(value[1].to_native()) + + return query_dict + + def to_python(self, value): + if value is None: + return + + value = self._process_string_types(value) + + if isinstance(value, GeoPoint): + return value + + latitude, longitude = self._process_value(value) + + if not latitude or not longitude: + raise SyncanoValueError('Expected the `longitude` and `latitude` fields.') + + return GeoPoint(latitude=latitude, longitude=longitude) + + @classmethod + def _process_string_types(cls, value): + if isinstance(value, six.string_types): + try: + return json.loads(value) + except (ValueError, TypeError): + raise SyncanoValueError('Invalid value: can not be parsed.') + return value + + @classmethod + def _process_value(cls, value): + longitude = None + latitude = None + + if isinstance(value, dict): + latitude = value.get('latitude') + longitude = value.get('longitude') + elif isinstance(value, (tuple, list)): + try: + latitude = value[0] + longitude = value[1] + except IndexError: + raise SyncanoValueError('Can not parse the geo point.') + + return latitude, longitude + + +class RelationField(RelationValidatorMixin, WritableField): + query_allowed = True + field_lookups = ['contains', 'is'] + + def __call__(self, instance, field_name): + return RelationManager(instance=instance, field_name=field_name) + + def to_python(self, value): + if not value: + return None + + if isinstance(value, dict) and 'type' in value and 'value' in value: + value = value['value'] + + if isinstance(value, dict) and ('_add' in value or '_remove' in value): + return value + + if not isinstance(value, (list, tuple)): + return [value] + + return value + + def to_query(self, value, lookup_type, related_field_name=None, related_field_lookup=None, **kwargs): + + if not self.query_allowed: + raise self.ValidationError('Query on 
this field is not supported.') + + if lookup_type not in self.field_lookups: + raise SyncanoValueError('Lookup {} not supported for relation field.'.format(lookup_type)) + + query_dict = {} + + if lookup_type == 'contains': + if self._check_relation_value(value): + value = [obj.id for obj in value] + query_dict = value + + if lookup_type == 'is': + query_dict = {related_field_name: {"_{0}".format(related_field_lookup): value}} + + return query_dict + + def to_native(self, value): + if not value: + return None + + if isinstance(value, dict) and ('_add' in value or '_remove' in value): + return value + + if not isinstance(value, (list, tuple)): + value = [value] + + if self._check_relation_value(value): + value = [obj.id for obj in value] + return value + + MAPPING = { 'string': StringField, 'text': StringField, - 'file': StringField, + 'file': FileField, 'ref': StringField, 'reference': ReferenceField, + 'relation': RelationField, 'integer': IntegerField, 'float': FloatField, 'boolean': BooleanField, - 'slug': SlugField, + 'name': SlugField, 'email': EmailField, 'choice': ChoiceField, 'date': DateField, @@ -588,8 +914,11 @@ def to_native(self, value): 'field': Field, 'writable': WritableField, 'endpoint': EndpointField, - 'links': HyperlinkedField, + 'links': LinksField, 'model': ModelField, 'json': JSONField, 'schema': SchemaField, + 'array': ArrayField, + 'object': ObjectField, + 'geopoint': GeoPointField, } diff --git a/syncano/models/geo.py b/syncano/models/geo.py new file mode 100644 index 0000000..3b1e68d --- /dev/null +++ b/syncano/models/geo.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +from syncano.exceptions import SyncanoValueError + + +class GeoPoint(object): + + def __init__(self, latitude, longitude): + self.latitude = latitude + self.longitude = longitude + + def __repr__(self): + return "GeoPoint(latitude={}, longitude={})".format(self.latitude, self.longitude) + + def to_native(self): + geo_struct_dump = {'latitude': self.latitude, 'longitude': self.longitude} + return geo_struct_dump + + +class Distance(object): + + KILOMETERS = '_in_kilometers' + MILES = '_in_miles' + + def __init__(self, kilometers=None, miles=None): + if kilometers is not None and miles is not None: + raise SyncanoValueError('`kilometers` and `miles` can not be set at the same time.') + + if kilometers is None and miles is None: + raise SyncanoValueError('`kilometers` or `miles` attribute should be specified.') + + self.distance = kilometers or miles + self.unit = self.KILOMETERS if kilometers is not None else self.MILES + + def to_native(self): + return { + 'distance{}'.format(self.unit): self.distance + } diff --git a/syncano/models/hosting.py b/syncano/models/hosting.py new file mode 100644 index 0000000..48b95e4 --- /dev/null +++ b/syncano/models/hosting.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- + +from . import fields +from .base import Model +from .instances import Instance + + +class Hosting(Model): + """ + OO wrapper around hosting. 
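+
+    A minimal sketch (the lookup arguments and file names below are illustrative,
+    not a definitive recipe)::
+
+        >>> hosting = Hosting.please.get(instance_name='my-instance', id=1)
+        >>> with open('index.html', 'rb') as fp:
+        ...     hosting.upload_file('index.html', fp)
+        >>> hosting.list_files()
+        >>> hosting.set_default()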
+ """ + + name = fields.StringField(max_length=253) + is_default = fields.BooleanField(read_only=True) + is_active = fields.BooleanField(default=True) + description = fields.StringField(read_only=False, required=False) + domains = fields.ListField(default=[]) + + links = fields.LinksField() + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['delete', 'get', 'put', 'patch'], + 'path': '/hosting/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/hosting/', + } + } + + def upload_file(self, path, file): + """ + Upload a new file to the hosting. + :param path: the file path; + :param file: the file to be uploaded; + :return: the response from the API; + """ + files_path = self.links.files + data = {'path': path} + connection = self._get_connection() + headers = self._prepare_header(connection) + response = connection.session.post('{}{}'.format(connection.host, files_path), headers=headers, + data=data, files=[('file', file)]) + if response.status_code != 201: + return + return HostingFile(**response.json()) + + def update_file(self, path, file): + """ + Updates an existing file. + :param path: the file path; + :param file: the file to be uploaded; + :return: the response from the API; + """ + hosting_files = self._get_files() + is_found = False + + for hosting_file in hosting_files: + if hosting_file.path == path: + is_found = True + break + + if not is_found: + # create if not found; + hosting_file = self.upload_file(path, file) + return hosting_file + + connection = self._get_connection() + headers = self._prepare_header(connection) + response = connection.session.patch('{}{}'.format(connection.host, hosting_file.links.self), headers=headers, + files=[('file', file)]) + if response.status_code != 200: + return + return HostingFile(**response.json()) + + def list_files(self): + return self._get_files() + + def set_default(self): + default_path = self.links.set_default + connection = self._get_connection() + + response = connection.make_request('POST', default_path) + self.to_python(response) + return self + + def _prepare_header(self, connection): + params = connection.build_params(params={}) + headers = params['headers'] + headers.pop('content-type') + return headers + + def _get_files(self): + return [hfile for hfile in HostingFile.please.list(hosting_id=self.id)] + + +class HostingFile(Model): + """ + OO wrapper around hosting file. + """ + + path = fields.StringField(max_length=300) + file = fields.FileField() + links = fields.LinksField() + + class Meta: + parent = Hosting + endpoints = { + 'detail': { + 'methods': ['delete', 'get', 'put', 'patch'], + 'path': '/files/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/files/', + } + } diff --git a/syncano/models/incentives.py b/syncano/models/incentives.py new file mode 100644 index 0000000..84ef2e4 --- /dev/null +++ b/syncano/models/incentives.py @@ -0,0 +1,363 @@ + + +import json + +from syncano.exceptions import SyncanoValidationError + +from . 
import fields +from .base import Model +from .instances import Instance +from .manager import ScriptEndpointManager, ScriptManager +from .mixins import RenameMixin + + +class RuntimeChoices(object): + """ + Store available Script runtimes; + """ + PYTHON = 'python' + PYTHON_V4_2 = 'python_library_v4.2' # python old library; + PYTHON_V5_0 = 'python_library_v5.0' # python >5.0 library not backward compatible; + NODEJS = 'nodejs' + NODEJS_V0_4 = 'nodejs_library_v0.4' # nodejs old library; + NODEJS_V1_0 = 'nodejs_library_v1.0' # nodejs >1.0 library, not backward compatible; + GOLANG = 'golang' + SWIFT = 'swift' + PHP = 'php' + RUBY = 'ruby' + + +class Script(Model): + """ + OO wrapper around scripts `link `_. + + :ivar label: :class:`~syncano.models.fields.StringField` + :ivar description: :class:`~syncano.models.fields.StringField` + :ivar source: :class:`~syncano.models.fields.StringField` + :ivar runtime_name: :class:`~syncano.models.fields.ChoiceField` + :ivar config: :class:`~syncano.models.fields.Field` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + + .. note:: + **Script** has special method called ``run`` which will execute attached source code:: + + >>> Script.please.run('instance-name', 1234) + >>> Script.please.run('instance-name', 1234, payload={'variable_one': 1, 'variable_two': 2}) + >>> Script.please.run('instance-name', 1234, payload='{"variable_one": 1, "variable_two": 2}') + + or via instance:: + + >>> s = Script.please.get('instance-name', 1234) + >>> s.run() + >>> s.run(variable_one=1, variable_two=2) + """ + + label = fields.StringField(max_length=80, required=False) + description = fields.StringField(required=False) + source = fields.StringField() + runtime_name = fields.StringField() + config = fields.JSONField(required=False) + links = fields.LinksField() + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + traces = fields.RelatedManagerField('ScriptTrace') + + please = ScriptManager() + + class Meta: + parent = Instance + name = 'Script' + plural_name = 'Scripts' + endpoints = { + 'detail': { + 'methods': ['put', 'get', 'patch', 'delete'], + 'path': '/snippets/scripts/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/snippets/scripts/', + }, + 'run': { + 'methods': ['post'], + 'path': '/snippets/scripts/{id}/run/', + }, + } + + def run(self, **payload): + """ + Usage:: + + >>> s = Script.please.get('instance-name', 1234) + >>> s.run() + >>> s.run(variable_one=1, variable_two=2) + """ + from .traces import ScriptTrace + + if self.is_new(): + raise SyncanoValidationError('Method allowed only on existing model.') + + properties = self.get_endpoint_data() + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('run', properties, http_method) + connection = self._get_connection(**payload) + request = { + 'data': { + 'payload': json.dumps(payload) + } + } + response = connection.request(http_method, endpoint, **request) + response.update({'instance_name': self.instance_name, 'script_id': self.id}) + return ScriptTrace(**response) + + +class Schedule(Model): + """ + OO wrapper around script schedules `link `_. 
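+
+    A minimal creation sketch (field values below are illustrative)::
+
+        >>> Schedule.please.create(instance_name='my-instance', label='cleanup',
+        ...                        script=1234, interval_sec=3600)
+        >>> Schedule.please.create(instance_name='my-instance', label='nightly',
+        ...                        script=1234, crontab='0 3 * * *', timezone='UTC')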
+ + :ivar label: :class:`~syncano.models.fields.StringField` + :ivar script: :class:`~syncano.models.fields.IntegerField` + :ivar interval_sec: :class:`~syncano.models.fields.IntegerField` + :ivar crontab: :class:`~syncano.models.fields.StringField` + :ivar payload: :class:`~syncano.models.fields.HyperliStringFieldnkedField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar scheduled_next: :class:`~syncano.models.fields.DateTimeField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + """ + + label = fields.StringField(max_length=80) + script = fields.IntegerField(label='script id') + interval_sec = fields.IntegerField(read_only=False, required=False) + crontab = fields.StringField(max_length=40, required=False) + payload = fields.StringField(required=False) + timezone = fields.StringField(required=False) + created_at = fields.DateTimeField(read_only=True, required=False) + scheduled_next = fields.DateTimeField(read_only=True, required=False) + links = fields.LinksField() + + traces = fields.RelatedManagerField('ScheduleTraces') + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['put', 'get', 'patch', 'delete'], + 'path': '/schedules/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/schedules/', + } + } + + +class Trigger(Model): + """ + OO wrapper around triggers `link `_. + + :ivar label: :class:`~syncano.models.fields.StringField` + :ivar script: :class:`~syncano.models.fields.IntegerField` + :ivar class_name: :class:`~syncano.models.fields.StringField` + :ivar signal: :class:`~syncano.models.fields.ChoiceField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + """ + + SIGNAL_CHOICES = ( + {'display_name': 'post_update', 'value': 'post_update'}, + {'display_name': 'post_create', 'value': 'post_create'}, + {'display_name': 'post_delete', 'value': 'post_delete'}, + ) + + label = fields.StringField(max_length=80) + script = fields.IntegerField(label='script id') + class_name = fields.StringField(label='class name', mapping='class') + signal = fields.ChoiceField(choices=SIGNAL_CHOICES) + links = fields.LinksField() + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + traces = fields.RelatedManagerField('TriggerTrace') + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['put', 'get', 'patch', 'delete'], + 'path': '/triggers/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/triggers/', + } + } + + +class ScriptEndpoint(Model): + """ + OO wrapper around script endpoints `link `_. + + :ivar name: :class:`~syncano.models.fields.SlugField` + :ivar script: :class:`~syncano.models.fields.IntegerField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + + .. 
note:: + **ScriptEndpoint** has special method called ``run`` which will execute related script:: + + >>> ScriptEndpoint.please.run('instance-name', 'script-name') + >>> ScriptEndpoint.please.run('instance-name', 'script-name', payload={'variable_one': 1, + 'variable_two': 2}) + >>> ScriptEndpoint.please.run('instance-name', 'script-name', + payload="{\"variable_one\": 1, \"variable_two\": 2}") + + or via instance:: + + >>> se = ScriptEndpoint.please.get('instance-name', 'script-name') + >>> se.run() + >>> se.run(variable_one=1, variable_two=2) + + """ + + name = fields.SlugField(max_length=50, primary_key=True) + script = fields.IntegerField(label='script id') + public = fields.BooleanField(required=False, default=False) + public_link = fields.ChoiceField(required=False, read_only=True) + links = fields.LinksField() + + traces = fields.RelatedManagerField('ScriptEndpointTrace') + please = ScriptEndpointManager() + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['put', 'get', 'patch', 'delete'], + 'path': '/endpoints/scripts/{name}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/endpoints/scripts/', + }, + 'run': { + 'methods': ['post'], + 'path': '/endpoints/scripts/{name}/run/', + }, + 'reset': { + 'methods': ['post'], + 'path': '/endpoints/scripts/{name}/reset_link/', + }, + 'public': { + 'methods': ['get'], + 'path': '/endpoints/scripts/p/{public_link}/{name}/', + } + } + + def run(self, cache_key=None, **payload): + """ + Usage:: + + >>> se = ScriptEndpoint.please.get('instance-name', 'script-name') + >>> se.run() + >>> se.run(variable_one=1, variable_two=2) + """ + from .traces import ScriptEndpointTrace + + if self.is_new(): + raise SyncanoValidationError('Method allowed only on existing model.') + + properties = self.get_endpoint_data() + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('run', properties, http_method) + connection = self._get_connection(**payload) + + params = {} + if cache_key is not None: + params = {'cache_key': cache_key} + + kwargs = {'data': payload} + if params: + kwargs.update({'params': params}) + + response = connection.request(http_method, endpoint, **kwargs) + + if isinstance(response, dict) and 'result' in response and 'stdout' in response['result']: + response.update({'instance_name': self.instance_name, + 'script_name': self.name}) + return ScriptEndpointTrace(**response) + # if script is a custom one, return result 'as-it-is'; + return response + + def reset_link(self): + """ + Usage:: + + >>> se = ScriptEndpoint.please.get('instance-name', 'script-name') + >>> se.reset_link() + """ + properties = self.get_endpoint_data() + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('reset', properties, http_method) + connection = self._get_connection() + + response = connection.request(http_method, endpoint) + self.public_link = response['public_link'] + + +class ResponseTemplate(RenameMixin, Model): + """ + OO wrapper around templates. 
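+
+    A minimal sketch (the template name and context below are illustrative)::
+
+        >>> template = ResponseTemplate.please.get(instance_name='my-instance', name='welcome_template')
+        >>> template.render(context={'objects': [1, 2, 3]})
+        >>> template.rename('welcome_template_v2')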
+ + :ivar name: :class:`~syncano.models.fields.StringField` + :ivar content: :class:`~syncano.models.fields.StringField` + :ivar content_type: :class:`~syncano.models.fields.StringField` + :ivar context: :class:`~syncano.models.fields.JSONField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + """ + + name = fields.StringField(max_length=64) + content = fields.StringField(label='content') + content_type = fields.StringField(label='content type') + context = fields.JSONField(label='context') + links = fields.LinksField() + + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['put', 'get', 'patch', 'delete'], + 'path': '/snippets/templates/{name}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/snippets/templates/', + }, + 'render': { + 'methods': ['post'], + 'path': '/snippets/templates/{name}/render/', + }, + } + + def render(self, context=None): + context = context or {} + properties = self.get_endpoint_data() + http_method = 'POST' + endpoint = self._meta.resolve_endpoint('render', properties, http_method) + + connection = self._get_connection() + return connection.request(http_method, endpoint, data={'context': context}) + + def rename(self, new_name): + rename_path = self.links.rename + data = {'new_name': new_name} + connection = self._get_connection() + response = connection.request('POST', rename_path, data=data) + self.to_python(response) + return self diff --git a/syncano/models/instances.py b/syncano/models/instances.py new file mode 100644 index 0000000..788ba05 --- /dev/null +++ b/syncano/models/instances.py @@ -0,0 +1,193 @@ +import json + +import six +from syncano.exceptions import SyncanoValueError + +from . import fields +from .base import Model +from .mixins import RenameMixin + + +class Instance(RenameMixin, Model): + """ + OO wrapper around instances `link `_. 
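+
+    A minimal configuration sketch (the instance name and config keys are placeholders)::
+
+        >>> instance = Instance.please.get(name='my-instance')
+        >>> instance.set_config({'feature_flag': True})
+        >>> instance.get_config()
+        {'feature_flag': True}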
+ + :ivar name: :class:`~syncano.models.fields.StringField` + :ivar description: :class:`~syncano.models.fields.StringField` + :ivar role: :class:`~syncano.models.fields.Field` + :ivar owner: :class:`~syncano.models.fields.ModelField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar metadata: :class:`~syncano.models.fields.JSONField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + :ivar api_keys: :class:`~syncano.models.fields.RelatedManagerField` + :ivar users: :class:`~syncano.models.fields.RelatedManagerField` + :ivar admins: :class:`~syncano.models.fields.RelatedManagerField` + :ivar scripts: :class:`~syncano.models.fields.RelatedManagerField` + :ivar script_endpoints: :class:`~syncano.models.fields.RelatedManagerField` + :ivar templates: :class:`~syncano.models.fields.RelatedManagerField` + :ivar triggers: :class:`~syncano.models.fields.RelatedManagerField` + :ivar schedules: :class:`~syncano.models.fields.RelatedManagerField` + :ivar classes: :class:`~syncano.models.fields.RelatedManagerField` + :ivar invitations: :class:`~syncano.models.fields.RelatedManagerField` + :ivar gcm_devices: :class:`~syncano.models.fields.RelatedManagerField` + :ivar gcm_messages: :class:`~syncano.models.fields.RelatedManagerField` + :ivar apns_devices: :class:`~syncano.models.fields.RelatedManagerField` + :ivar apns_messages: :class:`~syncano.models.fields.RelatedManagerField` + """ + + name = fields.StringField(max_length=64, primary_key=True) + description = fields.StringField(read_only=False, required=False) + role = fields.Field(read_only=True, required=False) + owner = fields.ModelField('Admin', read_only=True) + links = fields.LinksField() + metadata = fields.JSONField(read_only=False, required=False) + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + # user related fields; + api_keys = fields.RelatedManagerField('ApiKey') + users = fields.RelatedManagerField('User') + admins = fields.RelatedManagerField('Admin') + groups = fields.RelatedManagerField('Group') + + # snippets and data fields; + scripts = fields.RelatedManagerField('Script') + script_endpoints = fields.RelatedManagerField('ScriptEndpoint') + data_endpoints = fields.RelatedManagerField('DataEndpoint') + templates = fields.RelatedManagerField('ResponseTemplate') + + triggers = fields.RelatedManagerField('Trigger') + schedules = fields.RelatedManagerField('Schedule') + classes = fields.RelatedManagerField('Class') + invitations = fields.RelatedManagerField('InstanceInvitation') + hostings = fields.RelatedManagerField('Hosting') + + # push notifications fields; + gcm_devices = fields.RelatedManagerField('GCMDevice') + gcm_messages = fields.RelatedManagerField('GCMMessage') + apns_devices = fields.RelatedManagerField('APNSDevice') + apns_messages = fields.RelatedManagerField('APNSMessage') + + class Meta: + endpoints = { + 'detail': { + 'methods': ['delete', 'patch', 'put', 'get'], + 'path': '/v1.1/instances/{name}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/v1.1/instances/', + }, + 'config': { + 'methods': ['put', 'get'], + 'path': '/v1.1/instances/{name}/snippets/config/', + }, + 'endpoints': { + 'methods': ['get'], + 'path': '/v1.1/instances/{name}/endpoints/sockets/' + } + } + + def get_config(self): + properties = self.get_endpoint_data() + http_method = 'GET' + endpoint = self._meta.resolve_endpoint('config', properties, 
http_method) + connection = self._get_connection() + return connection.request(http_method, endpoint)['config'] + + def set_config(self, config): + if isinstance(config, six.string_types): + try: + config = json.loads(config) + except (ValueError, TypeError): + raise SyncanoValueError('Config string is not a parsable JSON.') + + if not isinstance(config, dict): + raise SyncanoValueError('Retrieved Config is not a valid dict object.') + + properties = self.get_endpoint_data() + http_method = 'PUT' + endpoint = self._meta.resolve_endpoint('config', properties, http_method) + data = {'config': config} + connection = self._get_connection() + connection.request(http_method, endpoint, data=data) + + +class ApiKey(Model): + """ + OO wrapper around instance api keys `link `_. + + :ivar api_key: :class:`~syncano.models.fields.StringField` + :ivar allow_user_create: :class:`~syncano.models.fields.BooleanField` + :ivar ignore_acl: :class:`~syncano.models.fields.BooleanField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + """ + + api_key = fields.StringField(read_only=True, required=False) + description = fields.StringField(required=False) + allow_user_create = fields.BooleanField(required=False, default=False) + ignore_acl = fields.BooleanField(required=False, default=False) + allow_anonymous_read = fields.BooleanField(required=False, default=False) + links = fields.LinksField() + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['get', 'delete'], + 'path': '/api_keys/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/api_keys/', + } + } + + +class InstanceInvitation(Model): + """ + OO wrapper around instance + invitations `link `_. + + :ivar email: :class:`~syncano.models.fields.EmailField` + :ivar role: :class:`~syncano.models.fields.ChoiceField` + :ivar key: :class:`~syncano.models.fields.StringField` + :ivar state: :class:`~syncano.models.fields.StringField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar created_at: :class:`~syncano.models.fields.DateTimeField` + :ivar updated_at: :class:`~syncano.models.fields.DateTimeField` + """ + from .accounts import Admin + + email = fields.EmailField(max_length=254) + role = fields.ChoiceField(choices=Admin.ROLE_CHOICES) + key = fields.StringField(read_only=True, required=False) + state = fields.StringField(read_only=True, required=False) + links = fields.LinksField() + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + class Meta: + parent = Instance + name = 'Invitation' + endpoints = { + 'detail': { + 'methods': ['get', 'delete'], + 'path': '/invitations/{id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/invitations/', + } + } + + def resend(self): + """ + Resend the invitation. 
+ :return: InstanceInvitation instance; + """ + resend_path = self.links.resend + connection = self._get_connection() + connection.request('POST', resend_path) # empty response here: 204 no content + return self diff --git a/syncano/models/manager.py b/syncano/models/manager.py index 044301a..61e7ae2 100644 --- a/syncano/models/manager.py +++ b/syncano/models/manager.py @@ -1,11 +1,11 @@ import json from copy import deepcopy -from functools import wraps import six - from syncano.connection import ConnectionMixin -from syncano.exceptions import SyncanoRequestError, SyncanoValueError +from syncano.exceptions import SyncanoRequestError, SyncanoValidationError, SyncanoValueError +from syncano.models.bulk import ModelBulkCreate, ObjectBulkCreate +from syncano.models.manager_mixins import ArrayOperationsMixin, IncrementMixin, clone from .registry import registry @@ -13,16 +13,6 @@ REPR_OUTPUT_SIZE = 20 -def clone(func): - """Decorator which will ensure that we are working on copy of ``self``.""" - - @wraps(func) - def inner(self, *args, **kwargs): - self = self._clone() - return func(self, *args, **kwargs) - return inner - - class ManagerDescriptor(object): def __init__(self, manager): @@ -34,39 +24,11 @@ def __get__(self, instance, owner=None): return self.manager.all() -class RelatedManagerDescriptor(object): - - def __init__(self, field, name, endpoint): - self.field = field - self.name = name - self.endpoint = endpoint - - def __get__(self, instance, owner=None): - if instance is None: - raise AttributeError("RelatedManager is accessible only via {0} instances.".format(owner.__name__)) - - links = getattr(instance, self.field.name) - - if not links: - return None - - path = links[self.name] - - if not path: - return None - - Model = registry.get_model_by_path(path) - method = getattr(Model.please, self.endpoint, Model.please.all) - - properties = instance._meta.get_endpoint_properties('detail') - properties = [getattr(instance, prop) for prop in properties] - - return method(*properties) - - class Manager(ConnectionMixin): """Base class responsible for all ORM (``please``) actions.""" + BATCH_URI = '/v1.1/instances/{name}/batch/' + def __init__(self): self.name = None self.model = None @@ -77,10 +39,13 @@ def __init__(self): self.method = None self.query = {} self.data = {} + self.is_lazy = False + self._filter_kwargs = {} self._limit = None self._serialize = True self._connection = None + self._template = None def __repr__(self): # pragma: no cover data = list(self[:REPR_OUTPUT_SIZE + 1]) @@ -100,11 +65,19 @@ def __len__(self): # pragma: no cover def __iter__(self): # pragma: no cover return iter(self.iterator()) - def __bool__(self): # pragma: no cover - return bool(self.iterator()) + def __nonzero__(self): + try: + self[0] + return True + except IndexError: + return False - def __nonzero__(self): # pragma: no cover - return type(self).__bool__(self) + def __bool__(self): # pragma: no cover + try: + self[0] + return True + except IndexError: + return False def __getitem__(self, k): """ @@ -127,8 +100,123 @@ def __getitem__(self, k): manager.limit(k + 1) return list(manager)[k] - # Object actions + def _set_default_properties(self, endpoint_properties): + for field in self.model._meta.fields: + + is_demanded = field.name in endpoint_properties + has_default = field.default is not None + + if is_demanded and has_default: + self.properties[field.name] = field.default + + def as_batch(self): + self.is_lazy = True + return self + def batch(self, *args): + """ + A convenience method for making a 
batch request. Only the create, update and delete manager methods are supported. + Batch requests are limited to 50 calls, so the number of args should be 50 or fewer. + + Usage:: + + klass = instance.classes.get(name='some_class') + Object.please.batch( + klass.objects.as_batch().delete(id=652), + klass.objects.as_batch().delete(id=653), + ... + ) + + and:: + + Object.please.batch( + klass.objects.as_batch().update(id=652, arg='some_b'), + klass.objects.as_batch().update(id=653, arg='some_b'), + ... + ) + + and:: + + Object.please.batch( + klass.objects.as_batch().create(arg='some_c'), + klass.objects.as_batch().create(arg='some_c'), + ... + ) + + and:: + + Object.please.batch( + klass.objects.as_batch().delete(id=653), + klass.objects.as_batch().update(id=652, arg='some_a'), + klass.objects.as_batch().create(arg='some_c'), + ... + ) + + are possible. + + But:: + + Object.please.batch( + klass.objects.as_batch().get_or_create(id=653, arg='some_a') + ) + + will not work as expected. + + A sample snippet for working with instance users:: + + instance = Instance.please.get(name='Nabuchodonozor') + model_users = instance.users.batch( + instance.users.as_batch().delete(id=7), + instance.users.as_batch().update(id=9, username='username_a'), + instance.users.as_batch().create(username='username_b', password='5432'), + ... + ) + + A sample response will be:: + + [{u'code': 204}, , , ...] + + :param args: each arg is one of: klass.objects.as_batch().create(...), klass.objects.as_batch().update(...), + klass.objects.as_batch().delete(...) + :return: a list of objects corresponding to the batch arguments; update and create return a populated Object, + while delete returns the raw response from the server (usually a dict: {'code': 204}, sometimes information + about a resource that could not be found); + """ + # first, turn off lazy mode: + self.is_lazy = False + + meta = [] + requests = [] + for arg in args: + if isinstance(arg, list): # update now can return a list; + for nested_arg in arg: + meta.append(nested_arg['meta']) + requests.append(nested_arg['body']) + else: + meta.append(arg['meta']) + requests.append(arg['body']) + + response = self.connection.request( + 'POST', + self.BATCH_URI.format(name=registry.instance_name), + **{'data': {'requests': requests}} + ) + + populated_response = [] + + for meta, res in zip(meta, response): + if res['code'] in [200, 201]: # success response: update or create; + content = res['content'] + model = meta['model'] + properties = meta['properties'] + content.update(properties) + populated_response.append(model(**content)) + else: + populated_response.append(res) + + return populated_response + + # Object actions def create(self, **kwargs): """ A convenience method for creating an object and saving it all in one step. Thus:: @@ -142,13 +230,20 @@ def create(self, **kwargs): are equivalent.
""" + data = self.properties.copy() attrs = kwargs.copy() - attrs.update(self.properties) + data.update(attrs) + data.update({'is_lazy': self.is_lazy}) + instance = self._get_instance(data) + + if instance.__class__.__name__ == 'Instance': + registry.set_used_instance(instance.name) - instance = self.model(**attrs) - instance.save() + saved_instance = instance.save() + if not self.is_lazy: + return instance - return instance + return saved_instance def bulk_create(self, *objects): """ @@ -156,13 +251,17 @@ def bulk_create(self, *objects): Usage:: - objects = [{'name': 'test-one'}, {'name': 'test-two'}] - instances = Instance.please.bulk_create(objects) + instance = Instance.please.get(name='instance_a') + instances = instance.users.bulk_create( + User(username='user_a', password='1234'), + User(username='user_b', password='4321') + ) - .. warning:: - This method is not meant to be used with large data sets. + Warning:: + + This method is restricted to handle 50 objects at once. """ - return [self.create(**o) for o in objects] + return ModelBulkCreate(objects, self).process() @clone def get(self, *args, **kwargs): @@ -179,6 +278,59 @@ def get(self, *args, **kwargs): self._filter(*args, **kwargs) return self.request() + @clone + def in_bulk(self, object_ids_list, **kwargs): + """ + A method which allows to bulk get objects; + + Use:: + + response = Classes.please.in_bulk(['test_class', ...]) + + response is: + + > {'test_class': } + + For objects: + + res = Object.please.in_bulk([1, 2], class_name='test_class') + + or + + res = klass.objects.in_bulk([1, 2]) + + response is: + + {1: , 2: {u'content': {u'detail': u'Not found.'}, u'code': 404}} + + + :param object_ids_list: This list expects the primary keys - id in api, a names, ids can be used here; + :return: a dict in which keys are the object_ids_list elements, and values are a populated objects; + """ + self.properties.update(kwargs) + path, defaults = self._get_endpoint_properties() + requests = [ + {'method': 'GET', 'path': '{path}{id}/'.format(path=path, id=object_id)} for object_id in object_ids_list + ] + + response = self.connection.request( + 'POST', + self.BATCH_URI.format(name=registry.instance_name), + **{'data': {'requests': requests}} + ) + + bulk_response = {} + + for object_id, object in zip(object_ids_list, response): + if object['code'] == 200: + data = object['content'].copy() + data.update(self.properties) + bulk_response[object_id] = self.model(**data) + else: + bulk_response[object_id] = object + + return bulk_response + def detail(self, *args, **kwargs): """ Wrapper around ``get`` method. @@ -225,33 +377,133 @@ def get_or_create(self, **kwargs): def delete(self, *args, **kwargs): """ Removes single instance based on provided arguments. + Returns None if deletion went fine. 
Usage:: - instance = Instance.please.delete('test-one') - instance = Instance.please.delete(name='test-one') + Instance.please.delete('test-one') + Instance.please.delete(name='test-one') """ self.method = 'DELETE' self.endpoint = 'detail' self._filter(*args, **kwargs) - return self.request() + if not self.is_lazy: + return self.request() + + path, defaults = self._get_endpoint_properties() + + return self.model.batch_object(method=self.method, path=path, body=self.data, properties=defaults) + + @clone + def filter(self, **kwargs): + endpoint_fields = [field.name for field in self.model._meta.fields if field.has_endpoint_data] + for kwarg_name in kwargs: + if kwarg_name not in endpoint_fields: + raise SyncanoValueError('Only endpoint properties can be used in filter: {}'.format(endpoint_fields)) + self._filter_kwargs = kwargs + return self @clone def update(self, *args, **kwargs): + if self._filter_kwargs or self.query: # means that .filter() was run; + return self.new_update(**kwargs) + return self.old_update(*args, **kwargs) + + @clone + def new_update(self, **kwargs): + """ + Updates multiple instances based on provided arguments. There to ways to do so: + + 1. Django-style update. + 2. By specifying arguments. + + Usage:: + + objects = Object.please.list(instance_name=INSTANCE_NAME, + class_name='someclass').filter(id=1).update(arg='103') + objects = Object.please.list(instance_name=INSTANCE_NAME, + class_name='someclass').filter(id=1).update(arg='103') + + The return value is a list of objects; + + """ + + model_fields = [field.name for field in self.model._meta.fields if not field.has_endpoint_data] + for field_name in kwargs: + if field_name not in model_fields: + raise SyncanoValueError('This model has not field {}'.format(field_name)) + + self.endpoint = 'detail' + self.method = self.get_allowed_method('PATCH', 'PUT', 'POST') + self.data = kwargs.copy() + + if self._filter_kwargs: # Manager context; + # do a single object update: Class, Instance for example; + self.data.update(self._filter_kwargs) + serialized = self._get_serialized_data() + self._filter(*(), **self.data) # sets the proper self.properties here + + if not self.is_lazy: + return [self.serialize(self.request(), self.model)] + + path, defaults = self._get_endpoint_properties() + return [self.model.batch_object(method=self.method, path=path, body=serialized, properties=defaults)] + + instances = [] # ObjectManager context; + for obj in self: + self._filter(*(), **kwargs) + serialized = self._get_serialized_data() + self.properties.update({'id': obj.id}) + path, defaults = self._get_endpoint_properties() + updated_instance = self.model.batch_object(method=self.method, path=path, body=serialized, + properties=defaults) + + instances.append(updated_instance) # always a batch structure here; + + if not self.is_lazy: + instances = self.batch(instances) + + return instances + + @clone + def old_update(self, *args, **kwargs): """ - Updates single instance based on provided arguments. + Updates single instance based on provided arguments. There to ways to do so: + + 1. Django-style update. + 2. By specifying **data** argument. + The **data** is a dictionary of (field, value) pairs used to update the object. 
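+        Keyword arguments and the ``data`` dictionary are merged before the request is sent; if a key appears in
+        both, the value from ``data`` takes precedence.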
Usage:: + instance = Instance.please.update('test-one', description='new one') + instance = Instance.please.update(name='test-one', description='new one') + instance = Instance.please.update('test-one', data={'description': 'new one'}) instance = Instance.please.update(name='test-one', data={'description': 'new one'}) """ self.endpoint = 'detail' - self.method = self.get_allowed_method('PUT', 'PATCH', 'POST') - self.data = kwargs.pop('data') + self.method = self.get_allowed_method('PATCH', 'PUT', 'POST') + data = kwargs.pop('data', {}) + self.data = kwargs.copy() + self.data.update(data) + + model = self.serialize(self.data, self.model) + + serialized = model.to_native() + + serialized = {k: v for k, v in six.iteritems(serialized) + if k in self.data} + + self.data.update(serialized) self._filter(*args, **kwargs) - return self.request() + + if not self.is_lazy: + return self.request() + + path, defaults = self._get_endpoint_properties() + return self.model.batch_object(method=self.method, path=path, body=self.data, properties=defaults) def update_or_create(self, defaults=None, **kwargs): """ @@ -366,18 +618,18 @@ def limit(self, value): return self @clone - def order_by(self, field): + def ordering(self, order='asc'): """ Sets order of returned objects. Usage:: - instances = Instance.please.order_by('name') + instances = Instance.please.ordering() """ - if not field or not isinstance(field, six.string_types): - raise SyncanoValueError('Order by field needs to be a string.') + if order not in ('asc', 'desc'): + raise SyncanoValueError('Invalid order value.') - self.query['order_by'] = field + self.query['ordering'] = order return self @clone @@ -394,6 +646,21 @@ def raw(self): self._serialize = False return self + @clone + def template(self, name): + """ + Disables serialization. ``request`` method will return raw text. 
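+        The given name is sent in the ``X-TEMPLATE-RESPONSE`` header, so the API renders the response with that
+        template and ``request`` returns the rendered text instead of parsed objects.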
+ + Usage:: + + >>> instances = Instance.please.list().template('test') + >>> instances + u'text' + """ + self._serialize = False + self._template = name + return self + @clone def using(self, connection): """ @@ -413,9 +680,27 @@ def contribute_to_class(self, model, name): # pragma: no cover if not self.name: self.name = name + def _get_serialized_data(self): + model = self.serialize(self.data, self.model) + serialized = model.to_native() + serialized = {k: v for k, v in six.iteritems(serialized) + if k in self.data} + self.data.update(serialized) + return serialized + def _filter(self, *args, **kwargs): + properties = self.model._meta.get_endpoint_properties(self.endpoint) + + self._set_default_properties(properties) + if args and self.endpoint: - properties = self.model._meta.get_endpoint_properties(self.endpoint) + # let user get object by 'id' + too_much_properties = len(args) < len(properties) + id_specified = 'id' in properties + + if too_much_properties and id_specified: + properties = ['id'] + mapped_args = {k: v for k, v in zip(properties, args)} self.properties.update(mapped_args) self.properties.update(kwargs) @@ -426,19 +711,24 @@ def _clone(self): manager.name = self.name manager.model = self.model manager._connection = self._connection + manager._template = self._template manager.endpoint = self.endpoint manager.properties = deepcopy(self.properties) manager._limit = self._limit manager.method = self.method manager.query = deepcopy(self.query) + manager._filter_kwargs = deepcopy(self._filter_kwargs) manager.data = deepcopy(self.data) manager._serialize = self._serialize + manager.is_lazy = self.is_lazy return manager def serialize(self, data, model=None): """Serializes passed data to related :class:`~syncano.models.base.Model` class.""" model = model or self.model + if data == '': + return if isinstance(data, model): return data @@ -448,34 +738,45 @@ def serialize(self, data, model=None): properties = deepcopy(self.properties) properties.update(data) - return model(**properties) if self._serialize else data + def build_request(self, request): + if 'params' not in request and self.query: + request['params'] = self.query + + if 'data' not in request and self.data: + request['data'] = self.data + + if 'headers' not in request: + request['headers'] = {} + + if self._template is not None and 'X-TEMPLATE-RESPONSE' not in request['headers']: + request['headers']['X-TEMPLATE-RESPONSE'] = self._template + def request(self, method=None, path=None, **request): """Internal method, which calls Syncano API and returns serialized data.""" meta = self.model._meta method = method or self.method allowed_methods = meta.get_endpoint_methods(self.endpoint) - path = path or meta.resolve_endpoint(self.endpoint, self.properties) + + if not path: + path, defaults = self._get_endpoint_properties() if method.lower() not in allowed_methods: methods = ', '.join(allowed_methods) raise SyncanoValueError('Unsupported request method "{0}" allowed are {1}.'.format(method, methods)) - if 'params' not in request and self.query: - request['params'] = self.query - - if 'data' not in request and self.data: - request['data'] = self.data + self.build_request(request) try: response = self.connection.request(method, path, **request) except SyncanoRequestError as e: if e.status_code == 404: - raise self.model.DoesNotExist + obj_id = path.rsplit('/')[-2] + raise self.model.DoesNotExist("{} not found.".format(obj_id)) raise - if 'next' not in response: + if 'next' not in response and not self._template: return 
self.serialize(response) return response @@ -494,9 +795,12 @@ def get_allowed_method(self, *methods): def iterator(self): """Pagination handler""" - response = self.request() + response = self._get_response() results = 0 while True: + if self._template: + yield response + break objects = response.get('objects') next_url = response.get('next') @@ -512,11 +816,22 @@ def iterator(self): response = self.request(path=next_url) + def _get_response(self): + return self.request() + + def _get_instance(self, attrs): + return self.model(**attrs) + + def _get_endpoint_properties(self): + defaults = {f.name: f.default for f in self.model._meta.fields if f.default is not None} + defaults.update(self.properties) + return self.model._meta.resolve_endpoint(self.endpoint, defaults), defaults + -class CodeBoxManager(Manager): +class ScriptManager(Manager): """ Custom :class:`~syncano.models.manager.Manager` - class for :class:`~syncano.models.base.CodeBox` model. + class for :class:`~syncano.models.base.Script` model. """ @clone @@ -532,13 +847,13 @@ def run(self, *args, **kwargs): self._filter(*args, **kwargs) self._serialize = False response = self.request() - return registry.CodeBoxTrace(**response) + return registry.ScriptTrace(**response) -class WebhookManager(Manager): +class ScriptEndpointManager(Manager): """ Custom :class:`~syncano.models.manager.Manager` - class for :class:`~syncano.models.base.Webhook` model. + class for :class:`~syncano.models.base.ScriptEndpoint` model. """ @clone @@ -556,10 +871,10 @@ def run(self, *args, **kwargs): response = self.request() # Workaround for circular import - return registry.Webhook.RESULT_CLASS(**response) + return registry.ScriptEndpointTrace(**response) -class ObjectManager(Manager): +class ObjectManager(IncrementMixin, ArrayOperationsMixin, Manager): """ Custom :class:`~syncano.models.manager.Manager` class for :class:`~syncano.models.base.Object` model. @@ -567,23 +882,68 @@ class for :class:`~syncano.models.base.Object` model. 
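+    # Lookup suffixes understood by ``filter``, e.g. ``id__gt=600`` or, for RelationField filtering,
+    # ``relation__field__eq='value'`` (field names here are only illustrative); see ``_build_query`` below.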
LOOKUP_SEPARATOR = '__' ALLOWED_LOOKUPS = [ 'gt', 'gte', 'lt', 'lte', - 'eq', 'neq', 'exists', 'in', + 'eq', 'neq', 'exists', 'in', 'nin', + 'near', 'is', 'contains', + 'startswith', 'endswith', + 'contains', 'istartswith', + 'iendswith', 'icontains', + 'ieq', 'near', ] - def create(self, **kwargs): - attrs = kwargs.copy() - attrs.update(self.properties) - - model = self.model.get_subclass_model(**attrs) - instance = model(**attrs) - instance.save() - - return instance + def __init__(self): + super(ObjectManager, self).__init__() + self._initial_response = None def serialize(self, data, model=None): - model = self.model.get_subclass_model(**self.properties) + model = model or self.model.get_subclass_model(**self.properties) return super(ObjectManager, self).serialize(data, model) + @clone + def count(self): + """ + Return the queryset count; + + Usage:: + + Object.please.list(instance_name='raptor', class_name='some_class').filter(id__gt=600).count() + Object.please.list(instance_name='raptor', class_name='some_class').count() + Object.please.all(instance_name='raptor', class_name='some_class').count() + + :return: The count of the returned objects: count = DataObjects.please.list(...).count(); + """ + self.method = 'GET' + self.query.update({ + 'include_count': True, + 'page_size': 0, + }) + response = self.request() + return response['objects_count'] + + @clone + def with_count(self, page_size=20): + """ + Return the queryset with count; + + Usage:: + + Object.please.list(instance_name='raptor', class_name='some_class').filter(id__gt=600).with_count() + Object.please.list(instance_name='raptor', class_name='some_class').with_count(page_size=30) + Object.please.all(instance_name='raptor', class_name='some_class').with_count() + + :param page_size: The size of the pagination; Default to 20; + :return: The tuple with objects and the count: objects, count = DataObjects.please.list(...).with_count(); + """ + query_data = { + 'include_count': True, + 'page_size': page_size, + } + + self.method = 'GET' + self.query.update(query_data) + response = self.request() + self._initial_response = response + return self, self._initial_response['objects_count'] + @clone def filter(self, **kwargs): """ @@ -593,35 +953,175 @@ def filter(self, **kwargs): objects = Object.please.list('instance-name', 'class-name').filter(henryk__gte='hello') """ + + query = self._build_query(query_data=kwargs) + self.query['query'] = json.dumps(query) + self.method = 'GET' + self.endpoint = 'list' + return self + + def _build_query(self, query_data, **kwargs): query = {} + self.properties.update(**kwargs) model = self.model.get_subclass_model(**self.properties) - for field_name, value in six.iteritems(kwargs): + for field_name, value in six.iteritems(query_data): lookup = 'eq' + model_name = None if self.LOOKUP_SEPARATOR in field_name: - field_name, lookup = field_name.split(self.LOOKUP_SEPARATOR, 1) + model_name, field_name, lookup = self._get_lookup_attributes(field_name) + + # if filter is made on relation field: relation__name__eq='test'; + if model_name: + for field in model._meta.fields: + if field.name == model_name: + break + # if filter is made on normal field: name__eq='test'; + else: + for field in model._meta.fields: + if field.name == field_name: + break + + self._validate_lookup(model, model_name, field_name, lookup, field) + + query_main_lookup, query_main_field = self._get_main_lookup(model_name, field_name, lookup) + + query.setdefault(query_main_field, {}) + 
query[query_main_field]['_{0}'.format(query_main_lookup)] = field.to_query( + value, + query_main_lookup, + related_field_name=field_name, + related_field_lookup=lookup, + ) + return query + + def _get_lookup_attributes(self, field_name): + try: + model_name, field_name, lookup = field_name.split(self.LOOKUP_SEPARATOR, 2) + except ValueError: + model_name = None + field_name, lookup = field_name.split(self.LOOKUP_SEPARATOR, 1) - if field_name not in model._meta.field_names: - allowed = ', '.join(model._meta.field_names) - raise SyncanoValueError('Invalid field name "{0}" allowed are {1}.'.format(field_name, allowed)) + return model_name, field_name, lookup - if lookup not in self.ALLOWED_LOOKUPS: - allowed = ', '.join(self.ALLOWED_LOOKUPS) - raise SyncanoValueError('Invalid lookup type "{0}" allowed are {1}.'.format(lookup, allowed)) + def _validate_lookup(self, model, model_name, field_name, lookup, field): - for field in model._meta.fields: - if field.name == field_name: - break + if not model_name and field_name not in model._meta.field_names: + allowed = ', '.join(model._meta.field_names) + raise SyncanoValueError('Invalid field name "{0}" allowed are {1}.'.format(field_name, allowed)) - query.setdefault(field_name, {}) - query[field_name]['_{0}'.format(lookup)] = field.to_query(value, lookup) + if lookup not in self.ALLOWED_LOOKUPS: + allowed = ', '.join(self.ALLOWED_LOOKUPS) + raise SyncanoValueError('Invalid lookup type "{0}" allowed are {1}.'.format(lookup, allowed)) - self.query['query'] = json.dumps(query) + if model_name and field.__class__.__name__ != 'RelationField': + raise SyncanoValueError('Lookup supported only for RelationField.') + + @classmethod + def _get_main_lookup(cls, model_name, field_name, lookup): + if model_name: + return 'is', model_name + else: + return lookup, field_name + + def bulk_create(self, *objects): + """ + Creates many new objects. + Usage:: + + created_objects = Object.please.bulk_create( + Object(instance_name='instance_a', class_name='some_class', title='one'), + Object(instance_name='instance_a', class_name='some_class', title='two'), + Object(instance_name='instance_a', class_name='some_class', title='three') + ) + + :param objects: a list of the instances of data objects to be created; + :return: a created and populated list of objects; When error occurs a plain dict is returned in that place; + """ + return ObjectBulkCreate(objects, self).process() + + def _get_response(self): + return self._initial_response or self.request() + + def _get_instance(self, attrs): + return self.model.get_subclass_model(**attrs)(**attrs) + + def _get_model_field_names(self): + object_fields = [f.name for f in self.model._meta.fields] + schema = self.model.get_class_schema(**self.properties) + + return object_fields + [i['name'] for i in schema.schema] + + def _validate_fields(self, model_fields, args): + for arg in args: + if arg not in model_fields: + msg = 'Field "{0}" does not exist in class {1}.' + raise SyncanoValidationError( + msg.format(arg, self.properties['class_name'])) + + @clone + def fields(self, *args): + """ + Special method just for data object :class:`~syncano.models.base.Object` model. 
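+        Restricts the fields returned by the API: the given names are validated against the class schema and passed
+        in the ``fields`` query parameter.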
+ + Usage:: + + objects = Object.please.list('instance-name', 'class-name').fields('name', 'id') + """ + model_fields = self._get_model_field_names() + self._validate_fields(model_fields, args) + self.query['fields'] = ','.join(args) self.method = 'GET' self.endpoint = 'list' return self + @clone + def exclude(self, *args): + """ + Special method just for data object :class:`~syncano.models.base.Object` model. + + Usage:: + + objects = Object.please.list('instance-name', 'class-name').exclude('avatar') + """ + model_fields = self._get_model_field_names() + self._validate_fields(model_fields, args) + + fields = [f for f in model_fields if f not in args] + + self.query['fields'] = ','.join(fields) + self.method = 'GET' + self.endpoint = 'list' + return self + + def ordering(self, order=None): + raise AttributeError('Ordering not implemented. Use order_by instead.') + + @clone + def order_by(self, field): + """ + Sets ordering field of returned objects. + + Usage:: + + # ASC order + instances = Object.please.order_by('name') + + # DESC order + instances = Object.please.order_by('-name') + """ + if not field or not isinstance(field, six.string_types): + raise SyncanoValueError('Order by field needs to be a string.') + + self.query['order_by'] = field + return self + + def _clone(self): + manager = super(ObjectManager, self)._clone() + manager._initial_response = self._initial_response + return manager + class SchemaManager(object): """ diff --git a/syncano/models/manager_mixins.py b/syncano/models/manager_mixins.py new file mode 100644 index 0000000..91defef --- /dev/null +++ b/syncano/models/manager_mixins.py @@ -0,0 +1,266 @@ +# -*- coding: utf-8 -*- +from six import wraps +from syncano.exceptions import SyncanoValueError + + +def clone(func): + """Decorator which will ensure that we are working on copy of ``self``. + """ + @wraps(func) + def inner(self, *args, **kwargs): + self = self._clone() + return func(self, *args, **kwargs) + return inner + + +class IncrementMixin(object): + + @clone + def increment(self, field_name, value, **kwargs): + """ + A manager method which increments given field with given value. + + Usage:: + + data_object = Object.please.increment( + field_name='argA', + value=10, + class_name='testclass', + id=1715 + ) + + :param field_name: the field name to increment; + :param value: the increment value; + :param kwargs: class_name and id usually; + :return: the processed (incremented) data object; + """ + self.properties.update(kwargs) + model = self.model.get_subclass_model(**self.properties) + + self.validate(field_name, value, model) + + return self.process(field_name, value, **kwargs) + + @clone + def decrement(self, field_name, value, **kwargs): + """ + A manager method which decrements given field with given value. 
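+        Under the hood this is the same PATCH request as ``increment`` with a negated value, and it is allowed only
+        on integer and float fields.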
+ + Usage:: + + data_object = Object.please.decrement( + field_name='argA', + value=10, + class_name='testclass', + id=1715 + ) + + :param field_name: the field name to decrement; + :param value: the decrement value; + :param kwargs: class_name and id usually; + :return: the processed (incremented) data object; + """ + self.properties.update(kwargs) + model = self.model.get_subclass_model(**self.properties) + + self.validate(field_name, value, model, operation_type='decrement') + + return self.process(field_name, value, operation_type='decrement', **kwargs) + + def process(self, field_name, value, operation_type='increment', **kwargs): + self.endpoint = 'detail' + self.method = self.get_allowed_method('PATCH', 'PUT', 'POST') + self.data = kwargs.copy() + + if operation_type == 'increment': + increment_data = {'_increment': value} + elif operation_type == 'decrement': + increment_data = {'_increment': -value} + else: + raise SyncanoValueError('Operation not supported') + + self.data.update( + {field_name: increment_data} + ) + + response = self.request() + return response + + @classmethod + def validate(cls, field_name, value, model, operation_type='increment'): + if not isinstance(value, (int, float)): + raise SyncanoValueError('Provide an integer or float as a {} value.'.format(operation_type)) + + if not value >= 0: + raise SyncanoValueError('Value should be positive.') + + if not cls._check_field_type_for_increment(model, field_name): + raise SyncanoValueError('{} works only on integer and float fields.'.format(operation_type.capitalize())) + + @classmethod + def _check_field_type_for_increment(cls, model, field_name): + fields = {} + for field in model._meta.fields: + fields[field.name] = field.allow_increment + + if field_name not in fields: + raise SyncanoValueError('Object has not specified field.') + + if fields[field_name]: + return True + + return False + + +class ArrayOperationsMixin(object): + + @clone + def add(self, field_name, value, **kwargs): + """ + A manager method that will add a values to the array field. + + Usage:: + + data_object = Object.please.add( + field_name='array', + value=[10], + class_name='arr_test', + id=155 + ) + + Consider example: + + data_object.array = [1] + + after running:: + + data_object = Object.please.add( + field_name='array', + value=[3], + id=data_object.id, + ) + + data_object.array will be equal: [1, 3] + + and after:: + + data_object = Object.please.add( + field_name='array', + value=[1], + id=data_object.id, + ) + + data_object.array will be equal: [1, 3, 1] + + :param field_name: the array field name to which elements will be added; + :param value: the list of values to add; + :param kwargs: class_name and id usually; + :return: the processed data object; + """ + self.properties.update(kwargs) + model = self.model.get_subclass_model(**self.properties) + + self.array_validate(field_name, value, model) + return self.array_process(field_name, value, operation_type='add') + + def remove(self, field_name, value, **kwargs): + """ + A manager method that will remove a values from the array field. 
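+        The removal happens server side via a single PATCH request carrying a ``{'_remove': [...]}`` payload for
+        the given array field (see ``array_process`` below).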
+ + Usage:: + + data_object = Object.please.remove( + field_name='array', + value=[10], + class_name='arr_test', + id=155 + ) + + :param field_name: the array field name from which elements will be removed; + :param value: the list of values to remove; + :param kwargs: class_name and id usually; + :return: the processed data object; + """ + self.properties.update(kwargs) + model = self.model.get_subclass_model(**self.properties) + + self.array_validate(field_name, value, model) + return self.array_process(field_name, value, operation_type='remove') + + def add_unique(self, field_name, value, **kwargs): + """ + A manager method that will add an unique values to the array field. + + Usage:: + + data_object = Object.please.add_unique( + field_name='array', + value=[10], + class_name='arr_test', + id=155 + ) + + The main distinction between add and add unique is that: add unique will not repeat elements. + Consider example:: + + data_object.array = [1] + + after running:: + + data_object = Object.please.add_unique( + field_name='array', + value=[1], + id=data_object.id, + ) + + data_object.array will be equal: [1] + + But if only add will be run the result will be as follow: + + data_object.array will be equal: [1, 1] + + :param field_name: field_name: the array field name to which elements will be added unique; + :param value: the list of values to add unique; + :param kwargs: class_name and id usually; + :return: the processed data object; + """ + self.properties.update(kwargs) + model = self.model.get_subclass_model(**self.properties) + + self.array_validate(field_name, value, model) + return self.array_process(field_name, value, operation_type='add_unique') + + @classmethod + def array_validate(cls, field_name, value, model): + + fields = {field.name: field for field in model._meta.fields} + if field_name not in fields: + raise SyncanoValueError('Object has not specified field.') + + from syncano.models import ArrayField + if not isinstance(fields[field_name], ArrayField): + raise SyncanoValueError('Field must be of array type') + + if not isinstance(value, list): + raise SyncanoValueError('List of values expected') + + def array_process(self, field_name, value, operation_type, **kwargs): + self.endpoint = 'detail' + self.method = self.get_allowed_method('PATCH', 'PUT', 'POST') + self.data = kwargs.copy() + + if operation_type == 'add': + array_data = {'_add': value} + elif operation_type == 'remove': + array_data = {'_remove': value} + elif operation_type == 'add_unique': + array_data = {'_addunique': value} + else: + raise SyncanoValueError('Operation not supported') + + self.data.update( + {field_name: array_data} + ) + + response = self.request() + return response diff --git a/syncano/models/mixins.py b/syncano/models/mixins.py new file mode 100644 index 0000000..8c2d016 --- /dev/null +++ b/syncano/models/mixins.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- + + +class RenameMixin(object): + + def rename(self, new_name): + """ + A method for changing the name of the object; Corresponds to the Mixin in CORE; + + :param new_name: the new name for the object; + :return: a populated object; + """ + rename_path = self.links.rename + data = {'new_name': new_name} + connection = self._get_connection() + response = connection.request('POST', rename_path, data=data) + self.to_python(response) + return self diff --git a/syncano/models/options.py b/syncano/models/options.py index 1a4e9fa..605194e 100644 --- a/syncano/models/options.py +++ b/syncano/models/options.py @@ -1,14 +1,17 @@ import re from 
bisect import bisect -from urlparse import urljoin import six - from syncano.connection import ConnectionMixin -from syncano.exceptions import SyncanoValueError +from syncano.exceptions import SyncanoValidationError, SyncanoValueError from syncano.models.registry import registry from syncano.utils import camelcase_to_underscore +if six.PY3: + from urllib.parse import urljoin +else: + from urlparse import urljoin + class Options(ConnectionMixin): """Holds metadata related to model definition.""" @@ -130,15 +133,23 @@ def get_endpoint_methods(self, name): endpoint = self.get_endpoint(name) return endpoint['methods'] - def resolve_endpoint(self, name, properties): - endpoint = self.get_endpoint(name) + def resolve_endpoint(self, endpoint_name, properties, http_method=None): + if http_method and not self.is_http_method_available(http_method, endpoint_name): + raise SyncanoValidationError( + 'HTTP method {0} not allowed for endpoint "{1}".'.format(http_method, endpoint_name) + ) + endpoint = self.get_endpoint(endpoint_name) - for name in endpoint['properties']: - if name not in properties: - raise SyncanoValueError('Request property "{0}" is required.'.format(name)) + for endpoint_name in endpoint['properties']: + if endpoint_name not in properties: + raise SyncanoValueError('Request property "{0}" is required.'.format(endpoint_name)) return endpoint['path'].format(**properties) + def is_http_method_available(self, http_method_name, endpoint_name): + available_methods = self.get_endpoint_methods(endpoint_name) + return http_method_name.lower() in available_methods + def get_endpoint_query_params(self, name, params): properties = self.get_endpoint_properties(name) return {k: v for k, v in six.iteritems(params) if k not in properties} diff --git a/syncano/models/push_notification.py b/syncano/models/push_notification.py new file mode 100644 index 0000000..b8a7c86 --- /dev/null +++ b/syncano/models/push_notification.py @@ -0,0 +1,326 @@ +# -*- coding: utf-8 -*- + +from . import fields +from .base import Instance, Model + + +class DeviceBase(object): + """ + Base abstract class for GCM and APNS Devices; + """ + LINKS = ( + {'type': 'detail', 'name': 'self'}, + ) + + registration_id = fields.StringField(max_length=512, unique=True, primary_key=True) + device_id = fields.StringField(required=False) + is_active = fields.BooleanField(default=True) + label = fields.StringField(max_length=80) + user = fields.IntegerField(required=False) + + links = fields.LinksField() + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + class Meta: + abstract = True + + def send_message(self, content): + """ + A method which allows to send message directly to the device; + :param contet: Message content structure - object like; + :return: + """ + send_message_path = self.links.send_message + data = { + 'content': content + } + connection = self._get_connection() + response = connection.request('POST', send_message_path, data=data) + self.to_python(response) + return self + + +class GCMDevice(DeviceBase, Model): + """ + Model which handles the Google Cloud Message Device. 
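+    ``registration_id`` is the primary key, so single devices are fetched and deleted by it.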
+ CORE supports only Create, Delete and Read; + + Usage:: + + Create a new Device: + gcm_device = GCMDevice( + label='example label', + registration_id=86152312314401555, + user_id=u.id, + device_id='10000000001', + ) + + gcm_device.save() + + Read: + gcm_device = GCMDevice.please.get(registration_id=86152312314401554) + + Delete: + gcm_device.delete() + + Update: + gcm_device.label = 'some new label' + gcm_device.save() + + """ + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['delete', 'get', 'put', 'patch'], + 'path': '/push_notifications/gcm/devices/{registration_id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/push_notifications/gcm/devices/', + } + } + + +class APNSDevice(DeviceBase, Model): + """ + Model which handles the Apple Push Notification Server Device. + CORE supports only Create, Delete and Read; + + Usage:: + + Create a new Device: + apns_device = APNSDevice( + label='example label', + registration_id='4719084371920471208947120984731208947910827409128470912847120894', + user_id=u.id, + device_id='7189d7b9-4dea-4ecc-aa59-8cc61a20608a', + ) + apns_device.save() + + Read: + apns_device = + APNSDevice.please.get(registration_id='4719084371920471208947120984731208947910827409128470912847120894') + + Delete: + apns_device.delete() + + Update: + apns_device.label = 'some new label' + apns_device.save() + + .. note:: + + Also note the different format (from GCM) of registration_id required by APNS; the device_id have different + format too. + + """ + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['delete', 'get', 'put', 'patch'], + 'path': '/push_notifications/apns/devices/{registration_id}/', + }, + 'list': { + 'methods': ['post', 'get'], + 'path': '/push_notifications/apns/devices/', + } + } + + +class MessageBase(object): + """ + Base abstract class for GCM and APNS Messages; + """ + + status = fields.StringField(read_only=True) + content = fields.PushJSONField(default={}) + result = fields.JSONField(default={}, read_only=True) + + created_at = fields.DateTimeField(read_only=True, required=False) + updated_at = fields.DateTimeField(read_only=True, required=False) + + class Meta: + abstract = True + + +class GCMMessage(MessageBase, Model): + """ + Model which handles the Google Cloud Messaging Message. + Only creating and reading is allowed. + + Usage:: + + Create a new Message: + + message = GCMMessage( + content={ + 'registration_ids': [gcm_device.registration_id], # maximum 1000 elements; + 'data': { + 'example_data_one': 1, + 'example_data_two': 2, + } + } + ) + message.save() + + + Read: + + gcm_message = GCMMessage.please.get(id=1) + + Debugging: + + gcm_message.status - on of the (scheduled, error, partially_delivered, delivered) + gcm_message.result - a result from GCM server; + + + The data parameter is passed as-it-is to the GCM server; Base checking is made on syncano CORE; + For more details read the GCM documentation; + + .. note:: + Every save after initial one will raise an error; + + .. note:: + The altering of existing Message is not possible. It also not possible to delete message. + + """ + + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['delete', 'get'], + 'path': '/push_notifications/gcm/messages/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/push_notifications/gcm/messages/', + } + } + + +class APNSMessage(MessageBase, Model): + """ + Model which handles the Apple Push Notification Server Message. 
+ Only creating and reading is allowed. + + Usage:: + + Create new Message: + apns_message = APNSMessage( + content={ + 'registration_ids': [gcm_device.registration_id], + 'aps': {'alert': 'test alert'}, + } + ) + + apns_message.save() + + Read: + + apns_message = APNSMessage.please.get(id=1) + + Debugging: + + apns_message.status - one of the following: scheduled, error, partially_delivered, delivered; + apns_message.result - a result from APNS server; + + The 'aps' data is send 'as-it-is' to APNS, some validation is made on syncano CORE; + For more details read the APNS documentation; + + .. note:: + Every save after initial one will raise an error; + + """ + class Meta: + parent = Instance + endpoints = { + 'detail': { + 'methods': ['delete', 'get'], + 'path': '/push_notifications/apns/messages/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/push_notifications/apns/messages/', + } + } + + +class GCMConfig(Model): + """ + A model which stores information with GCM Push keys; + + Usage:: + + Add (modify) new keys: + gcm_config = GCMConfig(production_api_key='ccc', development_api_key='ddd') + gcm_config.save() + + or: + gcm_config = GCMConfig().please.get() + gcm_config.production_api_key = 'ccc' + gcm_config.development_api_key = 'ddd' + gcm_config.save() + + """ + production_api_key = fields.StringField(required=False) + development_api_key = fields.StringField(required=False) + + def is_new(self): + return False # this is predefined - never will be new + + class Meta: + parent = Instance + endpoints = { + 'list': { + 'methods': ['get', 'put'], + 'path': '/push_notifications/gcm/config/', + }, + 'detail': { + 'methods': ['get', 'put'], + 'path': '/push_notifications/gcm/config/', + }, + } + + +class APNSConfig(Model): + """ + A model which stores information with APNS Push certificates; + + Usage:: + + Add (modify) new keys: + cert_file = open('cert_file.p12', 'rb') + apns_config = APNSConfig(development_certificate=cert_file) + apns_config.save() + cert_file.close() + + """ + production_certificate_name = fields.StringField(required=False) + production_certificate = fields.FileField(required=False) + production_bundle_identifier = fields.StringField(required=False) + production_expiration_date = fields.DateField(read_only=True) + development_certificate_name = fields.StringField(required=False) + development_certificate = fields.FileField(required=False) + development_bundle_identifier = fields.StringField(required=False) + development_expiration_date = fields.DateField(read_only=True) + + def is_new(self): + return False # this is predefined - never will be new + + class Meta: + parent = Instance + endpoints = { + 'list': { + 'methods': ['get', 'put'], + 'path': '/push_notifications/apns/config/', + }, + 'detail': { + 'methods': ['get', 'put'], + 'path': '/push_notifications/apns/config/', + }, + } diff --git a/syncano/models/registry.py b/syncano/models/registry.py index 93f1a90..e6be17b 100644 --- a/syncano/models/registry.py +++ b/syncano/models/registry.py @@ -1,25 +1,27 @@ -from __future__ import unicode_literals + import re import six - from syncano import logger class Registry(object): - """Models registry.""" - + """Models registry. 
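+
+    Maps model names to model classes, caches Class schemas, and keeps track of the default connection and the
+    last used instance name. For example (assuming the ``Instance`` model is registered under its class name)::
+
+        from syncano.models import registry
+        instance_model = registry.get_model_by_name('Instance')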
+ """ def __init__(self, models=None): self.models = models or {} + self.schemas = {} self.patterns = [] self._pending_lookups = {} + self.instance_name = None + self._default_connection = None def __str__(self): return 'Registry: {0}'.format(', '.join(self.models)) def __unicode__(self): - return unicode(str(self)) + return six.u(str(self)) def __iter__(self): for name, model in six.iteritems(self.models): @@ -43,18 +45,21 @@ def get_model_by_path(self, path): def get_model_by_name(self, name): return self.models[name] - def add(self, name, cls): + def update(self, name, cls): + self.models[name] = cls + related_name = cls._meta.related_name + patterns = self.get_model_patterns(cls) + self.patterns.extend(patterns) - if name not in self.models: - self.models[name] = cls - related_name = cls._meta.related_name - patterns = self.get_model_patterns(cls) - self.patterns.extend(patterns) + setattr(self, str(name), cls) + setattr(self, str(related_name), cls.please.all()) + + logger.debug('New model: %s, %s', name, related_name) - setattr(self, str(name), cls) - setattr(self, str(related_name), cls.please.all()) + def add(self, name, cls): - logger.debug('New model: %s, %s', name, related_name) + if name not in self.models: + self.update(name, cls) if name in self._pending_lookups: lookups = self._pending_lookups.pop(name) @@ -65,17 +70,41 @@ def add(self, name, cls): def set_default_property(self, name, value): for model in self: - if name in model.__dict__: + if name not in model.__dict__: + continue - if name not in model.please.properties: - model.please.properties[name] = value - - for field in model._meta.fields: - if field.name == name: - field.default = value + for field in model._meta.fields: + if field.name == name: + field.default = value def set_default_instance(self, value): self.set_default_property('instance_name', value) + def set_used_instance(self, instance): + if instance and self.instance_name != instance or registry.instance_name is None: + self.set_default_instance(instance) # update the registry with last used instance; + self.instance_name = instance + + def clear_used_instance(self): + self.instance_name = None + self.set_default_instance(None) + + def get_schema(self, class_name): + return self.schemas.get(class_name) + + def set_schema(self, class_name, schema): + self.schemas[class_name] = schema + + def clear_schemas(self): + self.schemas = {} + + def set_default_connection(self, default_connection): + self._default_connection = default_connection + + @property + def connection(self): + if not self._default_connection: + raise Exception('Set the default connection first.') + return self._default_connection registry = Registry() diff --git a/syncano/models/relations.py b/syncano/models/relations.py new file mode 100644 index 0000000..1c23bd6 --- /dev/null +++ b/syncano/models/relations.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +from syncano.exceptions import SyncanoValueError + + +class RelationValidatorMixin(object): + + def validate(self, value, model_instance): + super(RelationValidatorMixin, self).validate(value, model_instance) + self._check_relation_value(value) + + @classmethod + def _check_relation_value(cls, value): + if value is None: + return False + + if '_add' in value or '_remove' in value: + check_value = value.get('_add') or value.get('_remove') + else: + check_value = value + + check_value = cls._make_list(check_value) + + all_ints = all([isinstance(x, int) for x in check_value]) + from .archetypes import Model + all_objects = all([isinstance(obj, 
Model) for obj in check_value]) + object_types = [type(obj) for obj in check_value] + if len(set(object_types)) != 1: + raise SyncanoValueError("All objects should be the same type.") + + if (all_ints and all_objects) or (not all_ints and not all_objects): + raise SyncanoValueError("List elements should be objects or integers.") + + if all_objects: + return True + return False + + @classmethod + def _make_list(cls, value): + if not isinstance(value, (list, tuple)): + value = [value] + return value + + +class RelationManager(RelationValidatorMixin): + + def __init__(self, instance, field_name): + super(RelationManager, self).__init__() + self.instance = instance + self.model = instance._meta + self.field_name = field_name + + def add(self, *args): + self._add_or_remove(args) + + def remove(self, *args): + self._add_or_remove(args, operation='_remove') + + def _add_or_remove(self, id_list, operation='_add'): + if self._check_relation_value(id_list): + value_ids = [obj.id for obj in id_list] + else: + value_ids = id_list + + meta = self.instance._meta + connection = meta.connection + + data = {self.field_name: {operation: value_ids}} + update_path = meta.get_endpoint(name='detail')['path'] + update_path = update_path.format(**self.instance.get_endpoint_data()) + response = connection.request('PATCH', update_path, data=data) + self.instance.to_python(response) diff --git a/syncano/models/traces.py b/syncano/models/traces.py new file mode 100644 index 0000000..e99003a --- /dev/null +++ b/syncano/models/traces.py @@ -0,0 +1,150 @@ + + +from . import fields +from .base import Model +from .custom_response import CustomResponseMixin +from .incentives import Schedule, Script, ScriptEndpoint, Trigger + + +class ScriptTrace(CustomResponseMixin, Model): + """ + :ivar status: :class:`~syncano.models.fields.ChoiceField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar executed_at: :class:`~syncano.models.fields.DateTimeField` + :ivar result: :class:`~syncano.models.fields.StringField` + :ivar duration: :class:`~syncano.models.fields.IntegerField` + """ + STATUS_CHOICES = ( + {'display_name': 'Success', 'value': 'success'}, + {'display_name': 'Failure', 'value': 'failure'}, + {'display_name': 'Timeout', 'value': 'timeout'}, + {'display_name': 'Processing', 'value': 'processing'}, + {'display_name': 'Pending', 'value': 'pending'}, + ) + + status = fields.ChoiceField(choices=STATUS_CHOICES, read_only=True, required=False) + links = fields.LinksField() + executed_at = fields.DateTimeField(read_only=True, required=False) + result = fields.JSONField(read_only=True, required=False) + duration = fields.IntegerField(read_only=True, required=False) + + class Meta: + parent = Script + endpoints = { + 'detail': { + 'methods': ['get'], + 'path': '/traces/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/traces/', + } + } + + +class ScheduleTrace(Model): + """ + :ivar status: :class:`~syncano.models.fields.ChoiceField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar executed_at: :class:`~syncano.models.fields.DateTimeField` + :ivar result: :class:`~syncano.models.fields.StringField` + :ivar duration: :class:`~syncano.models.fields.IntegerField` + """ + STATUS_CHOICES = ( + {'display_name': 'Success', 'value': 'success'}, + {'display_name': 'Failure', 'value': 'failure'}, + {'display_name': 'Timeout', 'value': 'timeout'}, + {'display_name': 'Pending', 'value': 'pending'}, + ) + + status = fields.ChoiceField(choices=STATUS_CHOICES, read_only=True, 
required=False) + links = fields.LinksField() + executed_at = fields.DateTimeField(read_only=True, required=False) + result = fields.StringField(read_only=True, required=False) + duration = fields.IntegerField(read_only=True, required=False) + + class Meta: + parent = Schedule + endpoints = { + 'detail': { + 'methods': ['get'], + 'path': '/traces/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/traces/', + } + } + + +class TriggerTrace(Model): + """ + :ivar status: :class:`~syncano.models.fields.ChoiceField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar executed_at: :class:`~syncano.models.fields.DateTimeField` + :ivar result: :class:`~syncano.models.fields.StringField` + :ivar duration: :class:`~syncano.models.fields.IntegerField` + """ + STATUS_CHOICES = ( + {'display_name': 'Success', 'value': 'success'}, + {'display_name': 'Failure', 'value': 'failure'}, + {'display_name': 'Timeout', 'value': 'timeout'}, + {'display_name': 'Pending', 'value': 'pending'}, + ) + LINKS = ( + {'type': 'detail', 'name': 'self'}, + ) + + status = fields.ChoiceField(choices=STATUS_CHOICES, read_only=True, required=False) + links = fields.LinksField() + executed_at = fields.DateTimeField(read_only=True, required=False) + result = fields.StringField(read_only=True, required=False) + duration = fields.IntegerField(read_only=True, required=False) + + class Meta: + parent = Trigger + endpoints = { + 'detail': { + 'methods': ['get'], + 'path': '/traces/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/traces/', + } + } + + +class ScriptEndpointTrace(CustomResponseMixin, Model): + """ + :ivar status: :class:`~syncano.models.fields.ChoiceField` + :ivar links: :class:`~syncano.models.fields.HyperlinkedField` + :ivar executed_at: :class:`~syncano.models.fields.DateTimeField` + :ivar result: :class:`~syncano.models.fields.StringField` + :ivar duration: :class:`~syncano.models.fields.IntegerField` + """ + STATUS_CHOICES = ( + {'display_name': 'Success', 'value': 'success'}, + {'display_name': 'Failure', 'value': 'failure'}, + {'display_name': 'Timeout', 'value': 'timeout'}, + {'display_name': 'Pending', 'value': 'pending'}, + ) + + status = fields.ChoiceField(choices=STATUS_CHOICES, read_only=True, required=False) + links = fields.LinksField() + executed_at = fields.DateTimeField(read_only=True, required=False) + result = fields.JSONField(read_only=True, required=False) + duration = fields.IntegerField(read_only=True, required=False) + + class Meta: + parent = ScriptEndpoint + endpoints = { + 'detail': { + 'methods': ['get'], + 'path': '/traces/{id}/', + }, + 'list': { + 'methods': ['get', 'post'], + 'path': '/traces/', + } + } diff --git a/syncano/release_utils.py b/syncano/release_utils.py new file mode 100644 index 0000000..e75c17c --- /dev/null +++ b/syncano/release_utils.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- + +import warnings +from functools import wraps + +warnings.simplefilter('once') + + +class Deprecated(object): + + def __init__(self, lineno, removed_in_version): + self.lineno = lineno # how many decorators decorates the depracated func; + self.removed_in_version = removed_in_version + + def __call__(self, original_func): + @wraps(original_func) + def new_func(*args, **kwargs): + warnings.warn_explicit( + message="Call to deprecated function '{}'. 
Will be removed in version: {}.".format( + original_func.__name__, + self.removed_in_version + ), + category=DeprecationWarning, + filename=original_func.__code__.co_filename, + lineno=original_func.__code__.co_firstlineno + self.lineno) + return original_func(*args, **kwargs) + return new_func diff --git a/tests/certificates/ApplePushDevelopment.p12 b/tests/certificates/ApplePushDevelopment.p12 new file mode 100644 index 0000000..a8a0aa8 Binary files /dev/null and b/tests/certificates/ApplePushDevelopment.p12 differ diff --git a/tests/integration_test.py b/tests/integration_test.py index b4e3a24..2acfcf3 100644 --- a/tests/integration_test.py +++ b/tests/integration_test.py @@ -6,8 +6,8 @@ from uuid import uuid4 import syncano -from syncano.exceptions import SyncanoValueError, SyncanoRequestError -from syncano.models import Class, CodeBox, Instance, Object, Webhook +from syncano.exceptions import SyncanoRequestError, SyncanoValueError +from syncano.models import ApiKey, Class, DataEndpoint, Instance, Model, Object, Script, ScriptEndpoint, registry class IntegrationTest(unittest.TestCase): @@ -32,7 +32,8 @@ def tearDownClass(cls): @classmethod def generate_hash(cls): - return md5('%s%s' % (uuid4(), datetime.now())).hexdigest() + hash_feed = '{}{}'.format(uuid4(), datetime.now()) + return md5(hash_feed.encode('ascii')).hexdigest() class InstanceMixin(object): @@ -42,7 +43,7 @@ def setUpClass(cls): super(InstanceMixin, cls).setUpClass() cls.instance = cls.connection.Instance.please.create( - name='i%s' % cls.generate_hash()[:10], + name='testpythonlib%s' % cls.generate_hash()[:10], description='IntegrationTest %s' % datetime.now(), ) @@ -105,11 +106,21 @@ def test_delete(self): with self.assertRaises(self.model.DoesNotExist): self.model.please.get(name=name) + def test_rename(self): + name = 'i%s' % self.generate_hash()[:10] + new_name = 'icy-snow-jon-von-doe-312' + + instance = self.model.please.create(name=name, description='rest_rename') + instance = instance.rename(new_name=new_name) + + self.assertEqual(instance.name, new_name) + class ClassIntegrationTest(InstanceMixin, IntegrationTest): model = Class def test_instance_name_is_required(self): + registry.clear_used_instance() with self.assertRaises(SyncanoValueError): list(self.model.please.all()) @@ -178,7 +189,7 @@ def test_update(self): ) cls.description = 'dummy' - for i in xrange(3): + for i in range(3): try: cls.save() except SyncanoRequestError as e: @@ -220,6 +231,7 @@ def setUpClass(cls): {'type': 'float', 'name': 'cost'}, {'type': 'boolean', 'name': 'available'}, {'type': 'datetime', 'name': 'published_at'}, + {'type': 'array', 'name': 'array'}, {'type': 'file', 'name': 'cover'}, {'type': 'reference', 'name': 'author', 'order_index': True, 'filter_index': True, 'target': cls.author.name}, @@ -256,7 +268,11 @@ def test_create(self): name='test', description='test', quantity=10, cost=10.5, published_at=datetime.now(), author=author, available=True) + book_direct = Object(class_name=self.book.name, quantity=15, cost=7.5) + book_direct.save() + book.delete() + book_direct.delete() author.delete() def test_update(self): @@ -274,113 +290,374 @@ def test_update(self): author.delete() + def test_count_and_with_count(self): + author_one = self.model.please.create( + instance_name=self.instance.name, class_name=self.author.name, + first_name='john1', last_name='doe1') + + author_two = self.model.please.create( + instance_name=self.instance.name, class_name=self.author.name, + first_name='john2', last_name='doe2') + + # just created two 
authors + + count = Object.please.list(instance_name=self.instance.name, class_name=self.author.name).count() + self.assertEqual(count, 2) + + objects, count = Object.please.list(instance_name=self.instance.name, + class_name=self.author.name).with_count() + + self.assertEqual(count, 2) + for o in objects: + self.assertTrue(isinstance(o, Model)) + + author_one.delete() + author_two.delete() + + def test_increment_and_decrement_on_integer(self): + author = self.model.please.create( + instance_name=self.instance.name, class_name=self.author.name, + first_name='john', last_name='doe') + + book = self.model.please.create( + instance_name=self.instance.name, class_name=self.book.name, + name='test', description='test', quantity=10, cost=10.5, + published_at=datetime.now(), author=author, available=True) + + incremented_book = Object.please.increment( + 'quantity', + 5, + id=book.id, + class_name=self.book.name, + ) + + self.assertEqual(incremented_book.quantity, 15) + + decremented_book = Object.please.decrement( + 'quantity', + 7, + id=book.id, + class_name=self.book.name, + ) + + self.assertEqual(decremented_book.quantity, 8) + + def test_increment_and_decrement_on_float(self): + author = self.model.please.create( + instance_name=self.instance.name, class_name=self.author.name, + first_name='john', last_name='doe') + + book = self.model.please.create( + instance_name=self.instance.name, class_name=self.book.name, + name='test', description='test', quantity=10, cost=10.5, + published_at=datetime.now(), author=author, available=True) + + incremented_book = Object.please.increment( + 'cost', + 5.5, + id=book.id, + class_name=self.book.name, + ) + + self.assertEqual(incremented_book.cost, 16) + + decremented_book = Object.please.decrement( + 'cost', + 7.6, + id=book.id, + class_name=self.book.name, + ) + + self.assertEqual(decremented_book.cost, 8.4) + + def test_add_array(self): + book = self.model.please.create( + instance_name=self.instance.name, class_name=self.book.name, + name='test', description='test', quantity=10, cost=10.5, + published_at=datetime.now(), available=True, array=[10]) + + book = Object.please.add( + 'array', + [11], + class_name=self.book.name, + id=book.id + ) + + self.assertEqual(book.array, [10, 11]) + + def test_remove_array(self): + book = self.model.please.create( + instance_name=self.instance.name, class_name=self.book.name, + name='test', description='test', quantity=10, cost=10.5, + published_at=datetime.now(), available=True, array=[10]) + + book = Object.please.remove( + 'array', + [10], + class_name=self.book.name, + id=book.id + ) -class CodeboxIntegrationTest(InstanceMixin, IntegrationTest): - model = CodeBox + self.assertEqual(book.array, []) + + def test_addunique_array(self): + book = self.model.please.create( + instance_name=self.instance.name, class_name=self.book.name, + name='test', description='test', quantity=10, cost=10.5, + published_at=datetime.now(), available=True, array=[10]) + + book = Object.please.add_unique( + 'array', + [10], + class_name=self.book.name, + id=book.id + ) + + self.assertEqual(book.array, [10]) + + book = Object.please.add_unique( + 'array', + [11], + class_name=self.book.name, + id=book.id + ) + + self.assertEqual(book.array, [10, 11]) + + +class ScriptIntegrationTest(InstanceMixin, IntegrationTest): + model = Script @classmethod def tearDownClass(cls): - for cb in cls.instance.codeboxes.all(): + for cb in cls.instance.scripts.all(): cb.delete() - super(CodeboxIntegrationTest, cls).tearDownClass() + 
super(ScriptIntegrationTest, cls).tearDownClass() def test_required_fields(self): with self.assertRaises(SyncanoValueError): + registry.clear_used_instance() list(self.model.please.all()) def test_list(self): - codeboxes = self.model.please.all(self.instance.name) - self.assertTrue(len(list(codeboxes)) >= 0) + scripts = self.model.please.all(self.instance.name) + self.assertTrue(len(list(scripts)) >= 0) def test_create(self): - codebox = self.model.please.create( + script = self.model.please.create( instance_name=self.instance.name, - name='cb%s' % self.generate_hash()[:10], + label='cb%s' % self.generate_hash()[:10], runtime_name='python', source='print "IntegrationTest"' ) - codebox.delete() + script.delete() def test_update(self): - codebox = self.model.please.create( + script = self.model.please.create( instance_name=self.instance.name, - name='cb%s' % self.generate_hash()[:10], + label='cb%s' % self.generate_hash()[:10], runtime_name='python', source='print "IntegrationTest"' ) - codebox.source = 'print "NotIntegrationTest"' - codebox.save() + script.source = 'print "NotIntegrationTest"' + script.save() - codebox2 = self.model.please.get(self.instance.name, codebox.pk) - self.assertEqual(codebox.source, codebox2.source) + script2 = self.model.please.get(self.instance.name, script.pk) + self.assertEqual(script.source, script2.source) - codebox.delete() + script.delete() def test_source_run(self): - codebox = self.model.please.create( + script = self.model.please.create( instance_name=self.instance.name, - name='cb%s' % self.generate_hash()[:10], + label='cb%s' % self.generate_hash()[:10], runtime_name='python', source='print "IntegrationTest"' ) - trace = codebox.run() + trace = script.run() + while trace.status in ['pending', 'processing']: + sleep(1) + trace.reload() + + self.assertEqual(trace.status, 'success') + self.assertDictEqual(trace.result, {'stderr': '', 'stdout': 'IntegrationTest'}) + + script.delete() + + def test_custom_response_run(self): + script = self.model.please.create( + instance_name=self.instance.name, + label='cb%s' % self.generate_hash()[:10], + runtime_name='python', + source=""" +set_response(HttpResponse(status_code=200, content='{"one": 1}', content_type='application/json'))""" + ) + + trace = script.run() while trace.status == 'pending': sleep(1) trace.reload() - self.assertEquals(trace.status, 'success') - self.assertEquals(trace.result, 'IntegrationTest') + self.assertEqual(trace.status, 'success') + self.assertDictEqual(trace.content, {'one': 1}) + self.assertEqual(trace.content_type, 'application/json') + self.assertEqual(trace.status_code, 200) - codebox.delete() + script.delete() -class WebhookIntegrationTest(InstanceMixin, IntegrationTest): - model = Webhook +class ScriptEndpointIntegrationTest(InstanceMixin, IntegrationTest): + model = ScriptEndpoint @classmethod def setUpClass(cls): - super(WebhookIntegrationTest, cls).setUpClass() + super(ScriptEndpointIntegrationTest, cls).setUpClass() - cls.codebox = CodeBox.please.create( + cls.script = Script.please.create( instance_name=cls.instance.name, - name='cb%s' % cls.generate_hash()[:10], + label='cb%s' % cls.generate_hash()[:10], runtime_name='python', source='print "IntegrationTest"' ) + cls.custom_script = Script.please.create( + instance_name=cls.instance.name, + label='cb%s' % cls.generate_hash()[:10], + runtime_name='python', + source=""" +set_response(HttpResponse(status_code=200, content='{"one": 1}', content_type='application/json'))""" + ) + @classmethod def tearDownClass(cls): - 
cls.codebox.delete() - super(WebhookIntegrationTest, cls).tearDownClass() + cls.script.delete() + super(ScriptEndpointIntegrationTest, cls).tearDownClass() def test_required_fields(self): with self.assertRaises(SyncanoValueError): + registry.clear_used_instance() list(self.model.please.all()) def test_list(self): - webhooks = self.model.please.all(self.instance.name) - self.assertTrue(len(list(webhooks)) >= 0) + script_endpoints = self.model.please.all(self.instance.name) + self.assertTrue(len(list(script_endpoints)) >= 0) def test_create(self): - webhook = self.model.please.create( + script_endpoint = self.model.please.create( instance_name=self.instance.name, - codebox=self.codebox.id, - slug='wh%s' % self.generate_hash()[:10], + script=self.script.id, + name='wh%s' % self.generate_hash()[:10], ) - webhook.delete() + script_endpoint.delete() - def test_codebox_run(self): - webhook = self.model.please.create( + def test_script_run(self): + script_endpoint = self.model.please.create( instance_name=self.instance.name, - codebox=self.codebox.id, - slug='wh%s' % self.generate_hash()[:10], + script=self.script.id, + name='wh%s' % self.generate_hash()[:10], ) - trace = webhook.run() - self.assertEquals(trace.status, 'success') - self.assertEquals(trace.result, 'IntegrationTest') - webhook.delete() + trace = script_endpoint.run() + self.assertEqual(trace.status, 'success') + self.assertDictEqual(trace.result, {'stderr': '', 'stdout': 'IntegrationTest'}) + script_endpoint.delete() + + def test_custom_script_run(self): + script_endpoint = self.model.please.create( + instance_name=self.instance.name, + script=self.custom_script.id, + name='wh%s' % self.generate_hash()[:10], + ) + + trace = script_endpoint.run() + self.assertDictEqual(trace, {'one': 1}) + script_endpoint.delete() + + +class ApiKeyIntegrationTest(InstanceMixin, IntegrationTest): + model = ApiKey + + def test_api_key_flags(self): + api_key = self.model.please.create( + allow_user_create=True, + ignore_acl=True, + allow_anonymous_read=True, + instance_name=self.instance.name, + ) + + reloaded_api_key = self.model.please.get(id=api_key.id, instance_name=self.instance.name) + + self.assertTrue(reloaded_api_key.allow_user_create, True) + self.assertTrue(reloaded_api_key.ignore_acl, True) + self.assertTrue(reloaded_api_key.allow_anonymous_read, True) + + +class DataEndpointIntegrationTest(InstanceMixin, IntegrationTest): + @classmethod + def setUpClass(cls): + super(DataEndpointIntegrationTest, cls).setUpClass() + cls.klass = cls.instance.classes.create( + name='sample_klass', + schema=[ + {'name': 'test1', 'type': 'string', 'filter_index': True}, + {'name': 'test2', 'type': 'string', 'filter_index': True}, + {'name': 'test3', 'type': 'integer', 'filter_index': True}, + ]) + + cls.data_object = cls.klass.objects.create( + class_name=cls.klass.name, + test1='atest', + test2='321', + test3=50 + ) + + cls.data_object = cls.klass.objects.create( + class_name=cls.klass.name, + test1='btest', + test2='432', + test3=45 + ) + + cls.data_object = cls.klass.objects.create( + class_name=cls.klass.name, + test1='ctest', + test2='543', + test3=35 + ) + + cls.data_endpoint = cls.instance.data_endpoints.create( + name='test_data_endpoint', + description='test description', + class_name=cls.klass.name, + query={'test3': {'_gt': 35}} + ) + + def test_mapping_class_name_lib_creation(self): + data_endpoint = DataEndpoint( + name='yet_another_data_endpoint', + class_name=self.klass.name, + ) + data_endpoint.save() + self.assertEqual(data_endpoint.class_name, 
'sample_klass') + + def test_mapping_class_name_lib_read(self): + data_endpoint = self.instance.data_endpoints.get(name='test_data_endpoint') + self.assertEqual(data_endpoint.class_name, 'sample_klass') + + def test_data_endpoint_filtering(self): + data_endpoint = self.instance.data_endpoints.get(name='test_data_endpoint') + objects = [object for object in data_endpoint.get()] + self.assertEqual(len(objects), 2) + + objects = [object for object in data_endpoint.get(test1__eq='atest')] + self.assertEqual(len(objects), 1) + + def test_backward_compatibility_name(self): + from syncano.models import EndpointData + + data_endpoint = EndpointData.please.get(name='test_data_endpoint') + self.assertEqual(data_endpoint.class_name, 'sample_klass') diff --git a/tests/integration_test_accounts.py b/tests/integration_test_accounts.py new file mode 100644 index 0000000..44988c3 --- /dev/null +++ b/tests/integration_test_accounts.py @@ -0,0 +1,74 @@ +import os + +from syncano.connection import Connection + +from .integration_test import IntegrationTest + + +class LoginTest(IntegrationTest): + + @classmethod + def setUpClass(cls): + super(LoginTest, cls).setUpClass() + + cls.INSTANCE_NAME = os.getenv('INTEGRATION_INSTANCE_NAME') + cls.USER_NAME = os.getenv('INTEGRATION_USER_NAME') + cls.USER_PASSWORD = os.getenv('INTEGRATION_USER_PASSWORD') + cls.CLASS_NAME = "login_class_test" + + instance = cls.connection.Instance.please.create(name=cls.INSTANCE_NAME) + api_key = instance.api_keys.create(allow_user_create=True, + ignore_acl=True) + + user = instance.users.create(username=cls.USER_NAME, + password=cls.USER_PASSWORD) + + instance.classes.create(name=cls.CLASS_NAME, + schema='[{"name":"obj","type":"string"}]') + + cls.USER_KEY = user.user_key + cls.USER_API_KEY = api_key.api_key + + @classmethod + def tearDownClass(cls): + cls.connection.Instance.please.delete(name=cls.INSTANCE_NAME) + cls.connection = None + + def check_connection(self, con): + response = con.request('GET', '/v1.1/instances/{}/classes/'.format(self.INSTANCE_NAME)) + + obj_list = response['objects'] + + self.assertEqual(len(obj_list), 2) + self.assertEqual(sorted([o['name'] for o in obj_list]), sorted(['user_profile', self.CLASS_NAME])) + + def test_admin_login(self): + con = Connection(host=self.API_ROOT, + email=self.API_EMAIL, + password=self.API_PASSWORD) + self.check_connection(con) + + def test_admin_alt_login(self): + con = Connection(host=self.API_ROOT, + api_key=self.API_KEY) + self.check_connection(con) + + def test_user_login(self): + con = Connection(host=self.API_ROOT, + username=self.USER_NAME, + password=self.USER_PASSWORD, + api_key=self.API_KEY, + instance_name=self.INSTANCE_NAME) + self.check_connection(con) + + def test_user_alt_login(self): + con = Connection(host=self.API_ROOT, + api_key=self.USER_API_KEY, + user_key=self.USER_KEY, + instance_name=self.INSTANCE_NAME) + self.check_connection(con) + + def test_user_auth(self): + self.assertTrue( + self.connection.User().auth(username=self.USER_NAME, password=self.USER_PASSWORD) + ) diff --git a/tests/integration_test_backups.py b/tests/integration_test_backups.py new file mode 100644 index 0000000..b78f870 --- /dev/null +++ b/tests/integration_test_backups.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +import time + +from syncano.models import Backup +from tests.integration_test import InstanceMixin, IntegrationTest + + +class FullBackupTestCase(InstanceMixin, IntegrationTest): + + def _test_backup_create(self): + new_backup = Backup() + backup_test = new_backup.save() + 
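# Usage sketch of the full-backup lifecycle this test case exercises, assuming
# syncano.connect() has already been called and an instance is in use (as the
# InstanceMixin above arranges); the polling loop mirrors the helper further down.
import time
from syncano.models import Backup

backup = Backup().save()                      # schedule a new full backup
backup = Backup.please.get(id=backup.id)      # re-read it to inspect status/author
while backup.status in ['scheduled', 'running']:
    time.sleep(1)                             # wait until the backup has finished
    backup.reload()
restore = backup.schedule_restore()           # restore the instance from this backup
backup.delete()                               # clean up once it is no longer needed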
+ backup = Backup.please.get(id=backup_test.id) + self.assertTrue(backup) + self.assertEqual(backup.id, backup_test.id) + self.assertEqual(backup.author.email, self.API_EMAIL) + + return backup.id + + def _test_backup_detail(self, backup_id): + backup = Backup.please.get(id=backup_id) + + self.assertEqual(backup.id, backup_id) + self.assertEqual(backup.author.email, self.API_EMAIL) + + def _test_backup_list(self): + + backups = [backup for backup in Backup.please.list()] + self.assertTrue(len(backups)) # at least one backup here; + + def _test_backup_schedule_restore(self, backup_id): + backup = Backup.please.get(id=backup_id) + + # wait for backup to be saved + seconds_waited = 0 + while backup.status in ['scheduled', 'running']: + seconds_waited += 1 + self.assertTrue(seconds_waited < 20, 'Waiting for backup to be saved takes too long.') + time.sleep(1) + backup.reload() + + restore = backup.schedule_restore() + self.assertIn(restore.status, ['success', 'scheduled']) + + def _test_backup_delete(self, backup_id): + backup = Backup.please.get(id=backup_id) + backup.delete() + backups = [backup_object for backup_object in Backup.please.list()] + self.assertEqual(len(backups), 0) + + def test_backup(self): + # we provide one test for all functionality to avoid creating too many backups; + backup_id = self._test_backup_create() + self._test_backup_list() + self._test_backup_detail(backup_id=backup_id) + self._test_backup_schedule_restore(backup_id=backup_id) + self._test_backup_delete(backup_id=backup_id) diff --git a/tests/integration_test_batch.py b/tests/integration_test_batch.py new file mode 100644 index 0000000..c7e4b6e --- /dev/null +++ b/tests/integration_test_batch.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +import six +from syncano.models import Class, Model, Object, User +from tests.integration_test import InstanceMixin, IntegrationTest + + +class ManagerBatchTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(ManagerBatchTest, cls).setUpClass() + cls.klass = cls.instance.classes.create(name='class_a', schema=[{'name': 'title', 'type': 'string'}]) + cls.update1 = cls.klass.objects.create(title='update1') + cls.update2 = cls.klass.objects.create(title='update2') + cls.update3 = cls.klass.objects.create(title='update3') + cls.delete1 = cls.klass.objects.create(title='delete1') + cls.delete2 = cls.klass.objects.create(title='delete2') + cls.delete3 = cls.klass.objects.create(title='delete3') + + def test_batch_create(self): + + objects = [] + for i in range(5): + objects.append(Object(instance_name=self.instance.name, class_name=self.klass.name, title=str(i))) + + results = Object.please.bulk_create(*objects) + for r in results: + self.assertTrue(isinstance(r, Model)) + self.assertTrue(r.id) + self.assertTrue(r.title) + + # test batch now: + results = Object.please.batch( + self.klass.objects.as_batch().create(title='one'), + self.klass.objects.as_batch().create(title='two'), + self.klass.objects.as_batch().create(title='three'), + ) + + for r in results: + self.assertTrue(isinstance(r, Model)) + self.assertTrue(r.id) + self.assertTrue(r.title) + + def test_create_batch_users(self): + users = self.instance.users.bulk_create( + User(username='Terminator', password='skynet'), + User(username='Terminator2', password='skynet'), + ) + + self.assertEqual(len(set([u.username for u in users])), 2) + + for user in users: + self.assertTrue(isinstance(user, User)) + self.assertTrue(user.id) + self.assertTrue(user.username in ['Terminator', 'Terminator2']) + + 
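# Sketch of the two batching styles this test compares, assuming `instance` is an
# already-fetched Instance (as created in setUpClass); usernames are placeholders.
from syncano.models import User

# one request, many new objects, built from unsaved model instances:
users = instance.users.bulk_create(
    User(username='alice', password='secret'),
    User(username='bob', password='secret'),
)

# the same effect expressed as deferred manager calls collected into a batch:
users = instance.users.batch(
    instance.users.as_batch().create(username='carol', password='secret'),
    instance.users.as_batch().create(username='dave', password='secret'),
)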
# test batch now: + users = self.instance.users.batch( + self.instance.users.as_batch().create(username='Terminator3', password='SarahConnor'), + self.instance.users.as_batch().create(username='Terminator4', password='BigTruckOnRoad'), + ) + + for user in users: + self.assertTrue(isinstance(user, User)) + self.assertTrue(user.id) + self.assertTrue(user.username in ['Terminator3', 'Terminator4']) + + def test_batch_update(self): + updates = Object.please.batch( + self.klass.objects.as_batch().update(id=self.update1.id, title='FactoryChase'), + self.klass.objects.as_batch().update(id=self.update1.id, title='Photoplay'), + self.klass.objects.as_batch().update(id=self.update1.id, title='Intimacy'), + ) + + self.assertEqual(len(set([u.title for u in updates])), 3) + + for u in updates: + self.assertTrue(u.title in ['FactoryChase', 'Photoplay', 'Intimacy']) + + def test_batch_delete(self): + deletes = Object.please.batch( + self.klass.objects.as_batch().delete(id=self.delete1.id), + self.klass.objects.as_batch().delete(id=self.delete2.id), + ) + + for d in deletes: + self.assertTrue(d['code'], 204) + + def test_batch_mix(self): + mix_batches = Object.please.batch( + self.klass.objects.as_batch().create(title='four'), + self.klass.objects.as_batch().update(id=self.update3.id, title='TerminatorArrival'), + self.klass.objects.as_batch().delete(id=self.delete3.id) + ) + + # assert create; + self.assertTrue(mix_batches[0].id) + self.assertEqual(mix_batches[0].title, 'four') + + # assert update; + self.assertEqual(mix_batches[1].title, 'TerminatorArrival') + + # assert delete; + self.assertEqual(mix_batches[2]['code'], 204) + + def test_in_bulk_get(self): + + self.update1.reload() + self.update2.reload() + self.update3.reload() + + # test object bulk; + bulk_res = self.klass.objects.in_bulk([self.update1.id, self.update2.id, self.update3.id]) + + for res_id, res in six.iteritems(bulk_res): + self.assertEqual(res_id, res.id) + + self.assertEqual(bulk_res[self.update1.id].title, self.update1.title) + self.assertEqual(bulk_res[self.update2.id].title, self.update2.title) + self.assertEqual(bulk_res[self.update3.id].title, self.update3.title) + + # test class bulk + + c_bulk_res = Class.please.in_bulk(['class_a']) + + self.assertEqual(c_bulk_res['class_a'].name, 'class_a') + + # test 404 + + c_bulk_res = Class.please.in_bulk(['class_b']) + + self.assertEqual(c_bulk_res['class_b']['code'], 404) diff --git a/tests/integration_test_cache.py b/tests/integration_test_cache.py new file mode 100644 index 0000000..3aff7ad --- /dev/null +++ b/tests/integration_test_cache.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +from syncano.models import RuntimeChoices +from tests.integration_test import InstanceMixin, IntegrationTest + + +class DataEndpointCacheTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(DataEndpointCacheTest, cls).setUpClass() + cls.klass = cls.instance.classes.create( + name='sample_klass', + schema=[ + {'name': 'test1', 'type': 'string'}, + {'name': 'test2', 'type': 'string'} + ]) + + cls.data_object = cls.klass.objects.create( + class_name=cls.klass.name, + test1='123', + test2='321', + ) + + cls.data_endpoint = cls.instance.data_endpoints.create( + name='test_data_endpoint', + description='test description', + class_name=cls.klass.name + ) + + def test_cache_request(self): + data_endpoint = list(self.data_endpoint.get(cache_key='12345')) + + self.assertTrue(data_endpoint) + + for data_object in data_endpoint: + self.assertTrue(data_object) + + +class 
ScriptEndpointCacheTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(ScriptEndpointCacheTest, cls).setUpClass() + + cls.script = cls.instance.scripts.create( + label='test_script', + description='test script desc', + source='print(12)', + runtime_name=RuntimeChoices.PYTHON_V5_0, + ) + + cls.script_endpoint = cls.instance.script_endpoints.create( + name='test_script_endpoint', + script=cls.script.id + ) + + def test_cache_request(self): + response = self.script_endpoint.run(cache_key='123456') + self.assertEqual(response.result['stdout'], '12') diff --git a/tests/integration_test_custom_socket.py b/tests/integration_test_custom_socket.py new file mode 100644 index 0000000..16b4623 --- /dev/null +++ b/tests/integration_test_custom_socket.py @@ -0,0 +1,249 @@ +# -*- coding: utf-8 -*- +import time + +from syncano.models import ( + Class, + ClassDependency, + CustomSocket, + Endpoint, + RuntimeChoices, + Script, + ScriptCall, + ScriptDependency, + ScriptEndpoint, + SocketEndpoint +) +from tests.integration_test import InstanceMixin, IntegrationTest + + +class CustomSocketTest(InstanceMixin, IntegrationTest): + + def test_install_custom_socket(self): + # this tests new ScriptEndpoint dependency create; + self.assert_custom_socket('installing', self._define_dependencies_new_script_endpoint) + + def test_dependencies_new_script(self): + self.assert_custom_socket('new_script_installing', self._define_dependencies_new_script) + + def test_dependencies_existing_script(self): + self.assert_custom_socket('existing_script_installing', self._define_dependencies_existing_script) + + def test_dependencies_existing_script_endpoint(self): + self.assert_custom_socket('existing_script_e_installing', + self._define_dependencies_existing_script_endpoint) + + def test_creating_raw_data(self): + custom_socket = CustomSocket.please.create( + name='my_custom_socket_123', + endpoints={ + "my_custom_endpoint_123": { + "calls": [{"type": "script", "name": "script_123", "methods": ["GET", "POST"]}] + } + }, + dependencies=[ + { + "type": "script", + "runtime_name": "python_library_v5.0", + "name": "script_123", + "source": "print(123)" + }, + { + "type": "class", + "name": "klass", + "schema": [ + {"name": "fieldA", "type": "string"}, + {"name": "fieldB", "type": "integer"}, + ] + } + ] + ) + + self.assertTrue(custom_socket.name) + + def test_custom_socket_run(self): + suffix = 'default' + custom_socket = self._create_custom_socket(suffix, self._define_dependencies_new_script_endpoint) + self._assert_custom_socket(custom_socket) + results = custom_socket.run('my_endpoint_{}'.format(suffix)) + self.assertEqual(results['result']['stdout'], suffix) + + def test_custom_socket_recheck(self): + suffix = 'recheck' + custom_socket = self._create_custom_socket(suffix, self._define_dependencies_new_script_endpoint) + self._assert_custom_socket(custom_socket) + custom_socket = custom_socket.recheck() + self._assert_custom_socket(custom_socket) + + def test_fetching_all_endpoints(self): + all_endpoints = SocketEndpoint.get_all_endpoints() + self.assertTrue(isinstance(all_endpoints, list)) + self.assertTrue(len(all_endpoints) >= 1) + self.assertTrue(all_endpoints[0].name) + + def test_endpoint_run(self): + script_endpoint = SocketEndpoint.get_all_endpoints()[0] + result = script_endpoint.run() + self.assertIsInstance(result, dict) + self.assertTrue(result['result']['stdout']) + + def test_custom_socket_update(self): + socket_to_update = self._create_custom_socket('to_update', 
self._define_dependencies_new_script_endpoint) + socket_to_update.remove_endpoint(endpoint_name='my_endpoint_to_update') + + new_endpoint = Endpoint(name='my_endpoint_new_to_update') + new_endpoint.add_call( + ScriptCall(name='script_default', methods=['GET']) + ) + + socket_to_update.add_endpoint(new_endpoint) + socket_to_update.update() + time.sleep(2) # wait for custom socket setup; + socket_to_update.reload() + self.assertIn('my_endpoint_new_to_update', socket_to_update.endpoints) + + def test_class_dependency_new(self): + suffix = 'new_class' + custom_socket = self._create_custom_socket(suffix, self._define_dependencies_new_class) + self._assert_custom_socket(custom_socket) + + def test_class_dependency_existing(self): + suffix = 'existing_class' + custom_socket = self._create_custom_socket(suffix, self._define_dependencies_new_class) + self._assert_custom_socket(custom_socket) + + def assert_custom_socket(self, suffix, dependency_method): + custom_socket = self._create_custom_socket(suffix, dependency_method=dependency_method) + self._assert_custom_socket(custom_socket) + + def _assert_custom_socket(self, custom_socket): + self._wait_till_socket_process(custom_socket) + self.assertTrue(custom_socket.name) + self.assertTrue(custom_socket.created_at) + self.assertTrue(custom_socket.updated_at) + + @classmethod + def _create_custom_socket(cls, suffix, dependency_method): + custom_socket = CustomSocket(name='my_custom_socket_{}'.format(suffix)) + + cls._define_endpoints(suffix, custom_socket) + dependency_method(suffix, custom_socket) + + custom_socket.install() + return custom_socket + + @classmethod + def _define_endpoints(cls, suffix, custom_socket): + endpoint = Endpoint(name='my_endpoint_{}'.format(suffix)) + endpoint.add_call( + ScriptCall( + name='script_endpoint_{}'.format(suffix), + methods=['GET', 'POST'] + ) + ) + custom_socket.add_endpoint(endpoint) + + @classmethod + def _define_dependencies_new_class(cls, suffix, custom_socket): + cls._add_base_script(suffix, custom_socket) + custom_socket.add_dependency( + ClassDependency( + Class( + name="test_class_{}".format(suffix), + schema=[ + {"name": "testA", "type": "string"}, + {"name": "testB", "type": "integer"}, + ] + ) + ) + ) + + @classmethod + def _define_dependencies_existing_class(cls, suffix, custom_socket): + cls._add_base_script(suffix, custom_socket) + klass = Class( + name="test_class_{}".format(suffix), + schema=[ + {"name": "testA", "type": "string"}, + {"name": "testB", "type": "integer"}, + ] + ) + klass.save() + custom_socket.add_dependency( + ClassDependency( + klass + ) + ) + + @classmethod + def _define_dependencies_new_script_endpoint(cls, suffix, custom_socket): + script = cls._create_script(suffix) + script_endpoint = ScriptEndpoint( + name='script_endpoint_{}'.format(suffix), + script=script.id + ) + custom_socket.add_dependency( + ScriptDependency( + script_endpoint + ) + ) + + @classmethod + def _define_dependencies_new_script(cls, suffix, custom_socket): + custom_socket.add_dependency( + ScriptDependency( + Script( + source='print("{}")'.format(suffix), + runtime_name=RuntimeChoices.PYTHON_V5_0 + ), + name='script_endpoint_{}'.format(suffix), + ) + ) + + @classmethod + def _define_dependencies_existing_script(cls, suffix, custom_socket): + # create Script first: + cls._create_script(suffix) + custom_socket.add_dependency( + ScriptDependency( + Script.please.first(), + name='script_endpoint_{}'.format(suffix), + ) + ) + + @classmethod + def _define_dependencies_existing_script_endpoint(cls, suffix, 
custom_socket): + script = cls._create_script(suffix) + ScriptEndpoint.please.create( + name='script_endpoint_{}'.format(suffix), + script=script.id + ) + custom_socket.add_dependency( + ScriptDependency( + ScriptEndpoint.please.first() + ) + ) + + @classmethod + def _add_base_script(cls, suffix, custom_socket): + custom_socket.add_dependency( + ScriptDependency( + Script( + source='print("{}")'.format(suffix), + runtime_name=RuntimeChoices.PYTHON_V5_0 + ), + name='script_endpoint_{}'.format(suffix), + ) + ) + + @classmethod + def _create_script(cls, suffix): + return Script.please.create( + label='script_{}'.format(suffix), + runtime_name=RuntimeChoices.PYTHON_V5_0, + source='print("{}")'.format(suffix) + ) + + @classmethod + def _wait_till_socket_process(cls, custom_socket): + while custom_socket.status == 'checking': + custom_socket.reload() diff --git a/tests/integration_test_data_endpoint.py b/tests/integration_test_data_endpoint.py new file mode 100644 index 0000000..6ce9c1e --- /dev/null +++ b/tests/integration_test_data_endpoint.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +import re + +from syncano.models import Class, DataEndpoint, Object, ResponseTemplate +from tests.integration_test import InstanceMixin, IntegrationTest + + +class DataEndpointTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(DataEndpointTest, cls).setUpClass() + + schema = [ + { + 'name': 'title', + 'type': 'string', + 'order_index': True, + 'filter_index': True + } + ] + + template_content = ''' + {% if action == 'list' %} + {% set objects = response.objects %} + {% elif action == 'retrieve' %} + {% set objects = [response] %} + {% else %} + {% set objects = [] %} + {% endif %} + {% if objects %} + + + + {% for key in objects[0] if key not in fields_to_skip %} + {{ key }} + {% endfor %} + + {% for object in objects %} + + {% for key, value in object.iteritems() if key not in fields_to_skip %} + {{ value }} + {% endfor %} + + {% endfor %} + + {% endif %} + ''' + + template_context = { + "tr_header_classes": "", + "th_header_classes": "", + "tr_row_classes": "", + "table_classes": "", + "td_row_classes": "", + "fields_to_skip": [ + "id", + "channel", + "channel_room", + "group", + "links", + "group_permissions", + "owner_permissions", + "other_permissions", + "owner", + "revision", + "updated_at", + "created_at" + ] + } + + cls.klass = Class(name='test_class', schema=schema).save() + cls.template = ResponseTemplate( + name='test_template', + content=template_content, + content_type='text/html', + context=template_context + ).save() + cls.data_endpoint = DataEndpoint(name='test_endpoint', class_name='test_class').save() + + def setUp(self): + for obj in self.instance.classes.get(name='test_class').objects.all(): + obj.delete() + + def test_template_response(self): + Object(class_name=self.klass.name, title='test_title').save() + response = list(self.data_endpoint.get(response_template=self.template)) + self.assertEqual(len(response), 1, 'Data endpoint should return 1 element if queried with response_template.') + data = re.sub('[\s+]', '', response[0]) + self.assertEqual(data, '
title
test_title
') + + def test_create_object(self): + objects_count = len(list(self.data_endpoint.get())) + self.assertEqual(objects_count, 0) + self.data_endpoint.add_object(title='another title') + objects_count = len(list(self.data_endpoint.get())) + self.assertEqual(objects_count, 1, 'New object should have been created.') + obj = next(self.data_endpoint.get()) + self.assertEqual(obj['title'], 'another title', 'Created object should have proper title.') diff --git a/tests/integration_test_data_objects.py b/tests/integration_test_data_objects.py new file mode 100644 index 0000000..cc7ca06 --- /dev/null +++ b/tests/integration_test_data_objects.py @@ -0,0 +1,146 @@ +# -*- coding: utf-8 -*- +from hashlib import md5 + +import requests +import six +from syncano.models import Object +from tests.integration_test import InstanceMixin, IntegrationTest + +try: + # python2 + from StringIO import StringIO +except ImportError: + # python3 + from io import StringIO + + +class DataObjectFileTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(DataObjectFileTest, cls).setUpClass() + + cls.schema = [ + {'name': 'test_field_a', 'type': 'string'}, + {'name': 'test_field_file', 'type': 'file'}, + ] + cls.class_name = 'test_object_file' + cls.initial_field_a = 'some_string' + cls.file_path = 'tests/test_files/python-logo.png' + cls.instance.classes.create( + name=cls.class_name, + schema=cls.schema + ) + with open(cls.file_path, 'rb') as f: + cls.file_md5 = cls.get_file_md5(f) + + def test_creating_file_object(self): + data_object = self._create_object_with_file() + self.assertEqual(data_object.test_field_a, self.initial_field_a) + self.assert_file_md5(data_object) + + def test_updating_another_field(self): + data_object = self._create_object_with_file() + file_url = data_object.test_field_file + + # no changes made to the file; + update_string = 'some_other_string' + data_object.test_field_a = update_string + data_object.save() + + self.assertEqual(data_object.test_field_file, file_url) + self.assertEqual(data_object.test_field_a, update_string) + self.assert_file_md5(data_object) + + def test_updating_file_field(self): + data_object = self._create_object_with_file() + file_url = data_object.test_field_file + + update_string = 'updating also field a' + file_content = 'some example text file' + new_file = StringIO(file_content) + data_object.test_field_file = new_file + data_object.test_field_a = update_string + data_object.save() + + self.assertEqual(data_object.test_field_a, update_string) + self.assertNotEqual(data_object.test_field_file, file_url) + + # check file content; + file_content_s3 = self.get_s3_file(data_object.test_field_file) + self.assertEqual(file_content_s3, file_content) + + def test_manager_update(self): + data_object = self._create_object_with_file() + file_url = data_object.test_field_file + # update only string field; + update_string = 'manager updating' + Object.please.update( + id=data_object.id, + class_name=self.class_name, + test_field_a=update_string + ) + + data_object = Object.please.get(id=data_object.id, class_name=self.class_name) + self.assertEqual(data_object.test_field_a, update_string) + # shouldn't change; + self.assertEqual(data_object.test_field_file, file_url) + + # update also a file; + new_update_string = 'manager with file update' + file_content = 'manager file update' + new_file = StringIO(file_content) + Object.please.update( + id=data_object.id, + class_name=self.class_name, + test_field_a=new_update_string, + test_field_file=new_file + ) + 
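# Manager-level sketch of swapping a file field, assuming a class with a 'file'-type
# field such as the 'test_object_file' class set up above; `data_object` stands for
# any previously created object of that class and the file body is a placeholder.
from io import StringIO           # Python 2 would fall back to StringIO.StringIO
from syncano.models import Object

replacement = StringIO('replacement file body')
Object.please.update(
    id=data_object.id,
    class_name='test_object_file',
    test_field_file=replacement,  # uploading a new file yields a fresh download URL
)
refreshed = Object.please.get(id=data_object.id, class_name='test_object_file')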
+ data_object = Object.please.get(id=data_object.id, class_name=self.class_name) + self.assertEqual(data_object.test_field_a, new_update_string) + # should change; + self.assertNotEqual(data_object.test_field_file, file_url) + + # check file content; + file_content_s3 = self.get_s3_file(data_object.test_field_file) + self.assertEqual(file_content_s3, file_content) + + def test_manager_create(self): + create_string = 'manager create' + with open(self.file_path, 'rb') as f: + data_object = Object.please.create( + class_name=self.class_name, + test_field_a=create_string, + test_field_file=f + ) + + self.assertEqual(data_object.test_field_a, create_string) + self.assert_file_md5(data_object) + + @classmethod + def get_file_md5(cls, file_content): + if not isinstance(file_content, (six.string_types, six.binary_type)): + file_content = file_content.read() + return md5(file_content).hexdigest() + + def assert_file_md5(self, data_object): + file_content = requests.get(data_object.test_field_file).content + file_md5 = self.get_file_md5(file_content) + self.assertEqual(self.file_md5, file_md5) + + @classmethod + def get_s3_file(cls, url): + file_content_s3 = requests.get(url).content + if hasattr(file_content_s3, 'decode'): + file_content_s3 = file_content_s3.decode('utf-8') + return file_content_s3 + + def _create_object_with_file(self): + with open('tests/test_files/python-logo.png', 'rb') as f: + data_object = Object.please.create( + class_name=self.class_name, + test_field_a=self.initial_field_a, + test_field_file=f, + ) + return data_object diff --git a/tests/integration_test_geo.py b/tests/integration_test_geo.py new file mode 100644 index 0000000..685f828 --- /dev/null +++ b/tests/integration_test_geo.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +import six +from syncano.exceptions import SyncanoValueError +from syncano.models import Class, Distance, GeoPoint, Object +from tests.integration_test import InstanceMixin, IntegrationTest + + +class GeoPointApiTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(GeoPointApiTest, cls).setUpClass() + + cls.city_model = Class.please.create( + instance_name=cls.instance.name, + name='city', + schema=[ + {"name": "city", "type": "string"}, + {"name": "location", "type": "geopoint", "filter_index": True}, + ] + ) + + cls.warsaw = cls.city_model.objects.create(location=(52.2240698, 20.9942933), city='Warsaw') + cls.paris = cls.city_model.objects.create(location=(52.4731384, 13.5425588), city='Berlin') + cls.berlin = cls.city_model.objects.create(location=(48.8589101, 2.3125377), city='Paris') + cls.london = cls.city_model.objects.create(city='London') + + cls.list_london = ['London'] + cls.list_warsaw = ['Warsaw'] + cls.list_warsaw_berlin = ['Warsaw', 'Berlin'] + cls.list_warsaw_berlin_paris = ['Warsaw', 'Berlin', 'Paris'] + + def test_filtering_on_geo_point_near(self): + + distances = { + 100: self.list_warsaw, + 600: self.list_warsaw_berlin, + 1400: self.list_warsaw_berlin_paris + } + + for distance, cities in six.iteritems(distances): + objects = Object.please.list(instance_name=self.instance.name, class_name="city").filter( + location__near={ + "latitude": 52.2297, + "longitude": 21.0122, + "kilometers": distance, + } + ) + + result_list = self._prepare_result_list(objects) + + self.assertListEqual(result_list, cities) + + def test_filtering_on_geo_point_near_miles(self): + objects = Object.please.list(instance_name=self.instance.name, class_name="city").filter( + location__near={ + "latitude": 52.2297, + "longitude": 
21.0122, + "miles": 10, + } + ) + result_list = self._prepare_result_list(objects) + self.assertListEqual(result_list, self.list_warsaw) + + def test_filtering_on_geo_point_near_with_another_syntax(self): + objects = self.city_model.objects.filter( + location__near=(GeoPoint(52.2297, 21.0122), Distance(kilometers=10)) + ) + result_list = self._prepare_result_list(objects) + self.assertListEqual(result_list, self.list_warsaw) + + objects = self.city_model.objects.filter( + location__near=(GeoPoint(52.2297, 21.0122), Distance(miles=10)) + ) + result_list = self._prepare_result_list(objects) + self.assertListEqual(result_list, self.list_warsaw) + + def test_filtering_on_geo_point_exists(self): + objects = self.city_model.objects.filter( + location__exists=True + ) + + result_list = [o.city for o in objects] + + self.assertListEqual(result_list, self.list_warsaw_berlin_paris) + + objects = self.city_model.objects.filter( + location__exists=False + ) + + result_list = self._prepare_result_list(objects) + + self.assertListEqual(result_list, self.list_london) + + def test_distance_fail(self): + with self.assertRaises(SyncanoValueError): + self.city_model.objects.filter( + location__near=(GeoPoint(52.2297, 21.0122), Distance(miles=10, kilometers=20)) + ) + + with self.assertRaises(SyncanoValueError): + self.city_model.objects.filter( + location__near=(GeoPoint(52.2297, 21.0122), Distance()) + ) + + def _prepare_result_list(self, objects): + return [o.city for o in objects] diff --git a/tests/integration_test_push.py b/tests/integration_test_push.py new file mode 100644 index 0000000..2eaed62 --- /dev/null +++ b/tests/integration_test_push.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +import uuid + +from syncano.exceptions import SyncanoRequestError +from syncano.models import APNSConfig, APNSDevice, APNSMessage, GCMConfig, GCMDevice, GCMMessage +from tests.integration_test import InstanceMixin, IntegrationTest + + +class PushIntegrationTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(PushIntegrationTest, cls).setUpClass() + + cls.gcm_config = GCMConfig( + development_api_key=uuid.uuid4().hex, + instance_name=cls.instance.name + ) + cls.gcm_config.save() + + with open('tests/certificates/ApplePushDevelopment.p12', 'rb') as cert: + cls.apns_config = APNSConfig( + development_certificate=cert, + development_certificate_name='test', + development_bundle_identifier='test1234', + instance_name=cls.instance.name + ) + cls.apns_config.save() + + cls.environment = 'development' + cls.gcm_device = GCMDevice( + instance_name=cls.instance.name, + label='example label', + registration_id=86152312314401555, + device_id='10000000001', + ) + cls.gcm_device.save() + + cls.apns_device = APNSDevice( + instance_name=cls.instance.name, + label='example label', + registration_id='4719084371920471208947120984731208947910827409128470912847120894', + device_id='7189d7b9-4dea-4ecc-aa59-8cc61a20608a', + ) + cls.apns_device.save() + + +class PushNotificationTest(PushIntegrationTest): + + def test_gcm_config_update(self): + gcm_config = GCMConfig.please.get() + new_key = uuid.uuid4().hex + gcm_config.development_api_key = new_key + gcm_config.save() + + gcm_config_ = GCMConfig.please.get() + self.assertEqual(gcm_config_.development_api_key, new_key) + + def test_apns_config_update(self): + apns_config = APNSConfig.please.get() + new_cert_name = 'new cert name' + apns_config.development_certificate_name = new_cert_name + apns_config.save() + + apns_config_ = APNSConfig.please.get() + 
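# Push-notification sketch for the GCM side covered by these tests, assuming
# `instance` is the current Instance; the API key is a placeholder and the ids
# mirror the fixtures above. APNS is analogous, configured with a .p12 certificate
# via APNSConfig and an APNSDevice instead.
from syncano.models import GCMConfig, GCMDevice

GCMConfig(development_api_key='<google-api-key>', instance_name=instance.name).save()

device = GCMDevice(
    instance_name=instance.name,
    label='example device',
    registration_id=86152312314401555,
    device_id='10000000001',
)
device.save()
device.send_message(content={'environment': 'development', 'data': {'ping': 'pong'}})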
self.assertEqual(apns_config_.development_certificate_name, new_cert_name) + + def test_gcm_device(self): + device = GCMDevice( + instance_name=self.instance.name, + label='example label', + registration_id=86152312314401666, + device_id='10000000001', + ) + self._test_device(device, GCMDevice.please) + + def test_apns_device(self): + device = APNSDevice( + instance_name=self.instance.name, + label='example label', + registration_id='4719084371920471208947120984731208947910827409128470912847120222', + device_id='7189d7b9-4dea-4ecc-aa59-8cc61a20608a', + ) + + self._test_device(device, APNSDevice.please) + + def test_send_message_gcm(self): + + self.assertEqual(0, len(list(GCMMessage.please.all()))) + + self.gcm_device.send_message(content={'environment': self.environment, 'data': {'c': 'more_c'}}) + + self.assertEqual(1, len(list(GCMMessage.please.all()))) + + def test_send_message_apns(self): + self.assertEqual(0, len(list(APNSMessage.please.all()))) + + self.apns_device.send_message(content={'environment': 'development', 'aps': {'alert': 'alert test'}}) + + self.assertEqual(1, len(list(APNSMessage.please.all()))) + + def test_gcm_message(self): + message = GCMMessage( + instance_name=self.instance.name, + content={ + 'registration_ids': ['TESTIDREGISRATION', ], + 'environment': 'production', + 'data': { + 'param1': 'test' + } + } + ) + + self._test_message(message, GCMMessage.please) # we want this to fail; no productions keys; + + def test_apns_message(self): + message = APNSMessage( + instance_name=self.instance.name, + content={ + 'registration_ids': ['TESTIDREGISRATION', ], + 'environment': 'production', + 'aps': {'alert': 'semo example label'} + } + ) + + self._test_message(message, APNSMessage.please) # we want this to fail; no productions certs; + + def _test_device(self, device, manager): + + device.save() + + self.assertEqual(len(list(manager.all(instance_name=self.instance.name,))), 2) + + # test get: + device_ = manager.get(instance_name=self.instance.name, registration_id=device.registration_id) + + self.assertEqual(device_.label, device.label) + self.assertEqual(device_.registration_id, device.registration_id) + self.assertEqual(device_.device_id, device.device_id) + + # test update: + new_label = 'totally new label' + device.label = new_label + device.save() + + device_ = manager.get(instance_name=self.instance.name, registration_id=device.registration_id) + self.assertEqual(new_label, device_.label) + + device.delete() + + def _test_message(self, message, manager): + self.assertFalse(manager.all(instance_name=self.instance.name)) + + with self.assertRaises(SyncanoRequestError): + # unable to save because of lack of API key; + message.save() diff --git a/tests/integration_test_register.py b/tests/integration_test_register.py new file mode 100644 index 0000000..14a1d09 --- /dev/null +++ b/tests/integration_test_register.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +import os +import random +import unittest + +import syncano + + +class RegistrationTest(unittest.TestCase): + + def test_register(self): + connection = syncano.connect( + host=os.getenv('INTEGRATION_API_ROOT'), + ) + + email = 'syncano.bot+997999{}@syncano.com'.format(random.randint(100000, 50000000)) + + connection.connection().register( + email=email, + password='test11', + first_name='Jan', + last_name='Nowak' + ) + + # test if LIB has a key now; + account_info = connection.connection().get_account_info() + self.assertIn('email', account_info) + self.assertEqual(account_info['email'], email) diff --git 
a/tests/integration_test_relations.py b/tests/integration_test_relations.py new file mode 100644 index 0000000..052a424 --- /dev/null +++ b/tests/integration_test_relations.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +from syncano.models import Class +from tests.integration_test import InstanceMixin, IntegrationTest + + +class RelationApiTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(RelationApiTest, cls).setUpClass() + + # prapare data + cls.author = Class.please.create(name="author", schema=[ + {"name": "name", "type": "string", "filter_index": True}, + {"name": "birthday", "type": "integer"}, + ]) + + cls.book = Class.please.create(name="book", schema=[ + {"name": "title", "type": "string", "filter_index": True}, + {"name": "authors", "type": "relation", "target": "author", "filter_index": True}, + ]) + + cls.prus = cls.author.objects.create(name='Bolesław Prus', birthday=1847) + cls.lem = cls.author.objects.create(name='Stanisław Lem', birthday=1921) + cls.coehlo = cls.author.objects.create(name='Paulo Coehlo', birthday=1947) + + cls.lalka = cls.book.objects.create(authors=[cls.prus.id], title='Lalka') + cls.niezwyciezony = cls.book.objects.create(authors=[cls.lem.id], title='Niezwyciężony') + cls.brida = cls.book.objects.create(authors=[cls.coehlo.id], title='Brida') + + def test_integers_list(self): + authors_list_ids = [self.prus.id, self.coehlo.id] + book = self.book.objects.create(authors=authors_list_ids, title='Strange title') + self.assertListEqual(sorted(book.authors), sorted(authors_list_ids)) + + book.delete() + + def test_object_list(self): + authors_list_ids = [self.prus.id, self.coehlo.id] + book = self.book.objects.create(authors=authors_list_ids, title='Strange title') + self.assertListEqual(sorted(book.authors), sorted(authors_list_ids)) + + book.delete() + + def test_object_assign(self): + self.lalka.authors = [self.lem, self.coehlo] + self.lalka.save() + + self.assertListEqual(sorted(self.lalka.authors), sorted([self.lem.id, self.coehlo.id])) + self.lalka.authors = [self.prus] + self.lalka.save() + + def test_related_field_add(self): + self.niezwyciezony.authors_set.add(self.coehlo) + self.assertListEqual(sorted(self.niezwyciezony.authors), sorted([self.lem.id, self.coehlo.id])) + + self.niezwyciezony.authors_set.add(self.prus.id, self.coehlo.id) + self.assertListEqual(sorted(self.niezwyciezony.authors), sorted([self.lem.id, self.prus.id, self.coehlo.id])) + + self.niezwyciezony.authors = [self.lem] + self.niezwyciezony.save() + + def test_related_field_remove(self): + self.brida.authors_set.remove(self.coehlo) + self.assertEqual(self.brida.authors, None) + + self.niezwyciezony.authors_set.remove(self.prus, self.lem, self.coehlo) + self.assertEqual(self.niezwyciezony.authors, None) + + self.niezwyciezony.authors = [self.lem] + self.niezwyciezony.save() + self.brida.authors = [self.coehlo] + self.brida.save() + + def test_related_field_lookup_contains(self): + filtered_books = self.book.objects.list().filter(authors__contains=[self.prus]) + + self.assertEqual(len(list(filtered_books)), 1) + + for book in filtered_books: + self.assertEqual(book.title, self.lalka.title) + + def test_related_field_lookup_contains_fail(self): + filtered_books = self.book.objects.list().filter(authors__contains=[self.prus, self.lem]) + self.assertEqual(len(list(filtered_books)), 0) + + def test_related_field_lookup_is(self): + filtered_books = self.book.objects.list().filter(authors__name__startswith='Stan') + + 
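# Relation-field usage sketch condensing what the surrounding tests assert on;
# `book_class` stands for the 'book' Class created in setUpClass (cls.book), and
# `lalka`, `prus`, `lem`, `coehlo` stand for the data objects created there.
lalka.authors = [prus, lem]            # assign related objects (ids also work)
lalka.save()

lalka.authors_set.add(coehlo)          # mutate the relation via the *_set helper
lalka.authors_set.remove(coehlo)

# filter on the relation itself, or on fields of the related objects:
book_class.objects.list().filter(authors__contains=[prus])
book_class.objects.list().filter(authors__name__startswith='Stan')
book_class.objects.list().filter(authors__id__in=[prus.id], title__eq='Lalka')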
self.assertEqual(len(list(filtered_books)), 1) + for book in filtered_books: + self.assertEqual(book.title, self.niezwyciezony.title) + + def test_multiple_lookups(self): + filtered_books = self.book.objects.list().filter(authors__id__in=[self.prus.id], title__eq='Lalka') + + self.assertEqual(len(list(filtered_books)), 1) + for book in filtered_books: + self.assertEqual(book.title, self.lalka.title) diff --git a/tests/integration_test_reponse_templates.py b/tests/integration_test_reponse_templates.py new file mode 100644 index 0000000..51045d1 --- /dev/null +++ b/tests/integration_test_reponse_templates.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- + +from syncano.models import Class, ResponseTemplate +from tests.integration_test import InstanceMixin, IntegrationTest + + +class ResponseTemplateApiTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(ResponseTemplateApiTest, cls).setUpClass() + cls.to_delete = cls.instance.templates.create(name='to_delete', content="
", + content_type='text/html', context={'one': 1}) + cls.for_update = cls.instance.templates.create(name='to_update', content="
", + content_type='text/html', context={'one': 1}) + + def test_retrieve_api(self): + template = ResponseTemplate.please.get(name='to_update') + self.assertTrue(isinstance(template, ResponseTemplate)) + self.assertEqual(template.name, 'to_update') + self.assertEqual(template.content, '
') + self.assertEqual(template.content_type, 'text/html') + self.assertEqual(template.context, {'one': 1}) + + def test_create_api(self): + template = ResponseTemplate.please.create(name='just_created', content='
', content_type='text/html', + context={'two': 2}) + self.assertTrue(isinstance(template, ResponseTemplate)) + + def test_delete_api(self): + ResponseTemplate.please.delete(name='to_delete') + with self.assertRaises(ResponseTemplate.DoesNotExist): + ResponseTemplate.please.get(name='to_delete') + + def test_update_api(self): + self.for_update.content = "
Hello!
" + self.for_update.save() + + template = ResponseTemplate.please.get(name='to_update') + self.assertEqual(template.content, "
Hello!
") + + def test_render_api(self): + render_template = self.instance.templates.create(name='to_render', + content="{% for o in objects %}
  • {{ o }}
  • {% endfor %}", + content_type='text/html', context={'objects': [1, 2]}) + + rendered = render_template.render() + self.assertEqual(rendered, '
  • 1
  • 2
  • ') + + rendered = render_template.render(context={'objects': [3]}) + self.assertEqual(rendered, '
  • 3
  • ') + + def test_rename(self): + name = 'some_old_new_name_for_template' + new_name = 'some_new_name_for_template' + + template = ResponseTemplate.please.create(name=name, content='
    ', content_type='text/html', + context={'two': 2}) + template = template.rename(new_name=new_name) + + self.assertEqual(template.name, new_name) + + def test_render_on_endpoint_list(self): + template_response = Class.please.template('objects_html_table').all() + + self.assertIn('', template_response[0]) # all() returns a list (precise: iterator) + self.assertIn('user_profile', template_response[0]) + + def test_render_on_endpoint_one_elem(self): + template_response = Class.please.template('objects_html_table').get(name='user_profile') + + self.assertIn('
    ', template_response) + self.assertIn('user_profile', template_response) diff --git a/tests/integration_test_snippet_config.py b/tests/integration_test_snippet_config.py new file mode 100644 index 0000000..de0eba7 --- /dev/null +++ b/tests/integration_test_snippet_config.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- + +from syncano.exceptions import SyncanoValueError +from tests.integration_test import InstanceMixin, IntegrationTest + + +class SnippetConfigTest(InstanceMixin, IntegrationTest): + + def test_update_config(self): + config = { + 'num': 123, + 'foo': 'bar', + 'arr': [1, 2, 3, 4], + 'another': { + 'num': 123, + 'foo': 'bar', + 'arr': [1, 2, 3, 4] + } + } + self.instance.set_config(config) + saved_config = self.instance.get_config() + self.assertDictContainsSubset(config, saved_config, 'Retrieved config should be equal to saved config.') + + def test_update_invalid_config(self): + with self.assertRaises(SyncanoValueError): + self.instance.set_config('invalid config') + with self.assertRaises(SyncanoValueError): + self.instance.set_config([1, 2, 3]) + + def test_update_existing_config(self): + config = { + 'foo': 'bar' + } + self.instance.set_config(config) + saved_config = self.instance.get_config() + self.assertIn('foo', saved_config, 'Retrieved config should contain saved key.') + new_config = { + 'new_foo': 'new_bar' + } + self.instance.set_config(new_config) + saved_config = self.instance.get_config() + self.assertDictContainsSubset(new_config, saved_config, 'Retrieved config should be equal to saved config.') + self.assertNotIn('foo', saved_config, 'Retrieved config should not contain old keys.') diff --git a/tests/integration_test_string_filtering.py b/tests/integration_test_string_filtering.py new file mode 100644 index 0000000..70aba17 --- /dev/null +++ b/tests/integration_test_string_filtering.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +from syncano.models import Object +from tests.integration_test import InstanceMixin, IntegrationTest + + +class StringFilteringTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(StringFilteringTest, cls).setUpClass() + cls.klass = cls.instance.classes.create(name='class_a', + schema=[{'name': 'title', 'type': 'string', 'filter_index': True}]) + cls.object = cls.klass.objects.create(title='Some great title') + + def _test_filter(self, filter): + filtered_obj = Object.please.list(class_name='class_a').filter( + **filter + ).first() + + self.assertTrue(filtered_obj.id) + + def test_starstwith(self): + self._test_filter({'title__startswith': 'Some'}) + self._test_filter({'title__istartswith': 'some'}) + + def test_endswith(self): + self._test_filter({'title__endswith': 'tle'}) + self._test_filter({'title__iendswith': 'TLE'}) + + def test_contains(self): + self._test_filter({'title__contains': 'gre'}) + self._test_filter({'title__icontains': 'gRe'}) + + def test_eq(self): + self._test_filter({'title__ieq': 'some gREAt title'}) diff --git a/tests/integration_test_user.py b/tests/integration_test_user.py new file mode 100644 index 0000000..e314e28 --- /dev/null +++ b/tests/integration_test_user.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +from syncano.exceptions import UserNotFound +from syncano.models import Group, User +from tests.integration_test import InstanceMixin, IntegrationTest + + +class UserProfileTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(UserProfileTest, cls).setUpClass() + cls.user = cls.instance.users.create( + username='JozinZBazin', + 
password='jezioro', + ) + cls.SAMPLE_PROFILE_PIC = 'some_url_here' + cls.ANOTHER_SAMPLE_PROFILE_PIC = 'yet_another_url' + + def test_profile(self): + self.assertTrue(self.user.profile) + self.assertEqual( + self.user.profile.__class__.__name__, + '{}UserProfileObject'.format(self.instance.name.title()) + ) + + def test_profile_klass(self): + klass = self.user.profile.get_class_object() + self.assertTrue(klass) + self.assertEqual(klass.instance_name, self.instance.name) + + def test_profile_change_schema(self): + klass = self.user.profile.get_class_object() + klass.schema = [ + {'name': 'profile_pic', 'type': 'string'} + ] + + klass.save() + self.user.reload() # force to refresh profile model; + + self.user.profile.profile_pic = self.SAMPLE_PROFILE_PIC + self.user.save() + user = User.please.get(id=self.user.id) + self.assertEqual(user.profile.profile_pic, self.SAMPLE_PROFILE_PIC) + + # test save directly on profile + self.user.profile.profile_pic = self.ANOTHER_SAMPLE_PROFILE_PIC + self.user.profile.save() + user = User.please.get(id=self.user.id) + self.assertEqual(user.profile.profile_pic, self.ANOTHER_SAMPLE_PROFILE_PIC) + + +class UserTest(InstanceMixin, IntegrationTest): + + @classmethod + def setUpClass(cls): + super(UserTest, cls).setUpClass() + + cls.group = cls.instance.groups.create( + label='testgroup' + ) + + def test_if_custom_error_is_raised_on_user_group(self): + with self.assertRaises(UserNotFound): + self.group.user_details(user_id=221) + + def test_user_group_membership(self): + user = User.please.create( + username='testa', + password='1234' + ) + + group_test = Group.please.create(label='new_group_a') + + groups = user.list_groups() + self.assertListEqual(groups, []) + + group = user.add_to_group(group_id=group_test.id) + self.assertEqual(group.id, group_test.id) + self.assertEqual(group.label, group_test.label) + + groups = user.list_groups() + self.assertEqual(len(groups), 1) + self.assertEqual(groups[0].id, group_test.id) + + group = user.group_details(group_id=group_test.id) + self.assertEqual(group.id, group_test.id) + self.assertEqual(group.label, group_test.label) + + response = user.remove_from_group(group_id=group_test.id) + self.assertIsNone(response) + + def test_group_user_membership(self): + user_test = User.please.create( + username='testb', + password='1234' + ) + + group = Group.please.create(label='new_group_b') + + users = group.list_users() + self.assertListEqual(users, []) + + user = group.add_user(user_id=user_test.id) + self.assertEqual(user.id, user_test.id) + self.assertEqual(user.username, user_test.username) + + users = group.list_users() + self.assertEqual(len(users), 1) + self.assertEqual(users[0].id, user_test.id) + + user = group.user_details(user_id=user_test.id) + self.assertEqual(user.id, user_test.id) + self.assertEqual(user.username, user_test.username) + + response = group.delete_user(user_id=user_test.id) + self.assertIsNone(response) diff --git a/tests/integration_tests_hosting.py b/tests/integration_tests_hosting.py new file mode 100644 index 0000000..b60f942 --- /dev/null +++ b/tests/integration_tests_hosting.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +import uuid + +from tests.integration_test import InstanceMixin, IntegrationTest + +try: + # python2 + from StringIO import StringIO +except ImportError: + # python3 + from io import StringIO + + +class HostingIntegrationTests(InstanceMixin, IntegrationTest): + + def test_create_file(self): + hosting = self._create_hosting('created-xyz') + a_hosting_file = StringIO() + 
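# Hosting usage sketch, assuming `instance` is an already-fetched Instance; the
# hosting name, domain and CSS body are placeholders.
from io import StringIO           # Python 2 falls back to StringIO.StringIO, as above

hosting = instance.hostings.create(
    name='landing-page',
    description='static site',
    domains=['landing-example.test.io'],
)

css = StringIO()
css.write('h1 {color: #333;}')
css.seek(0)                       # rewind before handing the buffer to upload_file
hosting.upload_file(path='styles/main.css', file=css)
hosting.set_default()             # serve this hosting as the instance default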
a_hosting_file.write('h1 {color: #541231;}') + a_hosting_file.seek(0) + + hosting_file = hosting.upload_file(path='styles/main.css', file=a_hosting_file) + self.assertEqual(hosting_file.path, 'styles/main.css') + + def test_set_default(self): + hosting = self._create_hosting('default-xyz') + hosting = hosting.set_default() + self.assertTrue('default', hosting.is_default) + + def test_update_file(self): + hosting = self._create_hosting('update-xyz') + a_hosting_file = StringIO() + a_hosting_file.write('h1 {color: #541231;}') + a_hosting_file.seek(0) + + hosting.upload_file(path='styles/main.css', file=a_hosting_file) + + a_hosting_file = StringIO() + a_hosting_file.write('h2 {color: #541231;}') + a_hosting_file.seek(0) + + hosting_file = hosting.update_file(path='styles/main.css', file=a_hosting_file) + self.assertEqual(hosting_file.path, 'styles/main.css') + + def _create_hosting(self, name): + return self.instance.hostings.create( + name=name, + description='desc', + domains=['test.test{}.io'.format(uuid.uuid4().hex[:5])] + ) diff --git a/tests/test_channels.py b/tests/test_channels.py index 69489ba..ed8bdc8 100644 --- a/tests/test_channels.py +++ b/tests/test_channels.py @@ -92,7 +92,7 @@ def test_poll(self, poll_thread_mock, connection_mock): self.assertTrue(connection_mock.called) poll_thread_mock.assert_called_once_with( connection_mock, - '/v1/instances/None/channels/None/poll/', + '/v1.1/instances/None/channels/None/poll/', 'c', 'd', last_id='b', @@ -113,6 +113,6 @@ def test_publish(self, connection_mock): self.assertTrue(connection_mock.request.called) connection_mock.request.assert_called_once_with( 'POST', - '/v1/instances/None/channels/None/publish/', - data={'room': u'1', 'payload': '{"a": 1}'} + '/v1.1/instances/None/channels/None/publish/', + data={'room': '1', 'payload': '{"a": 1}'} ) diff --git a/tests/test_classes.py b/tests/test_classes.py new file mode 100644 index 0000000..8c89579 --- /dev/null +++ b/tests/test_classes.py @@ -0,0 +1,152 @@ +import unittest + +from syncano.exceptions import SyncanoValueError +from syncano.models import Instance, Object + +try: + from unittest import mock +except ImportError: + import mock + + +class ObjectTestCase(unittest.TestCase): + + def setUp(self): + self.schema = [ + { + 'name': 'title', + 'type': 'string', + 'order_index': True, + 'filter_index': True + }, + { + 'name': 'release_year', + 'type': 'integer', + 'order_index': True, + 'filter_index': True + }, + { + 'name': 'price', + 'type': 'float', + 'order_index': True, + 'filter_index': True + }, + { + 'name': 'author', + 'type': 'reference', + 'order_index': True, + 'filter_index': True, + 'target': 'Author' + } + ] + + @mock.patch('syncano.models.Object.get_subclass_model') + def test_new(self, get_subclass_model_mock): + get_subclass_model_mock.return_value = Instance + self.assertFalse(get_subclass_model_mock.called) + + with self.assertRaises(SyncanoValueError): + Object() + + with self.assertRaises(SyncanoValueError): + Object(instance_name='dummy') + + self.assertFalse(get_subclass_model_mock.called) + o = Object(instance_name='dummy', class_name='dummy', x=1, y=2) + self.assertIsInstance(o, Instance) + self.assertTrue(get_subclass_model_mock.called) + get_subclass_model_mock.assert_called_once_with('dummy', 'dummy') + + def test_create_subclass(self): + SubClass = Object.create_subclass('Test', self.schema) + fields = [f for f in SubClass._meta.fields if f not in Object._meta.fields] + + self.assertEqual(SubClass.__name__, 'Test') + + for schema, field in 
zip(self.schema, fields): + query_allowed = ('order_index' in schema or 'filter_index' in schema) + self.assertEqual(schema['name'], field.name) + self.assertEqual(field.query_allowed, query_allowed) + self.assertFalse(field.required) + self.assertFalse(field.read_only) + + @mock.patch('syncano.models.classes.registry') + @mock.patch('syncano.models.Object.create_subclass') + def test_get_or_create_subclass(self, create_subclass_mock, registry_mock): + create_subclass_mock.return_value = 1 + registry_mock.get_model_by_name.side_effect = [2, LookupError] + + self.assertFalse(registry_mock.get_model_by_name.called) + self.assertFalse(registry_mock.add.called) + self.assertFalse(create_subclass_mock.called) + + model = Object.get_or_create_subclass('test', [{}, {}]) + self.assertEqual(model, 2) + + self.assertTrue(registry_mock.get_model_by_name.called) + self.assertFalse(registry_mock.add.called) + self.assertFalse(create_subclass_mock.called) + registry_mock.get_model_by_name.assert_called_with('test') + + model = Object.get_or_create_subclass('test', [{}, {}]) + self.assertEqual(model, 1) + + self.assertTrue(registry_mock.get_model_by_name.called) + self.assertTrue(registry_mock.add.called) + self.assertTrue(create_subclass_mock.called) + + registry_mock.get_model_by_name.assert_called_with('test') + create_subclass_mock.assert_called_with('test', [{}, {}]) + registry_mock.add.assert_called_with('test', 1) + + self.assertEqual(registry_mock.get_model_by_name.call_count, 2) + self.assertEqual(registry_mock.add.call_count, 1) + self.assertEqual(create_subclass_mock.call_count, 1) + + def test_get_subclass_name(self): + self.assertEqual(Object.get_subclass_name('', ''), 'Object') + self.assertEqual(Object.get_subclass_name('duMMY', ''), 'DummyObject') + self.assertEqual(Object.get_subclass_name('', 'ClS'), 'ClsObject') + self.assertEqual(Object.get_subclass_name('duMMy', 'CLS'), 'DummyClsObject') + + @mock.patch('syncano.models.Manager.get') + def test_get_class_schema(self, get_mock): + get_mock.return_value = get_mock + self.assertFalse(get_mock.called) + result = Object.get_class_schema('dummy-instance', 'dummy-class') + self.assertTrue(get_mock.called) + self.assertEqual(result, get_mock.schema) + get_mock.assert_called_once_with('dummy-instance', 'dummy-class') + + @mock.patch('syncano.models.Object.create_subclass') + @mock.patch('syncano.models.Object.get_class_schema') + @mock.patch('syncano.models.manager.registry.get_model_by_name') + @mock.patch('syncano.models.Object.get_subclass_name') + @mock.patch('syncano.models.registry._default_connection') + @mock.patch('syncano.models.manager.Manager.serialize') + def test_get_subclass_model(self, serialize_mock, default_connection_mock, get_subclass_name_mock, + get_model_by_name_mock, get_class_schema_mock, create_subclass_mock): + + default_connection_mock.return_value = default_connection_mock + serialize_mock.return_value = serialize_mock + + create_subclass_mock.return_value = create_subclass_mock + get_subclass_name_mock.side_effect = [ + 'Object', + 'DummyObject', + 'DummyObject', + ] + + get_model_by_name_mock.side_effect = [ + Object, + LookupError + ] + + result = Object.get_subclass_model('', '') + self.assertEqual(Object, result) + + result = Object.get_subclass_model('', '') + self.assertEqual(Object, result) + + result = Object.get_subclass_model('', '') + self.assertEqual(create_subclass_mock, result) diff --git a/tests/test_connection.py b/tests/test_connection.py index fe539c6..ded59b5 100644 --- 
a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,9 +1,17 @@ +import json +import tempfile import unittest -from urlparse import urljoin -from syncano import connect, connect_instance -from syncano.connection import Connection, ConnectionMixin, default_connection +import six +from syncano import connect +from syncano.connection import Connection, ConnectionMixin from syncano.exceptions import SyncanoRequestError, SyncanoValueError +from syncano.models.registry import registry + +if six.PY3: + from urllib.parse import urljoin +else: + from urlparse import urljoin try: from unittest import mock @@ -13,70 +21,27 @@ class ConnectTestCase(unittest.TestCase): - @mock.patch('syncano.models.registry') - @mock.patch('syncano.connection.default_connection.open') - def test_connect(self, open_mock, registry_mock): - registry_mock.return_value = registry_mock - - self.assertFalse(registry_mock.called) + @mock.patch('syncano.connection.DefaultConnection.open') + def test_connect(self, open_mock): self.assertFalse(open_mock.called) connection = connect(1, 2, 3, a=1, b=2, c=3) open_mock.assert_called_once_with(1, 2, 3, a=1, b=2, c=3) self.assertTrue(open_mock.called) - self.assertEqual(connection, registry_mock) + self.assertEqual(connection, registry) - @mock.patch('syncano.connection.default_connection.open') + @mock.patch('syncano.models.registry.connection.open') @mock.patch('syncano.models.registry') @mock.patch('syncano.INSTANCE') def test_env_instance(self, instance_mock, registry_mock, *args): - self.assertFalse(registry_mock.set_default_instance.called) + instance_mock.return_value = 'test_instance' + self.assertFalse(registry_mock.set_used_instance.called) connect(1, 2, 3, a=1, b=2, c=3) - self.assertTrue(registry_mock.set_default_instance.called) - registry_mock.set_default_instance.assert_called_once_with(instance_mock) - - -class ConnectInstanceTestCase(unittest.TestCase): - - @mock.patch('syncano.connect') - def test_connect_instance(self, connect_mock): - connect_mock.return_value = connect_mock - get_mock = connect_mock.Instance.please.get - get_mock.return_value = get_mock - - self.assertFalse(connect_mock.called) - self.assertFalse(get_mock.called) - - instance = connect_instance('test-name', a=1, b=2) - - self.assertTrue(connect_mock.called) - self.assertTrue(get_mock.called) - - connect_mock.assert_called_once_with(a=1, b=2) - get_mock.assert_called_once_with('test-name') - self.assertEqual(instance, get_mock) - - @mock.patch('syncano.connect') - @mock.patch('syncano.INSTANCE') - def test_env_connect_instance(self, instance_mock, connect_mock): - connect_mock.return_value = connect_mock - get_mock = connect_mock.Instance.please.get - get_mock.return_value = get_mock - - self.assertFalse(connect_mock.called) - self.assertFalse(get_mock.called) - - instance = connect_instance(a=1, b=2) - - self.assertTrue(connect_mock.called) - self.assertTrue(get_mock.called) - - connect_mock.assert_called_once_with(a=1, b=2) - get_mock.assert_called_once_with(instance_mock) - self.assertEqual(instance, get_mock) + self.assertTrue(registry_mock.set_used_instance.called) + registry_mock.set_used_instance.assert_called_once_with(instance_mock) class ConnectionTestCase(unittest.TestCase): @@ -108,7 +73,7 @@ def test_debug(self, debug_mock, dumps_mock, post_mock): self.connection.make_request('POST', 'test') self.assertTrue(dumps_mock.called) dumps_mock.assert_called_once_with( - {'headers': {'content-type': 'application/json'}, 'timeout': 30, 'verify': False}, + {'files': [], 'headers': 
{'content-type': 'application/json'}, 'timeout': 30, 'verify': False}, sort_keys=True, indent=2, separators=(',', ': ')) @mock.patch('requests.Session.post') @@ -160,7 +125,7 @@ def test_build_params(self): self.assertTrue('headers' in params) self.assertTrue('Authorization' in params['headers']) - self.assertEqual(params['headers']['Authorization'], 'ApiKey {0}'.format(self.connection.api_key)) + self.assertEqual(params['headers']['Authorization'], 'token {0}'.format(self.connection.api_key)) self.assertTrue('content-type' in params['headers']) self.assertEqual(params['headers']['content-type'], self.connection.CONTENT_TYPE) @@ -216,6 +181,26 @@ def test_invalid_method_name(self): with self.assertRaises(SyncanoValueError): self.connection.make_request('INVALID', 'test') + @mock.patch('syncano.connection.Connection.get_response_content') + @mock.patch('requests.Session.patch') + def test_make_request_for_creating_object_with_file(self, patch_mock, get_response_mock): + kwargs = { + 'data': { + 'files': {'filename': tempfile.TemporaryFile(mode='w')} + } + } + # fails with a TypeError on json dump if the file is not handled separately + self.connection.make_request('POST', 'test', **kwargs) + + @mock.patch('syncano.connection.Connection.get_response_content') + @mock.patch('requests.Session.patch') + def test_make_request_for_updating_object_with_file(self, patch_mock, get_response_mock): + kwargs = { + 'data': {'filename': tempfile.TemporaryFile(mode='w')} + } + # fails with a TypeError on json dump if the file is not handled separately + self.connection.make_request('PATCH', 'test', **kwargs) + @mock.patch('requests.Session.post') def test_request_error(self, post_mock): post_mock.return_value = mock.MagicMock(status_code=404, text='Invalid request') @@ -273,14 +258,15 @@ def test_invalid_credentials(self, post_mock): self.assertTrue(post_mock.called) self.assertIsNone(self.connection.api_key) + call_args = post_mock.call_args[0] + call_kwargs = post_mock.call_args[1] + call_kwargs['data'] = json.loads(call_kwargs['data']) - post_mock.assert_called_once_with( - urljoin(self.connection.host, '{0}/'.format(self.connection.AUTH_SUFFIX)), - headers={'content-type': self.connection.CONTENT_TYPE}, - data='{"password": "dummy", "email": "dummy"}', - timeout=30, - verify=True - ) + self.assertEqual(call_args[0], urljoin(self.connection.host, '{0}/'.format(self.connection.AUTH_SUFFIX))) + self.assertEqual(call_kwargs['headers'], {'content-type': self.connection.CONTENT_TYPE}) + self.assertEqual(call_kwargs['timeout'], 30) + self.assertTrue(call_kwargs['verify']) + self.assertDictEqual(call_kwargs['data'], {"password": "dummy", "email": "dummy"}) @mock.patch('syncano.connection.Connection.make_request') def test_successful_authentication(self, make_request): @@ -294,11 +280,98 @@ def test_successful_authentication(self, make_request): self.assertIsNotNone(self.connection.api_key) self.assertEqual(self.connection.api_key, api_key) + @mock.patch('syncano.connection.Connection.make_request') + def test_get_account_info(self, make_request): + info = {'first_name': '', 'last_name': '', 'is_active': True, + 'id': 1, 'has_password': True, 'email': 'dummy'} + self.test_successful_authentication() + make_request.return_value = info + self.assertFalse(make_request.called) + self.assertIsNotNone(self.connection.api_key) + ret = self.connection.get_account_info() + self.assertTrue(make_request.called) + self.assertEqual(info, ret) + + @mock.patch('syncano.connection.Connection.make_request') + def test_get_account_info_with_api_key(self, make_request): + info = 
{'first_name': '', 'last_name': '', 'is_active': True, + 'id': 1, 'has_password': True, 'email': 'dummy'} + make_request.return_value = info + self.assertFalse(make_request.called) + self.assertIsNone(self.connection.api_key) + ret = self.connection.get_account_info(api_key='test') + self.assertIsNotNone(self.connection.api_key) + self.assertTrue(make_request.called) + self.assertEqual(info, ret) + + @mock.patch('syncano.connection.Connection.make_request') + def test_get_account_info_invalid_key(self, make_request): + err = SyncanoRequestError(403, 'No such API Key.') + make_request.side_effect = err + self.assertFalse(make_request.called) + self.assertIsNone(self.connection.api_key) + try: + self.connection.get_account_info(api_key='invalid') + self.assertTrue(False) + except SyncanoRequestError as e: + self.assertIsNotNone(self.connection.api_key) + self.assertTrue(make_request.called) + self.assertEqual(e, err) + + @mock.patch('syncano.connection.Connection.make_request') + def test_get_account_info_missing_key(self, make_request): + self.assertFalse(make_request.called) + self.assertIsNone(self.connection.api_key) + try: + self.connection.get_account_info() + self.assertTrue(False) + except SyncanoValueError: + self.assertIsNone(self.connection.api_key) + self.assertFalse(make_request.called) + + @mock.patch('syncano.connection.Connection.make_request') + def test_get_user_info(self, make_request_mock): + info = {'profile': {}} + make_request_mock.return_value = info + self.assertFalse(make_request_mock.called) + self.connection.api_key = 'Ala has a cat' + self.connection.user_key = 'Tom has a cat also' + self.connection.instance_name = 'tom_ala' + ret = self.connection.get_user_info() + self.assertTrue(make_request_mock.called) + self.assertEqual(info, ret) + + @mock.patch('syncano.connection.Connection.make_request') + def test_get_user_info_without_instance(self, make_request_mock): + info = {'profile': {}} + make_request_mock.return_value = info + self.assertFalse(make_request_mock.called) + self.connection.api_key = 'Ala has a cat' + self.connection.user_key = 'Tom has a cat also' + self.connection.instance_name = None + with self.assertRaises(SyncanoValueError): + self.connection.get_user_info() + + @mock.patch('syncano.connection.Connection.make_request') + def test_get_user_info_without_auth_keys(self, make_request_mock): + info = {'profile': {}} + make_request_mock.return_value = info + self.assertFalse(make_request_mock.called) + + self.connection.api_key = None + with self.assertRaises(SyncanoValueError): + self.connection.get_user_info() + + self.connection.api_key = 'Ala has a cat' + self.connection.user_key = None + with self.assertRaises(SyncanoValueError): + self.connection.get_user_info() + class DefaultConnectionTestCase(unittest.TestCase): def setUp(self): - self.connection = default_connection + self.connection = registry.connection self.connection._connection = None def test_call(self): @@ -324,7 +397,7 @@ class ConnectionMixinTestCase(unittest.TestCase): def setUp(self): self.mixin = ConnectionMixin() - @mock.patch('syncano.connection.default_connection') + @mock.patch('syncano.models.registry._default_connection') def test_getter(self, default_connection_mock): default_connection_mock.return_value = default_connection_mock diff --git a/tests/test_custom_response.py b/tests/test_custom_response.py new file mode 100644 index 0000000..eba7713 --- /dev/null +++ b/tests/test_custom_response.py @@ -0,0 +1,33 @@ +import json +import unittest + +from 
syncano.models.custom_response import CustomResponseHandler + + +class ObjectTestCase(unittest.TestCase): + + @classmethod + def setUpClass(cls): + cls.json_data = json.dumps({'one': 1, 'two': 2}) + + def _wrap_data(self): + return {'response': {'content': self.json_data, 'content_type': 'application/json'}} + + def test_default_json_handler(self): + custom_handler = CustomResponseHandler() + processed_data = custom_handler.process_response(self._wrap_data()) + + self.assertDictEqual(processed_data, json.loads(self.json_data)) + + def test_custom_json_handler(self): + + def json_custom_handler(response): + # return only two + return json.loads(response['response']['content'])['two'] + + custom_handler = CustomResponseHandler() + custom_handler.overwrite_handler('application/json', json_custom_handler) + + processed_data = custom_handler.process_response(self._wrap_data()) + + self.assertEqual(processed_data, json.loads(self.json_data)['two']) diff --git a/tests/test_deprecation_decorator.py b/tests/test_deprecation_decorator.py new file mode 100644 index 0000000..a751713 --- /dev/null +++ b/tests/test_deprecation_decorator.py @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +import unittest +import warnings + +from syncano.release_utils import Deprecated + + +class DeprecationDecoratorTestCase(unittest.TestCase): + + def test_deprecation_decorator(self): + + class SomeClass(object): + + @Deprecated(lineno=0, removed_in_version='5.0.10') + def some_deprecated_method(self): + pass + + with warnings.catch_warnings(record=True) as warning: + # Cause all warnings to always be triggered. + warnings.simplefilter('always') + # Trigger a warning. + SomeClass().some_deprecated_method() + # Verify some things + self.assertEqual(len(warning), 1) + self.assertEqual(warning[-1].category, DeprecationWarning) + self.assertIn('deprecated', str(warning[-1].message)) + self.assertIn('5.0.10', str(warning[-1].message)) diff --git a/tests/test_fields.py b/tests/test_fields.py index 6f6e1ea..694a9bd 100644 --- a/tests/test_fields.py +++ b/tests/test_fields.py @@ -1,8 +1,10 @@ +import json import unittest from datetime import datetime from functools import wraps from time import mktime +import six from syncano import models from syncano.exceptions import SyncanoValidationError, SyncanoValueError from syncano.models.manager import SchemaManager @@ -55,20 +57,23 @@ class AllFieldsModel(models.Model): choice_field = models.ChoiceField(choices=CHOICES) date_field = models.DateField() datetime_field = models.DateTimeField() - hyperlinked_field = models.HyperlinkedField() + hyperlinked_field = models.LinksField() model_field = models.ModelField('Instance') json_field = models.JSONField(schema=SCHEMA) schema_field = models.SchemaField() + array_field = models.ArrayField() + object_field = models.ObjectField() + geo_field = models.GeoPointField() class Meta: endpoints = { 'detail': { 'methods': ['delete', 'post', 'patch', 'get'], - 'path': '/v1/dummy/{dynamic_field}/', + 'path': '/v1.1/dummy/{dynamic_field}/', }, 'list': { 'methods': ['post', 'get'], - 'path': '/v1/dummy/', + 'path': '/v1.1/dummy/', } } @@ -105,11 +110,11 @@ def test_field_str(self): @skip_base_class def test_field_unicode(self): - expected = u'<{0}: {1}>'.format( + expected = six.u('<{0}: {1}>').format( self.field.__class__.__name__, self.field_name ) - out = unicode(self.field) + out = str(self.field) self.assertEqual(out, expected) @skip_base_class @@ -248,11 +253,11 @@ class StringFieldTestCase(BaseTestCase): def test_to_python(self): 
self.assertEqual(self.field.to_python(None), None) self.assertEqual(self.field.to_python('test'), 'test') - self.assertEqual(self.field.to_python(10), u'10') - self.assertEqual(self.field.to_python(10.0), u'10.0') - self.assertEqual(self.field.to_python(True), u'True') - self.assertEqual(self.field.to_python({'a': 1}), u"{'a': 1}") - self.assertEqual(self.field.to_python([1, 2]), u"[1, 2]") + self.assertEqual(self.field.to_python(10), '10') + self.assertEqual(self.field.to_python(10.0), '10.0') + self.assertEqual(self.field.to_python(True), 'True') + self.assertEqual(self.field.to_python({'a': 1}), "{'a': 1}") + self.assertEqual(self.field.to_python([1, 2]), "[1, 2]") class IntegerFieldTestCase(BaseTestCase): @@ -524,5 +529,65 @@ def test_to_native(self): schema = SchemaManager(value) self.assertEqual(self.field.to_native(None), None) - self.assertEqual(self.field.to_native(schema), '[{"type": "string", "name": "username"}]') - self.assertEqual(self.field.to_native(value), '[{"type": "string", "name": "username"}]') + self.assertListEqual(json.loads(self.field.to_native(schema)), [{"type": "string", "name": "username"}]) + self.assertListEqual(json.loads(self.field.to_native(value)), [{"type": "string", "name": "username"}]) + + +class ArrayFieldTestCase(BaseTestCase): + field_name = 'array_field' + + def test_validate(self): + + with self.assertRaises(SyncanoValueError): + self.field.validate("a", self.instance) + + with self.assertRaises(SyncanoValueError): + self.field.validate([1, 2, [12, 13]], self.instance) + + self.field.validate([1, 2, 3], self.instance) + self.field.validate("[1, 2, 3]", self.instance) + + def test_to_python(self): + with self.assertRaises(SyncanoValueError): + self.field.to_python('a') + + self.field.to_python([1, 2, 3, 4]) + self.field.to_python("[1, 2, 3, 4]") + + +class ObjectFieldTestCase(BaseTestCase): + field_name = 'object_field' + + def test_validate(self): + + with self.assertRaises(SyncanoValueError): + self.field.validate("a", self.instance) + + self.field.validate({'raz': 1, 'dwa': 2}, self.instance) + self.field.validate('{"raz": 1, "dwa": 2}', self.instance) + + def test_to_python(self): + with self.assertRaises(SyncanoValueError): + self.field.to_python('a') + + self.field.to_python({'raz': 1, 'dwa': 2}) + self.field.to_python('{"raz": 1, "dwa": 2}') + + +class GeoPointTestCase(BaseTestCase): + field_name = 'geo_field' + + def test_validate(self): + + with self.assertRaises(SyncanoValueError): + self.field.validate(12, self.instance) + + self.field.validate(models.GeoPoint(latitude=52.12, longitude=12.02), self.instance) + + def test_to_python(self): + with self.assertRaises(SyncanoValueError): + self.field.to_python(12) + + self.field.to_python((52.12, 12.02)) + self.field.to_python({'latitude': 52.12, 'longitude': 12.02}) + self.field.to_python(models.GeoPoint(52.12, 12.02)) diff --git a/tests/test_files/python-logo.png b/tests/test_files/python-logo.png new file mode 100644 index 0000000..738f6ed Binary files /dev/null and b/tests/test_files/python-logo.png differ diff --git a/tests/test_incentives.py b/tests/test_incentives.py new file mode 100644 index 0000000..6b2b3f4 --- /dev/null +++ b/tests/test_incentives.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +import json +import unittest +from datetime import datetime + +from syncano.exceptions import SyncanoValidationError +from syncano.models import ResponseTemplate, Script, ScriptEndpoint, ScriptEndpointTrace, ScriptTrace + +try: + from unittest import mock +except ImportError: + import mock + 
+ +class ScriptTestCase(unittest.TestCase): + + def setUp(self): + self.model = Script() + + @mock.patch('syncano.models.Script._get_connection') + def test_run(self, connection_mock): + model = Script(instance_name='test', id=10, links={'run': '/v1.1/instances/test/snippets/scripts/10/run/'}) + connection_mock.return_value = connection_mock + connection_mock.request.return_value = {'id': 10} + + self.assertFalse(connection_mock.called) + self.assertFalse(connection_mock.request.called) + result = model.run(a=1, b=2) + self.assertTrue(connection_mock.called) + self.assertTrue(connection_mock.request.called) + self.assertIsInstance(result, ScriptTrace) + + connection_mock.assert_called_once_with(a=1, b=2) + call_args = connection_mock.request.call_args[0] + call_kwargs = connection_mock.request.call_args[1] + call_kwargs['data']['payload'] = json.loads(call_kwargs['data']['payload']) + self.assertEqual(('POST', '/v1.1/instances/test/snippets/scripts/10/run/'), call_args) + self.assertDictEqual(call_kwargs['data'], {'payload': {"a": 1, "b": 2}}) + + model = Script() + with self.assertRaises(SyncanoValidationError): + model.run() + + +class ScriptEndpointTestCase(unittest.TestCase): + def setUp(self): + self.model = ScriptEndpoint() + + @mock.patch('syncano.models.ScriptEndpoint._get_connection') + def test_run(self, connection_mock): + model = ScriptEndpoint(instance_name='test', name='name', + links={'run': '/v1.1/instances/test/endpoints/scripts/name/run/'}) + connection_mock.return_value = connection_mock + connection_mock.request.return_value = { + 'status': 'success', + 'duration': 937, + 'result': {'stdout': 1, 'stderr': ''}, + 'executed_at': '2015-03-16T11:52:14.172830Z' + } + + self.assertFalse(connection_mock.called) + self.assertFalse(connection_mock.request.called) + result = model.run(x=1, y=2) + self.assertTrue(connection_mock.called) + self.assertTrue(connection_mock.request.called) + self.assertIsInstance(result, ScriptEndpointTrace) + self.assertEqual(result.status, 'success') + self.assertEqual(result.duration, 937) + self.assertEqual(result.result, {'stdout': 1, 'stderr': ''}) + self.assertIsInstance(result.executed_at, datetime) + + connection_mock.assert_called_once_with(x=1, y=2) + connection_mock.request.assert_called_once_with( + 'POST', + '/v1.1/instances/test/endpoints/scripts/name/run/', + data={"y": 2, "x": 1} + ) + + model = ScriptEndpoint() + with self.assertRaises(SyncanoValidationError): + model.run() + + +class ResponseTemplateTestCase(unittest.TestCase): + def setUp(self): + self.model = ResponseTemplate + + @mock.patch('syncano.models.ResponseTemplate._get_connection') + def test_render(self, connection_mock): + model = self.model(instance_name='test', name='name', + links={'run': '/v1.1/instances/test/snippets/templates/name/render/'}) + connection_mock.return_value = connection_mock + connection_mock.request.return_value = '
    12345
    ' + + self.assertFalse(connection_mock.called) + self.assertFalse(connection_mock.request.called) + response = model.render() + self.assertTrue(connection_mock.called) + self.assertTrue(connection_mock.request.called) + self.assertEqual(response, '
    12345
    ') + + connection_mock.request.assert_called_once_with( + 'POST', + '/v1.1/instances/test/snippets/templates/name/render/', + data={'context': {}} + ) diff --git a/tests/test_manager.py b/tests/test_manager.py index f3bd52b..4466c72 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -1,10 +1,9 @@ +import json import unittest from datetime import datetime -from syncano.exceptions import (SyncanoDoesNotExist, SyncanoRequestError, - SyncanoValueError) -from syncano.models.base import (CodeBox, CodeBoxTrace, Instance, Object, - Webhook, WebhookResult) +from syncano.exceptions import SyncanoDoesNotExist, SyncanoRequestError, SyncanoValueError +from syncano.models import Instance, Object, Script, ScriptEndpoint, ScriptEndpointTrace, ScriptTrace, User, registry try: from unittest import mock @@ -30,6 +29,22 @@ def setUp(self): self.model = Instance self.manager = Instance.please + def tearDown(self): + field_name = self.get_name_from_fields() + if field_name is not None: + field_name.default = None + + self.model = None + self.manager = None + registry.clear_used_instance() + + def get_name_from_fields(self): + names = [f for f in self.model._meta.fields + if f.name == 'name'] + if len(names) > 0: + return names[0] + return + def test_create(self): model_mock = mock.MagicMock() model_mock.return_value = model_mock @@ -43,15 +58,56 @@ def test_create(self): self.assertTrue(model_mock.save.called) self.assertEqual(instance, model_mock) - model_mock.assert_called_once_with(a=1, b=2) + model_mock.assert_called_once_with(a=1, b=2, is_lazy=False) model_mock.save.assert_called_once_with() - @mock.patch('syncano.models.manager.Manager.create') + @mock.patch('syncano.models.bulk.ModelBulkCreate.make_batch_request') def test_bulk_create(self, create_mock): self.assertFalse(create_mock.called) - self.manager.bulk_create({'a': 1}, {'a': 2}) + self.manager.bulk_create( + User(instance_name='A', username='a', password='a'), + User(instance_name='A', username='b', password='b') + ) self.assertTrue(create_mock.called) - self.assertEqual(create_mock.call_count, 2) + self.assertEqual(create_mock.call_count, 1) + + @mock.patch('syncano.models.manager.Manager.create') + @mock.patch('syncano.models.manager.Manager.update') + @mock.patch('syncano.models.manager.Manager.delete') + def test_batch(self, delete_mock, update_mock, create_mock): + self.assertFalse(delete_mock.called) + self.assertFalse(update_mock.called) + self.assertFalse(create_mock.called) + self.assertFalse(self.manager.is_lazy) + self.manager.batch( + self.manager.as_batch().update(id=2, a=1, b=3, name='Nabuchodonozor'), + self.manager.as_batch().create(a=2, b=3, name='Nabuchodonozor'), + self.manager.as_batch().delete(id=3, name='Nabuchodonozor'), + ) + self.assertFalse(self.manager.is_lazy) + self.assertEqual(delete_mock.call_count, 1) + self.assertEqual(update_mock.call_count, 1) + self.assertEqual(create_mock.call_count, 1) + + @mock.patch('syncano.models.archetypes.Model.batch_object') + def test_batch_object(self, batch_mock): + self.assertFalse(batch_mock.called) + self.manager.batch( + self.manager.as_batch().create(a=2, b=3, name='Nabuchodonozor'), + ) + self.assertTrue(batch_mock.called) + self.assertEqual(batch_mock.call_count, 1) + + @mock.patch('syncano.models.manager.Manager.request') + def test_batch_request(self, request_mock): + self.assertFalse(request_mock.called) + self.manager.batch( + self.manager.as_batch().update(a=2, b=3, name='Nabuchodonozor'), + ) + self.assertFalse(request_mock.called) # shouldn't be 
called when batch mode is on; + self.manager.update(a=2, b=3, name='Nabuchodonozor') + self.assertTrue(request_mock.called) + self.assertEqual(request_mock.call_count, 1) @@ -160,9 +216,51 @@ def test_update(self, clone_mock, filter_mock, request_mock): filter_mock.assert_called_once_with(1, 2, a=1, b=2) request_mock.assert_called_once_with() - self.assertEqual(self.manager.method, 'PUT') + self.assertEqual(self.manager.method, 'PATCH') self.assertEqual(self.manager.endpoint, 'detail') - self.assertEqual(self.manager.data, {'x': 1, 'y': 2}) + self.assertEqual(self.manager.data, {'x': 1, 'y': 2, 'a': 1, 'b': 2}) + + result = self.manager.update(1, 2, a=1, b=2, x=3, y=2) + self.assertEqual(request_mock, result) + + self.assertEqual(self.manager.method, 'PATCH') + self.assertEqual(self.manager.endpoint, 'detail') + self.assertEqual(self.manager.data, {'x': 3, 'y': 2, 'a': 1, 'b': 2}) + + @mock.patch('syncano.models.manager.Manager.request') + @mock.patch('syncano.models.manager.Manager._filter') + @mock.patch('syncano.models.manager.Manager._clone') + @mock.patch('syncano.models.manager.Manager.serialize') + def test_update_with_filter(self, serializer_mock, clone_mock, filter_mock, request_mock): + serializer_mock.return_value = Instance(name='test') + clone_mock.return_value = self.manager + request_mock.return_value = request_mock + + self.assertFalse(filter_mock.called) + self.assertFalse(request_mock.called) + + self.manager.filter(name=2).update(created_at=1, updated_at=2, links=1) + + self.assertTrue(filter_mock.called) + self.assertTrue(request_mock.called) + + filter_mock.assert_called_once_with(created_at=1, updated_at=2, links=1, name=2) + request_mock.assert_called_once_with() + + self.assertEqual(self.manager.data, {'created_at': 1, 'updated_at': 2, 'links': 1, 'name': 2}) + + @mock.patch('syncano.models.manager.Manager.request') + @mock.patch('syncano.models.manager.Manager._filter') + @mock.patch('syncano.models.manager.Manager._clone') + def test_update_with_filter_wrong_arg(self, clone_mock, filter_mock, request_mock): + clone_mock.return_value = self.manager + request_mock.return_value = request_mock + + self.assertFalse(filter_mock.called) + self.assertFalse(request_mock.called) + + with self.assertRaises(SyncanoValueError): + self.manager.filter(name='1', bad_arg='something').update(a=1, b=2, data={'x': 1, 'y': 2}) @@ -231,6 +329,19 @@ def test_list(self, clone_mock, filter_mock): self.assertEqual(self.manager.method, 'GET') self.assertEqual(self.manager.endpoint, 'list') + @mock.patch('syncano.models.options.Options.get_endpoint_properties') + @mock.patch('syncano.models.manager.Manager._clone') + def test_set_default_properties(self, get_endpoint_mock, clone_mock): + get_endpoint_mock.return_value = ['a', 'b', 'name'] + clone_mock.return_value = self.manager + + instance_name = self.get_name_from_fields() + instance_name.default = 'test_original' + + self.manager._set_default_properties(get_endpoint_mock()) + self.assertDictEqual(self.manager.properties, + {'name': 'test_original'}) + @mock.patch('syncano.models.manager.Manager.list') def test_first(self, list_mock): list_mock.__getitem__.return_value = 1 @@ -272,22 +383,22 @@ def test_limit(self, clone_mock): self.manager.limit('invalid value') @mock.patch('syncano.models.manager.Manager._clone') - def 
test_order_by(self, clone_mock): + def test_raw(self, clone_mock): clone_mock.return_value = self.manager - self.manager.order_by('field') - self.assertEqual(self.manager.query['order_by'], 'field') - - with self.assertRaises(SyncanoValueError): - self.manager.order_by(10) + self.assertTrue(self.manager._serialize) + self.manager.raw() + self.assertFalse(self.manager._serialize) @mock.patch('syncano.models.manager.Manager._clone') - def test_raw(self, clone_mock): + def test_template(self, clone_mock): clone_mock.return_value = self.manager self.assertTrue(self.manager._serialize) - self.manager.raw() + self.assertIsNone(self.manager._template) + self.manager.template('test') self.assertFalse(self.manager._serialize) + self.assertEqual(self.manager._template, 'test') def test_serialize(self): model = mock.Mock() @@ -320,8 +431,9 @@ def test_request(self, connection_mock): request_mock.assert_called_once_with( 'GET', - u'/v1/instances/', + '/v1.1/instances/', data={'b': 2}, + headers={}, params={'a': 1} ) @@ -372,6 +484,7 @@ def test_get_allowed_method(self): self.manager.endpoint = 'detail' result = self.manager.get_allowed_method('GET', 'POST') + self.assertEqual(result, 'GET') result = self.manager.get_allowed_method('DELETE', 'POST') @@ -381,15 +494,15 @@ def test_get_allowed_method(self): self.manager.get_allowed_method('dummy') -class CodeBoxManagerTestCase(unittest.TestCase): +class ScriptManagerTestCase(unittest.TestCase): def setUp(self): - self.model = CodeBox - self.manager = CodeBox.please + self.model = Script + self.manager = Script.please - @mock.patch('syncano.models.manager.CodeBoxManager.request') - @mock.patch('syncano.models.manager.CodeBoxManager._filter') - @mock.patch('syncano.models.manager.CodeBoxManager._clone') + @mock.patch('syncano.models.manager.ScriptManager.request') + @mock.patch('syncano.models.manager.ScriptManager._filter') + @mock.patch('syncano.models.manager.ScriptManager._clone') def test_run(self, clone_mock, filter_mock, request_mock): clone_mock.return_value = self.manager request_mock.return_value = {'id': 10} @@ -398,7 +511,7 @@ def test_run(self, clone_mock, filter_mock, request_mock): self.assertFalse(request_mock.called) result = self.manager.run(1, 2, a=1, b=2, payload={'x': 1, 'y': 2}) - self.assertIsInstance(result, CodeBoxTrace) + self.assertIsInstance(result, ScriptTrace) self.assertTrue(filter_mock.called) self.assertTrue(request_mock.called) @@ -408,24 +521,24 @@ def test_run(self, clone_mock, filter_mock, request_mock): self.assertEqual(self.manager.method, 'POST') self.assertEqual(self.manager.endpoint, 'run') - self.assertEqual(self.manager.data['payload'], '{"y": 2, "x": 1}') + self.assertDictEqual(json.loads(self.manager.data['payload']), {"y": 2, "x": 1}) -class WebhookManagerTestCase(unittest.TestCase): +class ScriptEndpointManagerTestCase(unittest.TestCase): def setUp(self): - self.model = Webhook - self.manager = Webhook.please + self.model = ScriptEndpoint + self.manager = ScriptEndpoint.please - @mock.patch('syncano.models.manager.WebhookManager.request') - @mock.patch('syncano.models.manager.WebhookManager._filter') - @mock.patch('syncano.models.manager.WebhookManager._clone') + @mock.patch('syncano.models.manager.ScriptEndpointManager.request') + @mock.patch('syncano.models.manager.ScriptEndpointManager._filter') + @mock.patch('syncano.models.manager.ScriptEndpointManager._clone') def test_run(self, clone_mock, filter_mock, request_mock): clone_mock.return_value = self.manager request_mock.return_value = { 'status': 
'success', 'duration': 937, - 'result': '1', + 'result': 1, 'executed_at': '2015-03-16T11:52:14.172830Z' } @@ -433,10 +546,10 @@ def test_run(self, clone_mock, filter_mock, request_mock): self.assertFalse(request_mock.called) result = self.manager.run(1, 2, a=1, b=2, payload={'x': 1, 'y': 2}) - self.assertIsInstance(result, WebhookResult) + self.assertIsInstance(result, ScriptEndpointTrace) self.assertEqual(result.status, 'success') self.assertEqual(result.duration, 937) - self.assertEqual(result.result, '1') + self.assertEqual(result.result, 1) self.assertIsInstance(result.executed_at, datetime) self.assertTrue(filter_mock.called) @@ -447,7 +560,7 @@ def test_run(self, clone_mock, filter_mock, request_mock): self.assertEqual(self.manager.method, 'POST') self.assertEqual(self.manager.endpoint, 'run') - self.assertEqual(self.manager.data['payload'], '{"y": 2, "x": 1}') + self.assertDictEqual(json.loads(self.manager.data['payload']), {"y": 2, "x": 1}) class ObjectManagerTestCase(unittest.TestCase): @@ -456,7 +569,7 @@ def setUp(self): self.model = Object self.manager = Object.please - @mock.patch('syncano.models.base.Object.get_subclass_model') + @mock.patch('syncano.models.Object.get_subclass_model') def test_create(self, get_subclass_model_mock): model_mock = mock.MagicMock() model_mock.return_value = model_mock @@ -465,17 +578,16 @@ def test_create(self, get_subclass_model_mock): self.assertFalse(model_mock.called) self.assertFalse(get_subclass_model_mock.called) instance = self.manager.create(a=1, b=2) - self.assertTrue(model_mock.called) self.assertTrue(model_mock.save.called) self.assertTrue(get_subclass_model_mock.called) self.assertEqual(instance, model_mock) - model_mock.assert_called_once_with(a=1, b=2) + model_mock.assert_called_once_with(a=1, b=2, is_lazy=False) model_mock.save.assert_called_once_with() - get_subclass_model_mock.assert_called_once_with(a=1, b=2) + get_subclass_model_mock.assert_called_once_with(a=1, b=2, is_lazy=False) - @mock.patch('syncano.models.base.Object.get_subclass_model') + @mock.patch('syncano.models.Object.get_subclass_model') def test_serialize(self, get_subclass_model_mock): get_subclass_model_mock.return_value = mock.Mock self.manager.properties['instance_name'] = 'test' @@ -487,7 +599,7 @@ def test_serialize(self, get_subclass_model_mock): get_subclass_model_mock.assert_called_once_with(instance_name='test', class_name='test') @mock.patch('syncano.models.manager.ObjectManager._clone') - @mock.patch('syncano.models.base.Object.get_subclass_model') + @mock.patch('syncano.models.Object.get_subclass_model') def test_filter(self, get_subclass_model_mock, clone_mock): get_subclass_model_mock.return_value = Instance clone_mock.return_value = self.manager @@ -502,7 +614,10 @@ def test_filter(self, get_subclass_model_mock, clone_mock): self.assertEqual(self.manager.query['query'], '{"name": {"_gt": "test"}}') self.manager.filter(name__gt='test', description='test') - self.assertEqual(self.manager.query['query'], '{"description": {"_eq": "test"}, "name": {"_gt": "test"}}') + self.assertDictEqual( + json.loads(self.manager.query['query']), + {"description": {"_eq": "test"}, "name": {"_gt": "test"}} + ) with self.assertRaises(SyncanoValueError): self.manager.filter(dummy_field=4) @@ -510,6 +625,48 @@ def test_filter(self, get_subclass_model_mock, clone_mock): with self.assertRaises(SyncanoValueError): self.manager.filter(name__xx=4) + @mock.patch('syncano.models.manager.Manager._clone') + def test_order_by(self, clone_mock): + clone_mock.return_value = 
self.manager + + self.manager.order_by('field') + self.assertEqual(self.manager.query['order_by'], 'field') + + with self.assertRaises(SyncanoValueError): + self.manager.order_by(10) + + @mock.patch('syncano.models.manager.Manager.request') + @mock.patch('syncano.models.manager.ObjectManager.serialize') + @mock.patch('syncano.models.manager.Manager.iterator') + def test_update(self, iterator_mock, serialize_mock, request_mock): + iterator_mock.return_value = [Object(class_name='test', instance_name='test')] + serialize_mock.return_value = serialize_mock + self.assertFalse(serialize_mock.called) + + self.model.please.list(class_name='test', instance_name='test').update(id=20, fielda=1, fieldb=None) + + self.assertTrue(serialize_mock.called) + serialize_mock.assert_called_once_with( + {'id': 20, 'fielda': 1, 'fieldb': None}, + self.model + ) + + @mock.patch('syncano.models.manager.Manager.request') + @mock.patch('syncano.models.manager.ObjectManager.serialize') + @mock.patch('syncano.models.manager.Manager.iterator') + def test_update_with_filter(self, iterator_mock, serialize_mock, request_mock): + iterator_mock.return_value = [Object(class_name='test', instance_name='test')] + serialize_mock.return_value = serialize_mock + self.assertFalse(serialize_mock.called) + + self.model.please.list(class_name='test', instance_name='test').filter(id=20).update(channel=1, revision=None) + + self.assertTrue(serialize_mock.called) + serialize_mock.assert_called_once_with( + {'channel': 1, 'revision': None}, + self.model + ) + # TODO class SchemaManagerTestCase(unittest.TestCase): diff --git a/tests/test_models.py b/tests/test_models.py index 23ecd79..ae63899 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -1,9 +1,7 @@ import unittest -from datetime import datetime -from syncano.exceptions import SyncanoValidationError, SyncanoValueError -from syncano.models import (CodeBox, CodeBoxTrace, Instance, Object, Webhook, - WebhookResult) +from syncano.exceptions import SyncanoValidationError +from syncano.models import Instance, registry try: from unittest import mock @@ -15,10 +13,11 @@ class ModelTestCase(unittest.TestCase): def setUp(self): self.model = Instance() + registry.connection.open() def test_init(self): self.assertTrue(hasattr(self.model, '_raw_data')) - self.assertEquals(self.model._raw_data, {}) + self.assertEqual(self.model._raw_data, {}) model = Instance(name='test', dummy_field='dummy') self.assertTrue('name' in model._raw_data) @@ -41,11 +40,11 @@ def test_str(self): self.assertEqual(out, expected) def test_unicode(self): - expected = u'<{0}: {1}>'.format( + expected = '<{0}: {1}>'.format( self.model.__class__.__name__, self.model.pk ) - out = unicode(self.model) + out = str(self.model) self.assertEqual(out, expected) def test_eq(self): @@ -75,7 +74,7 @@ def test_create(self, connection_mock): self.assertTrue(connection_mock.request.called) connection_mock.request.assert_called_with( 'POST', - '/v1/instances/', + '/v1.1/instances/', data={'name': 'test'} ) @@ -93,7 +92,7 @@ def test_update(self, connection_mock): self.assertTrue(connection_mock.request.called) connection_mock.request.assert_called_with( 'PUT', - '/v1/instances/test/', + '/v1.1/instances/test/', data={'name': 'test'} ) @@ -104,13 +103,13 @@ def test_update(self, connection_mock): self.assertTrue(connection_mock.request.called) connection_mock.request.assert_called_with( 'PUT', - '/v1/instances/test/', + '/v1.1/instances/test/', data={'name': 'test'} ) @mock.patch('syncano.models.Instance._get_connection') def 
test_delete(self, connection_mock): - model = Instance(name='test', links={'self': '/v1/instances/test/'}) + model = Instance(name='test', links={'self': '/v1.1/instances/test/'}) connection_mock.return_value = connection_mock self.assertFalse(connection_mock.called) @@ -120,7 +119,7 @@ def test_delete(self, connection_mock): self.assertTrue(connection_mock.request.called) connection_mock.assert_called_once_with() - connection_mock.request.assert_called_once_with('DELETE', '/v1/instances/test/') + connection_mock.request.assert_called_once_with('DELETE', '/v1.1/instances/test/') model = Instance() with self.assertRaises(SyncanoValidationError): @@ -128,7 +127,7 @@ def test_delete(self, connection_mock): @mock.patch('syncano.models.Instance._get_connection') def test_reload(self, connection_mock): - model = Instance(name='test', links={'self': '/v1/instances/test/'}) + model = Instance(name='test', links={'self': '/v1.1/instances/test/'}) connection_mock.return_value = connection_mock connection_mock.request.return_value = { 'name': 'new_one', @@ -145,7 +144,7 @@ def test_reload(self, connection_mock): self.assertEqual(model.description, 'dummy desc') connection_mock.assert_called_once_with() - connection_mock.request.assert_called_once_with('GET', '/v1/instances/test/') + connection_mock.request.assert_called_once_with('GET', '/v1.1/instances/test/') model = Instance() with self.assertRaises(SyncanoValidationError): @@ -181,206 +180,20 @@ def test_to_native(self): self.model.dummy = 'test' self.assertEqual(self.model.to_native(), {'name': 'test', 'description': 'desc'}) - -class CodeBoxTestCase(unittest.TestCase): - - def setUp(self): - self.model = CodeBox() - - @mock.patch('syncano.models.CodeBox._get_connection') - def test_run(self, connection_mock): - model = CodeBox(instance_name='test', id=10, links={'run': '/v1/instances/test/codeboxes/10/run/'}) + @mock.patch('syncano.models.Instance._get_connection') + def test_save_with_revision(self, connection_mock): connection_mock.return_value = connection_mock - connection_mock.request.return_value = {'id': 10} + connection_mock.request.return_value = {} self.assertFalse(connection_mock.called) self.assertFalse(connection_mock.request.called) - result = model.run(a=1, b=2) - self.assertTrue(connection_mock.called) - self.assertTrue(connection_mock.request.called) - self.assertIsInstance(result, CodeBoxTrace) - - connection_mock.assert_called_once_with(a=1, b=2) - connection_mock.request.assert_called_once_with( - 'POST', '/v1/instances/test/codeboxes/10/run/', data={'payload': '{"a": 1, "b": 2}'} - ) - - model = CodeBox() - with self.assertRaises(SyncanoValidationError): - model.run() + Instance(name='test').save(expected_revision=12) -class ObjectTestCase(unittest.TestCase): - - def setUp(self): - self.schema = [ - { - 'name': 'title', - 'type': 'string', - 'order_index': True, - 'filter_index': True - }, - { - 'name': 'release_year', - 'type': 'integer', - 'order_index': True, - 'filter_index': True - }, - { - 'name': 'price', - 'type': 'float', - 'order_index': True, - 'filter_index': True - }, - { - 'name': 'author', - 'type': 'reference', - 'order_index': True, - 'filter_index': True, - 'target': 'Author' - } - ] - - @mock.patch('syncano.models.base.Object.get_subclass_model') - def test_new(self, get_subclass_model_mock): - get_subclass_model_mock.return_value = Instance - self.assertFalse(get_subclass_model_mock.called) - - with self.assertRaises(SyncanoValueError): - Object() - - with self.assertRaises(SyncanoValueError): - 
Object(instance_name='dummy') - - self.assertFalse(get_subclass_model_mock.called) - o = Object(instance_name='dummy', class_name='dummy', x=1, y=2) - self.assertIsInstance(o, Instance) - self.assertTrue(get_subclass_model_mock.called) - get_subclass_model_mock.assert_called_once_with('dummy', 'dummy') - - def test_create_subclass(self): - SubClass = Object.create_subclass('Test', self.schema) - fields = [f for f in SubClass._meta.fields if f not in Object._meta.fields] - - self.assertEqual(SubClass.__name__, 'Test') - - for schema, field in zip(self.schema, fields): - query_allowed = ('order_index' in schema or 'filter_index' in schema) - self.assertEqual(schema['name'], field.name) - self.assertEqual(field.query_allowed, query_allowed) - self.assertFalse(field.required) - self.assertFalse(field.read_only) - - @mock.patch('syncano.models.base.registry') - @mock.patch('syncano.models.base.Object.create_subclass') - def test_get_or_create_subclass(self, create_subclass_mock, registry_mock): - create_subclass_mock.return_value = 1 - registry_mock.get_model_by_name.side_effect = [2, LookupError] - - self.assertFalse(registry_mock.get_model_by_name.called) - self.assertFalse(registry_mock.add.called) - self.assertFalse(create_subclass_mock.called) - - model = Object.get_or_create_subclass('test', [{}, {}]) - self.assertEqual(model, 2) - - self.assertTrue(registry_mock.get_model_by_name.called) - self.assertFalse(registry_mock.add.called) - self.assertFalse(create_subclass_mock.called) - registry_mock.get_model_by_name.assert_called_with('test') - - model = Object.get_or_create_subclass('test', [{}, {}]) - self.assertEqual(model, 1) - - self.assertTrue(registry_mock.get_model_by_name.called) - self.assertTrue(registry_mock.add.called) - self.assertTrue(create_subclass_mock.called) - - registry_mock.get_model_by_name.assert_called_with('test') - create_subclass_mock.assert_called_with('test', [{}, {}]) - registry_mock.add.assert_called_with('test', 1) - - self.assertEqual(registry_mock.get_model_by_name.call_count, 2) - self.assertEqual(registry_mock.add.call_count, 1) - self.assertEqual(create_subclass_mock.call_count, 1) - - def test_get_subclass_name(self): - self.assertEqual(Object.get_subclass_name('', ''), 'Object') - self.assertEqual(Object.get_subclass_name('duMMY', ''), 'DummyObject') - self.assertEqual(Object.get_subclass_name('', 'ClS'), 'ClsObject') - self.assertEqual(Object.get_subclass_name('duMMy', 'CLS'), 'DummyClsObject') - - @mock.patch('syncano.models.Manager.get') - def test_get_class_schema(self, get_mock): - get_mock.return_value = get_mock - self.assertFalse(get_mock.called) - result = Object.get_class_schema('dummy-instance', 'dummy-class') - self.assertTrue(get_mock.called) - self.assertEqual(result, get_mock.schema) - get_mock.assert_called_once_with('dummy-instance', 'dummy-class') - - @mock.patch('syncano.models.base.Object.create_subclass') - @mock.patch('syncano.models.base.Object.get_class_schema') - @mock.patch('syncano.models.manager.registry.get_model_by_name') - @mock.patch('syncano.models.base.Object.get_subclass_name') - def test_get_subclass_model(self, get_subclass_name_mock, get_model_by_name_mock, - get_class_schema_mock, create_subclass_mock): - - create_subclass_mock.return_value = create_subclass_mock - get_subclass_name_mock.side_effect = [ - 'Object', - 'DummyObject', - 'DummyObject', - ] - - get_model_by_name_mock.side_effect = [ - Object, - LookupError - ] - - result = Object.get_subclass_model('', '') - self.assertEqual(Object, result) - - result 
= Object.get_subclass_model('', '') - self.assertEqual(Object, result) - - result = Object.get_subclass_model('', '') - self.assertEqual(create_subclass_mock, result) - - -class WebhookTestCase(unittest.TestCase): - def setUp(self): - self.model = Webhook() - - @mock.patch('syncano.models.Webhook._get_connection') - def test_run(self, connection_mock): - model = Webhook(instance_name='test', slug='slug', links={'run': '/v1/instances/test/webhooks/slug/run/'}) - connection_mock.return_value = connection_mock - connection_mock.request.return_value = { - 'status': 'success', - 'duration': 937, - 'result': '1', - 'executed_at': '2015-03-16T11:52:14.172830Z' - } - - self.assertFalse(connection_mock.called) - self.assertFalse(connection_mock.request.called) - result = model.run(x=1, y=2) self.assertTrue(connection_mock.called) self.assertTrue(connection_mock.request.called) - self.assertIsInstance(result, WebhookResult) - self.assertEqual(result.status, 'success') - self.assertEqual(result.duration, 937) - self.assertEqual(result.result, '1') - self.assertIsInstance(result.executed_at, datetime) - - connection_mock.assert_called_once_with(x=1, y=2) - connection_mock.request.assert_called_once_with( + connection_mock.request.assert_called_with( 'POST', - '/v1/instances/test/webhooks/slug/run/', - data={'payload': '{"y": 2, "x": 1}'} + '/v1.1/instances/', + data={'name': 'test', 'expected_revision': 12} ) - - model = Webhook() - with self.assertRaises(SyncanoValidationError): - model.run() diff --git a/tests/test_options.py b/tests/test_options.py index 438bc94..5bef5dd 100644 --- a/tests/test_options.py +++ b/tests/test_options.py @@ -1,8 +1,7 @@ import unittest -from syncano.exceptions import SyncanoValueError -from syncano.models.base import Instance -from syncano.models.fields import Field +from syncano.exceptions import SyncanoValidationError, SyncanoValueError +from syncano.models import Field, Instance from syncano.models.options import Options @@ -16,15 +15,15 @@ class Meta: endpoints = { 'detail': { 'methods': ['delete', 'post', 'patch', 'get'], - 'path': '/v1/dummy/{name}/', + 'path': '/v1.1/dummy/{name}/', }, 'list': { 'methods': ['post', 'get'], - 'path': '/v1/dummy/', + 'path': '/v1.1/dummy/', }, 'dummy': { 'methods': ['post', 'get'], - 'path': '/v1/dummy/{a}/{b}/', + 'path': '/v1.1/dummy/{a}/{b}/', 'properties': ['a', 'b'] } } @@ -139,7 +138,7 @@ def test_resolve_endpoint(self): properties = {'instance_name': 'test', 'a': 'a', 'b': 'b'} path = self.options.resolve_endpoint('dummy', properties) - self.assertEqual(path, '/v1/instances/test/v1/dummy/a/b/') + self.assertEqual(path, '/v1.1/instances/test/v1.1/dummy/a/b/') def test_get_endpoint_query_params(self): properties = {'instance_name': 'test', 'x': 'y'} @@ -150,3 +149,13 @@ def test_get_path_properties(self): path = '/{a}/{b}-{c}/dummy-{d}/' properties = self.options.get_path_properties(path) self.assertEqual(properties, ['a', 'b']) + + def test_resolve_endpoint_with_missing_http_method(self): + properties = {'instance_name': 'test'} + with self.assertRaises(SyncanoValidationError): + self.options.resolve_endpoint('list', properties, 'DELETE') + + def test_resolve_endpoint_with_specified_http_method(self): + properties = {'instance_name': 'test', 'a': 'a', 'b': 'b'} + path = self.options.resolve_endpoint('dummy', properties, 'GET') + self.assertEqual(path, '/v1.1/instances/test/v1.1/dummy/a/b/') diff --git a/tests/test_push.py b/tests/test_push.py new file mode 100644 index 0000000..598bb9c --- /dev/null +++ b/tests/test_push.py 
@@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +import json +import unittest + +from mock import mock +from syncano.models import APNSDevice, APNSMessage, GCMDevice, GCMMessage + + +class ScriptTestCase(unittest.TestCase): + + @mock.patch('syncano.models.GCMDevice._get_connection') + def test_gcm_device(self, connection_mock): + model = GCMDevice( + instance_name='test', + label='example label', + registration_id=86152312314401555, + device_id='10000000001', + ) + + connection_mock.return_value = connection_mock + connection_mock.request.return_value = {'registration_id': 86152312314401555} + + self.assertFalse(connection_mock.called) + self.assertFalse(connection_mock.request.called) + + model.save() + + self.assertTrue(connection_mock.called) + self.assertTrue(connection_mock.request.called) + + connection_mock.assert_called_once_with() + connection_mock.request.assert_called_once_with( + 'POST', '/v1.1/instances/test/push_notifications/gcm/devices/', + data={"registration_id": '86152312314401555', "device_id": "10000000001", "is_active": True, + "label": "example label"} + ) + model.links = 'something' # make is_new() return False + model.delete() + connection_mock.request.assert_called_with( + 'DELETE', '/v1.1/instances/test/push_notifications/gcm/devices/86152312314401555/' + ) + + @mock.patch('syncano.models.APNSDevice._get_connection') + def test_apns_device(self, connection_mock): + # mock-based test only; real APNS values would differ + model = APNSDevice( + instance_name='test', + label='example label', + registration_id=86152312314401555, + device_id='10000000001', + ) + + connection_mock.return_value = connection_mock + connection_mock.request.return_value = {'registration_id': 86152312314401555} + + self.assertFalse(connection_mock.called) + self.assertFalse(connection_mock.request.called) + + model.save() + + self.assertTrue(connection_mock.called) + self.assertTrue(connection_mock.request.called) + + connection_mock.assert_called_once_with() + connection_mock.request.assert_called_once_with( + 'POST', '/v1.1/instances/test/push_notifications/apns/devices/', + data={"registration_id": '86152312314401555', "device_id": "10000000001", "is_active": True, + "label": "example label"} + ) + + model.links = 'something' # make is_new() return False + model.delete() + connection_mock.request.assert_called_with( + 'DELETE', '/v1.1/instances/test/push_notifications/apns/devices/86152312314401555/' + ) + + @mock.patch('syncano.models.GCMMessage._get_connection') + def test_gcm_message(self, connection_mock): + model = GCMMessage( + instance_name='test', + content={'data': 'some data'} + ) + connection_mock.return_value = connection_mock + + self.assertFalse(connection_mock.called) + self.assertFalse(connection_mock.request.called) + + model.save() + + self.assertTrue(connection_mock.called) + self.assertTrue(connection_mock.request.called) + + connection_mock.assert_called_once_with() + + call_args = connection_mock.request.call_args[0] + call_kwargs = connection_mock.request.call_args[1] + + call_kwargs['data']['content'] = json.loads(call_kwargs['data']['content']) + + self.assertEqual(('POST', '/v1.1/instances/test/push_notifications/gcm/messages/'), call_args) + self.assertDictEqual( + {'data': {'content': {"environment": "production", "data": "some data"}}}, + call_kwargs, + ) + + @mock.patch('syncano.models.APNSMessage._get_connection') + def test_apns_message(self, connection_mock): + model = APNSMessage( + instance_name='test', + content={'data': 'some data'} + ) + connection_mock.return_value = 
connection_mock + + self.assertFalse(connection_mock.called) + self.assertFalse(connection_mock.request.called) + + model.save() + + self.assertTrue(connection_mock.called) + self.assertTrue(connection_mock.request.called) + + connection_mock.assert_called_once_with() + call_args = connection_mock.request.call_args[0] + call_kwargs = connection_mock.request.call_args[1] + call_kwargs['data']['content'] = json.loads(call_kwargs['data']['content']) + self.assertEqual(('POST', '/v1.1/instances/test/push_notifications/apns/messages/'), call_args) + self.assertDictEqual(call_kwargs['data'], {'content': {"environment": "production", "data": "some data"}}) diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..fc2d2cf --- /dev/null +++ b/tox.ini @@ -0,0 +1,6 @@ +[tox] +envlist = py27,py34 +[testenv] +passenv = INTEGRATION_API_ROOT INTEGRATION_API_KEY INTEGRATION_API_EMAIL INTEGRATION_API_PASSWORD INTEGRATION_INSTANCE_NAME INTEGRATION_USER_NAME INTEGRATION_USER_PASSWORD +deps= -rrequirements.txt +commands=./run_tests.sh \ No newline at end of file