diff --git a/README.rst b/README.rst index db86fd8d6..f67a52098 100644 --- a/README.rst +++ b/README.rst @@ -32,7 +32,8 @@ version by issuing the following incantation ~~~~~~~~~~~~~ Prerequisites ~~~~~~~~~~~~~ -.. |libcouchbase_version| replace:: 2.9.0 +.. |libcouchbase_version| replace:: 2.10.0 + - Couchbase Server (http://couchbase.com/download) - libcouchbase_. version |libcouchbase_version| or greater (Bundled in Windows installer) - libcouchbase development files. diff --git a/couchbase/admin.py b/couchbase/admin.py index 348f1d3d9..b82c4dfa8 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -222,6 +222,34 @@ def bucket_remove(self, name): bucket_delete = bucket_remove + class BucketInfo(object): + """ + Information about a bucket + """ + def __init__(self, + raw_json # type: JSON + ): + self.raw_json = raw_json + + def name(self): + """ + Name of the bucket. + :return: A :class:`str` containing the bucket name. + """ + return self.raw_json.get("name") + + def __str__(self): + return "Bucket named {}".format(self.name) + + def buckets_list(self): + """ + Retrieve the list of buckets from the server + :return: An iterable of :Class:`Admin.BucketInfo` objects describing + the buckets currently active on the cluster. + """ + buckets_list = self.http_request(path='/pools/default/buckets', method='GET') + return map(Admin.BucketInfo, buckets_list.value) + def bucket_info(self, name): """ Retrieve information about the bucket. diff --git a/couchbase/bucket.py b/couchbase/bucket.py index e399530be..9a70cf84a 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -36,10 +36,14 @@ import json from couchbase.analytics import AnalyticsRequest, AnalyticsQuery from couchbase.connstr import ConnectionString - +from enum import IntEnum ### Private constants. This is to avoid imposing a dependency requirement ### For simple flags: +class PingStatus(IntEnum): + OK=_LCB.LCB_PINGSTATUS_OK + TIMEOUT=_LCB.LCB_PINGSTATUS_TIMEOUT + ERROR=_LCB.LCB_PINGSTATUS_ERROR def _depr(fn, usage, stacklevel=3): """Internal convenience function for deprecation warnings""" @@ -933,8 +937,17 @@ def ping(self): :raise: :exc:`.CouchbaseNetworkError` - :return: `dict` where keys are stat keys and values are - host-value pairs + :return: `dict` where keys are service types and values are + lists of dictionaries, each one describing a single + node. + + The 'status' entry of each value corresponds to an integer enum: + + PingStatus.OK(0) = ping responded in time + + PingStatus.TIMEOUT(1) = ping timed out + + PingStatus.ERROR(2) = there was some other error while trying to ping the host. Ping cluster (works on couchbase buckets):: @@ -1504,7 +1517,7 @@ def n1ql_query(self, query, *args, **kwargs): otherwise defaulting to :class:`~.N1QLRequest`. :param query: The query to execute. This may either be a - :class:`.N1QLQuery` object, or a string (which will be + :class:`~.N1QLQuery` object, or a string (which will be implicitly converted to one). :param kwargs: Arguments for :class:`.N1QLRequest`. :return: An iterator which yields rows. Each row is a dictionary @@ -1736,6 +1749,32 @@ def compression(self): def compression(self, value): self._cntl(_LCB.LCB_CNTL_COMPRESSION_OPTS, value_type='int', value=value) + @property + def compression_min_size(self): + """ + Minimum size (in bytes) of the document payload to be compressed when compression enabled. 
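As a quick illustration of the bucket-listing API added to couchbase/admin.py above, the sketch below walks the new iterable of Admin.BucketInfo objects; the host, port and administrative credentials are placeholders for your own cluster::

    from couchbase.admin import Admin

    # Placeholder administrative credentials and host.
    admin = Admin('Administrator', 'password', host='localhost', port=8091)

    # buckets_list() wraps GET /pools/default/buckets and yields one
    # Admin.BucketInfo per bucket currently defined on the cluster.
    for info in admin.buckets_list():
        print(info.name())      # name() returns the bucket name as a str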
+ + :type: int + """ + return self._cntl(_LCB.LCB_CNTL_COMPRESSION_MIN_SIZE, value_type='uint32_t') + + @compression_min_size.setter + def compression_min_size(self, value): + self._cntl(_LCB.LCB_CNTL_COMPRESSION_MIN_SIZE, value_type='uint32_t', value=value) + + @property + def compression_min_ratio(self): + """ + Minimum compression ratio (compressed / original) of the compressed payload to allow sending it to cluster. + + :type: float + """ + return self._cntl(_LCB.LCB_CNTL_COMPRESSION_MIN_RATIO, value_type='float') + + @compression_min_ratio.setter + def compression_min_ratio(self, value): + self._cntl(_LCB.LCB_CNTL_COMPRESSION_MIN_RATIO, value_type='float', value=value) + @property def is_ssl(self): """ diff --git a/couchbase/cluster.py b/couchbase/cluster.py index 8dfc40066..e4d987162 100644 --- a/couchbase/cluster.py +++ b/couchbase/cluster.py @@ -56,6 +56,7 @@ def __init__(self, connection_string='couchbase://localhost', bucket_class=Bucket): """ Creates a new Cluster object + :param connection_string: Base connection string. It is an error to specify a bucket in the string. :param bucket_class: :class:`couchbase.bucket.Bucket` implementation to @@ -75,6 +76,7 @@ def authenticate(self, authenticator=None, username=None, password=None): """ Set the type of authenticator to use when opening buckets or performing cluster management operations + :param authenticator: The new authenticator to use :param username: The username to authenticate with :param password: The password to authenticate with @@ -92,6 +94,7 @@ def open_bucket(self, bucket_name, **kwargs): # type: (str, str) -> Bucket """ Open a new connection to a Couchbase bucket + :param bucket_name: The name of the bucket to open :param kwargs: Additional arguments to provide to the constructor :return: An instance of the `bucket_class` object provided to @@ -301,8 +304,11 @@ def _warning(self, clash_param_dict, auth_type): def cluster_manager(self): """ - Returns an instance of :class:`~.couchbase.admin.Admin` which may be + Returns an object which may be used to create and manage buckets in the cluster. + + :returns: the cluster manager + :rtype: couchbase.admin.Admin """ credentials = self.authenticator.get_credentials()['options'] connection_string = str(self.connstr) @@ -312,6 +318,7 @@ def n1ql_query(self, query, *args, **kwargs): """ Issue a "cluster-level" query. This requires that at least one connection to a bucket is active. + :param query: The query string or object :param args: Additional arguments to :cb_bmeth:`n1ql_query` @@ -337,7 +344,29 @@ def n1ql_query(self, query, *args, **kwargs): raise NoBucketError('Must have at least one active bucket for query') +def _recursive_creds_merge(base, overlay): + for k, v in overlay.items(): + base_k = base.get(k, None) + if not base_k: + base[k] = v + continue + if isinstance(v, dict): + if isinstance(base_k, dict): + base[k] = _recursive_creds_merge(base_k, v) + else: + raise Exception("Cannot merge dict and {}".format(v)) + else: + raise Exception("Cannot merge non dicts") + return base + + class Authenticator(object): + def __init__(self, cert_path = None): + """ + :param cert_path: Path for SSL certificate (last in chain if multiple) + """ + self._cert_path = cert_path + def get_credentials(self, bucket=None): """ Gets the credentials for a specified bucket. 
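The two thresholds above complement the existing compression setting; a minimal sketch of tuning them together, with an illustrative endpoint, credentials and values::

    import couchbase
    from couchbase.cluster import Cluster, PasswordAuthenticator

    cluster = Cluster('couchbase://localhost')              # placeholder host
    cluster.authenticate(PasswordAuthenticator('Administrator', 'password'))
    cb = cluster.open_bucket('default')

    cb.compression = couchbase.COMPRESS_INOUT   # compress outgoing values, accept compressed ones
    cb.compression_min_size = 64                # documents under 64 bytes go out uncompressed
                                                # (values below 32 raise CouchbaseInputError)
    cb.compression_min_ratio = 0.83             # only ship the compressed form when
                                                # compressed/original falls below this ratio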
If bucket is @@ -374,23 +403,27 @@ def get_unique_creds_dict(cls): """ return {} - def get_cred_bucket(self, bucket): + def _base_options(self, bucket, overlay): + base_dict = {'options': {'cert_path': self._cert_path} if self._cert_path else {}} + return _recursive_creds_merge(base_dict, overlay) + + def get_cred_bucket(self, bucket, **overlay): """ :param bucket: :return: returns the non-unique parts of the credentials for bucket authentication, as a dictionary of functions, e.g.: 'options': {'username': self.username}, 'scheme': 'couchbases'} """ - raise NotImplementedError() + return self._base_options(bucket, overlay) - def get_cred_not_bucket(self): + def get_cred_not_bucket(self, **overlay): """ :param bucket: :return: returns the non-unique parts of the credentials for admin access as a dictionary of functions, e.g.: {'options':{'password': self.password}} """ - raise NotImplementedError() + return self._base_options(None, overlay) def get_auto_credentials(self, bucket): """ @@ -408,7 +441,7 @@ def get_auto_credentials(self, bucket): class PasswordAuthenticator(Authenticator): - def __init__(self, username, password): + def __init__(self, username, password, cert_path=None): """ This class uses a single credential pair of username and password, and is designed to be used either with cluster management operations or @@ -419,19 +452,23 @@ def __init__(self, username, password): :param username: :param password: + :param cert_path: + Path of the CA key .. warning:: This functionality is experimental both in API and implementation. """ + super(PasswordAuthenticator,self).__init__(cert_path=cert_path) self.username = username self.password = password - def get_cred_bucket(self, *unused): - return {'options': {'username': self.username, 'password': self.password}} + def get_cred_bucket(self, bucket, **overlay): + return self.get_cred_not_bucket(**overlay) - def get_cred_not_bucket(self): - return self.get_cred_bucket() + def get_cred_not_bucket(self, **overlay): + merged = _recursive_creds_merge({'options': {'username': self.username, 'password': self.password}}, overlay) + return super(PasswordAuthenticator, self).get_cred_not_bucket(**merged) @classmethod def unwanted_keys(cls): @@ -441,7 +478,8 @@ def unwanted_keys(cls): class ClassicAuthenticator(Authenticator): def __init__(self, cluster_username=None, cluster_password=None, - buckets=None): + buckets=None, + cert_path=None): """ Classic authentication mechanism. :param cluster_username: @@ -450,16 +488,20 @@ def __init__(self, cluster_username=None, Global cluster password. Only required for management operations :param buckets: A dictionary of `{bucket_name: bucket_password}`. 
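A minimal sketch of how the new cert_path argument is meant to be used with PasswordAuthenticator; the TLS scheme, host, credentials and certificate path are placeholders::

    from couchbase.cluster import Cluster, PasswordAuthenticator

    cluster = Cluster('couchbases://localhost')     # TLS scheme, placeholder host
    cluster.authenticate(PasswordAuthenticator('Administrator', 'password',
                                               cert_path='/path/to/cluster-ca.pem'))
    bucket = cluster.open_bucket('default')

    # The authenticator now folds {'options': {'cert_path': ...}} into the
    # credentials handed to open_bucket(), via _recursive_creds_merge().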
+ :param cert_path: + Path of the CA key """ + super(ClassicAuthenticator, self).__init__(cert_path=cert_path) self.username = cluster_username self.password = cluster_password self.buckets = buckets if buckets else {} def get_cred_not_bucket(self): - return {'options': {'username': self.username, 'password': self.password}} + return super(ClassicAuthenticator, self).get_cred_not_bucket(**{'options': {'username': self.username, 'password': self.password}}) - def get_cred_bucket(self, bucket): - return {'options': {'password': self.buckets.get(bucket)}} + def get_cred_bucket(self, bucket, **overlay): + merged=_recursive_creds_merge({'options': {'password': self.buckets.get(bucket)}}, overlay) + return super(ClassicAuthenticator, self).get_cred_bucket(bucket, **merged) class CertAuthenticator(Authenticator): @@ -479,22 +521,26 @@ def __init__(self, cert_path=None, key_path=None, trust_store_path=None, cluster :param trust_store_path: Path of the certificate trust store. """ + super(CertAuthenticator, self).__init__(cert_path=cert_path) + self.username = cluster_username self.password = cluster_password - self.certpath = cert_path self.keypath = key_path self.trust_store_path = trust_store_path @classmethod def get_unique_creds_dict(clazz): - return {'certpath': lambda self: self.certpath, 'keypath': lambda self: self.keypath, - 'truststorepath': lambda self: self.trust_store_path} + return { 'keypath': lambda self: self.keypath, + 'truststorepath': lambda self: self.trust_store_path} - def get_cred_bucket(self, *unused): - return {'options': {'username': self.username}, 'scheme': 'couchbases'} + def get_cred_bucket(self, bucket, **overlay): + merged = _recursive_creds_merge( + {'options': {'username': self.username}, 'scheme': 'couchbases'}, + overlay) + return super(CertAuthenticator, self).get_cred_bucket(bucket, **merged) def get_cred_not_bucket(self): - return {'options': {'password': self.password}} + return super(CertAuthenticator, self).get_cred_not_bucket(**{'options': {'password': self.password}}) @classmethod def unwanted_keys(cls): diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index cc2d8b3b1..1179dce42 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -21,10 +21,11 @@ from couchbase.views.iterator import AlreadyQueriedError from couchbase.exceptions import CouchbaseError import sys +from typing import Iterator # Not used internally, but by other modules from couchbase.mutation_state import MutationState, MissingTokenError - +from couchbase import JSON class N1QLError(CouchbaseError): @property @@ -428,10 +429,22 @@ def raw(self): @property def meta(self): + """ + The metadata as a property + + :return: the query metadata + :rtype: JSON + """ return self.meta_retrieve() @property def metrics(self): + """ + Get query metrics from the metadata + + :return: a dictionary containing the metrics metadata + :rtype: JSON + """ return self.meta_retrieve().get('metrics', None) def meta_retrieve(self, meta_lookahead = None): @@ -512,7 +525,12 @@ def get_single_result(self): return r def __iter__(self): - # type: ()->JSON + # type: ()->Iterator[JSON] + """ + An iterator through the results. 
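The meta and metrics properties documented above are only complete once the rows have been consumed, so a typical (illustrative) flow drains the iterator first; host, bucket name and query are placeholders::

    from couchbase.cluster import Cluster, PasswordAuthenticator
    from couchbase.n1ql import N1QLQuery

    cluster = Cluster('couchbase://localhost')              # placeholder host
    cluster.authenticate(PasswordAuthenticator('Administrator', 'password'))
    cb = cluster.open_bucket('default')

    result = cb.n1ql_query(N1QLQuery('SELECT * FROM `default` LIMIT 5'))
    rows = list(result)      # drain the iterator; metadata is only final afterwards

    print(result.metrics)    # JSON dict of query metrics
    print(result.meta)       # full metadata block returned by the query service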
+ + :returns: Iterator[JSON] + """ if self.buffered_remainder: while len(self.buffered_remainder)>0: yield self.buffered_remainder.pop(0) diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index 32cface98..6c1315d4a 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -559,6 +559,17 @@ def setUp(self, **kwargs): super(ConnectionTestCaseBase, self).setUp() self.cb = self.make_connection(**kwargs) + def sleep(self, duration): + expected_end=time.time()+duration + while True: + remaining_time=expected_end-time.time() + if remaining_time<=0: + break + try: + self.cb.get("dummy",ttl=remaining_time) + except: + pass + def tearDown(self): super(ConnectionTestCaseBase, self).tearDown() if hasattr(self, '_implDtorHook'): diff --git a/couchbase/tests/cases/admin_t.py b/couchbase/tests/cases/admin_t.py index 69de5d46c..7ed8cbd60 100644 --- a/couchbase/tests/cases/admin_t.py +++ b/couchbase/tests/cases/admin_t.py @@ -30,7 +30,7 @@ import couchbase import time - +import json class AdminSimpleTest(CouchbaseTestCase): def setUp(self): super(AdminSimpleTest, self).setUp() @@ -70,6 +70,18 @@ def test_bucket_param(self): bucket='default') self.assertIsNotNone(admin) + def test_bucket_list(self): + buckets_to_add = {'fred': {}, 'jane': {}, 'sally': {}} + try: + for bucket, kwargs in buckets_to_add.items(): + self.admin.bucket_create(bucket, bucket_password='password', **kwargs) + + self.assertEqual(set(), {"fred", "jane", "sally"}.difference( + set(map(Admin.BucketInfo.name, self.admin.buckets_list())))) + finally: + for bucket, kwargs in buckets_to_add.items(): + self.admin.bucket_remove(bucket) + def test_bad_request(self): self.assertRaises(HTTPError, self.admin.http_request, '/badpath') diff --git a/couchbase/tests/cases/cluster_t.py b/couchbase/tests/cases/cluster_t.py index b8fe292aa..ccde2565d 100644 --- a/couchbase/tests/cases/cluster_t.py +++ b/couchbase/tests/cases/cluster_t.py @@ -17,10 +17,15 @@ from unittest import SkipTest +from couchbase.exceptions import CouchbaseFatalError, CouchbaseInputError, CouchbaseNetworkError from couchbase.tests.base import CouchbaseTestCase from couchbase.connstr import ConnectionString from couchbase.cluster import Cluster, ClassicAuthenticator,PasswordAuthenticator, NoBucketError, MixedAuthError, CertAuthenticator import gc +import os +import warnings + +CERT_PATH = os.getenv("PYCBC_CERT_PATH") class ClusterTest(CouchbaseTestCase): @@ -170,3 +175,42 @@ def test_can_authenticate_with_username_password(self): bucket = cluster.open_bucket(bucket_name) self.assertIsNotNone(bucket) + + def _test_allow_cert_path_with_SSL_mock_errors(self, func, *args, **kwargs): + try: + func(*args,**kwargs) + except Exception as e: + if self.is_realserver and CERT_PATH: + raise + try: + raise e + except CouchbaseNetworkError as f: + self.assertRegex(str(e),r'.*(refused the connection).*') + except CouchbaseFatalError as f: + self.assertRegex(str(e),r'.*(SSL subsystem).*') + except CouchbaseInputError as f: + self.assertRegex(str(e),r'.*(not supported).*') + warnings.warn("Got exception {} but acceptable error for Mock with SSL+cert_path tests".format(str(e))) + + def test_can_authenticate_with_cert_path_and_username_password_via_PasswordAuthenticator(self): + cluster = Cluster( + 'couchbases://{host}?certpath={certpath}'.format(host=self.cluster_info.host, certpath=CERT_PATH)) + authenticator = PasswordAuthenticator(self.cluster_info.admin_username, self.cluster_info.admin_password) + cluster.authenticate(authenticator) + 
self._test_allow_cert_path_with_SSL_mock_errors(cluster.open_bucket, self.cluster_info.bucket_name) + + def test_can_authenticate_with_cert_path_and_username_password_via_ClassicAuthenticator(self): + cluster = Cluster( + 'couchbases://{host}?certpath={certpath}'.format(host=self.cluster_info.host, certpath=CERT_PATH)) + authenticator = ClassicAuthenticator(buckets={self.cluster_info.bucket_name: self.cluster_info.bucket_password}, + cluster_username=self.cluster_info.admin_username, + cluster_password=self.cluster_info.admin_password) + cluster.authenticate(authenticator) + self._test_allow_cert_path_with_SSL_mock_errors(cluster.open_bucket, self.cluster_info.bucket_name) + + def test_can_authenticate_with_cert_path_and_username_password_via_kwargs(self): + cluster = Cluster( + 'couchbases://{host}?certpath={certpath}'.format(host=self.cluster_info.host, certpath=CERT_PATH)) + self._test_allow_cert_path_with_SSL_mock_errors(cluster.open_bucket, self.cluster_info.bucket_name, + username=self.cluster_info.admin_username, + password=self.cluster_info.admin_password) diff --git a/couchbase/tests/cases/diag_t.py b/couchbase/tests/cases/diag_t.py index 61c0bdb80..23644f167 100644 --- a/couchbase/tests/cases/diag_t.py +++ b/couchbase/tests/cases/diag_t.py @@ -21,6 +21,7 @@ import jsonschema import re import couchbase._libcouchbase as LCB +from couchbase.bucket import PingStatus # For Python 2/3 compatibility try: @@ -32,7 +33,8 @@ "properties": {"details": {"type": "string"}, "latency": {"anyOf": [{"type": "number"}, {"type": "string"}]}, "server": {"type": "string"}, - "status": {"type": "number"} + "status": {"type": "number", + "enum": list(PingStatus)} }, "required": ["latency", "server", "status"]} diff --git a/couchbase/tests/cases/misc_t.py b/couchbase/tests/cases/misc_t.py index 66a188087..67c48c8f1 100644 --- a/couchbase/tests/cases/misc_t.py +++ b/couchbase/tests/cases/misc_t.py @@ -29,7 +29,7 @@ from couchbase.tests.base import ConnectionTestCaseBase from couchbase.user_constants import FMT_JSON, FMT_AUTO, FMT_JSON, FMT_PICKLE -from couchbase.exceptions import ClientTemporaryFailError +from couchbase.exceptions import ClientTemporaryFailError, InternalSDKError, ArgumentError, CouchbaseInputError from couchbase.exceptions import CouchbaseError import couchbase import re @@ -37,6 +37,7 @@ from couchbase import enable_logging from couchbase import COMPRESS_INOUT import logging +import time class MiscTest(ConnectionTestCaseBase): @@ -228,18 +229,58 @@ def test_multi_auth(self): def test_compression(self): import couchbase._libcouchbase as _LCB items = list(_LCB.COMPRESSION.items()) - for entry in range(0, len(items)*2): + for entry in range(0, len(items) * 2): connstr, cntl = items[entry % len(items)] print(connstr + "," + str(cntl)) - cb = self.make_connection(compression=connstr) - self.assertEqual(cb.compression, cntl) - value = "world" + str(entry) - cb.upsert("hello", value) - cb.compression = items[(entry + 1) % len(items)][1] - self.assertEqual(value, cb.get("hello").value) - cb.remove("hello") + sends_compressed = self.send_compressed(entry) + for min_size in [0, 31, 32] if sends_compressed else [None]: + for min_ratio in [0, 0.5] if sends_compressed else [None]: + def set_comp(): + cb.compression_min_size = min_size + + cb = self.make_connection(compression=connstr) + if min_size: + if min_size < 32: + self.assertRaises(CouchbaseInputError, set_comp) + else: + set_comp() + + if min_ratio: + cb.compression_min_ratio = min_ratio + self.assertEqual(cb.compression, cntl) + value = 
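Per the schema change above, each node entry reported by Bucket.ping() carries a numeric status that maps onto the new PingStatus enum; a small sketch with placeholder host and credentials::

    from couchbase.bucket import PingStatus
    from couchbase.cluster import Cluster, PasswordAuthenticator

    cluster = Cluster('couchbase://localhost')              # placeholder host
    cluster.authenticate(PasswordAuthenticator('Administrator', 'password'))
    cb = cluster.open_bucket('default')

    # ping() returns {service_type: [node_dict, ...]}; each node dict carries
    # at least 'server', 'latency' and a numeric 'status'.
    for service, nodes in cb.ping().items():
        for node in nodes:
            status = PingStatus(node['status'])
            if status is not PingStatus.OK:
                print('{}: {} -> {}'.format(service, node['server'], status.name))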
"world" + str(entry) + cb.upsert("hello", value) + cb.compression = items[(entry + 1) % len(items)][1] + self.assertEqual(value, cb.get("hello").value) + cb.remove("hello") + + @staticmethod + def send_compressed(entry): + return entry in map(_LCB.__getattribute__, ('COMPRESS_FORCE', 'COMPRESS_INOUT', 'COMPRESS_OUT')) def test_compression_named(self): import couchbase._libcouchbase as _LCB cb = self.make_connection() cb.compression =couchbase.COMPRESS_INOUT + + def test_consistency_check_pyexception(self): + items = {str(k): str(v) for k, v in zip(range(0, 100), range(0, 100))} + self.cb.upsert_multi(items) + self.cb.get_multi(items.keys()) + self.cb.check_type = _LCB.PYCBC_CHECK_FAIL + + for x in range(0, 10): + init_time = time.time() + exception = None + while (time.time() - init_time) < 10: + try: + self.cb.get_multi(items.keys()) + except Exception as e: + exception = e + break + + def raiser(): + raise exception + + self.assertRaisesRegex(InternalSDKError, r'self->nremaining!=0, resetting to 0', raiser) + diff --git a/couchbase/tests/cases/touch_t.py b/couchbase/tests/cases/touch_t.py index 19bd03619..37e9ffa97 100644 --- a/couchbase/tests/cases/touch_t.py +++ b/couchbase/tests/cases/touch_t.py @@ -16,6 +16,7 @@ # import time +from flaky import flaky from nose.plugins.attrib import attr from couchbase.tests.base import ConnectionTestCase @@ -28,26 +29,28 @@ def setUp(self): super(TouchTest, self).setUp() self.cb = self.make_connection() + @flaky(10, 1) def test_trivial_touch(self): key = self.gen_key("trivial_touch") self.cb.upsert(key, "value", ttl=1) rv = self.cb.touch(key, ttl=0) self.assertTrue(rv.success) - time.sleep(2) + self.sleep(2) rv = self.cb.get(key) self.assertTrue(rv.success) self.assertEqual(rv.value, "value") self.cb.touch(key, ttl=1) - time.sleep(2) + self.sleep(2) rv = self.cb.get(key, quiet=True) self.assertFalse(rv.success) self.assertTrue(E.NotFoundError._can_derive(rv.rc)) + @flaky(10, 1) def test_trivial_multi_touch(self): kv = self.gen_kv_dict(prefix="trivial_multi_touch") self.cb.upsert_multi(kv, ttl=1) - time.sleep(2) + self.sleep(10) rvs = self.cb.get_multi(kv.keys(), quiet=True) self.assertFalse(rvs.all_ok) @@ -57,10 +60,11 @@ def test_trivial_multi_touch(self): self.assertTrue(rvs.all_ok) self.cb.touch_multi(kv.keys(), ttl=1) - time.sleep(2) + self.sleep(10) rvs = self.cb.get_multi(kv.keys(), quiet=True) self.assertFalse(rvs.all_ok) + @flaky(10, 1) def test_dict_touch_multi(self): k_missing = self.gen_key("dict_touch_multi_missing") k_existing = self.gen_key("dict_touch_multi_existing") @@ -69,10 +73,10 @@ def test_dict_touch_multi(self): {k_missing : "missing_val", k_existing : "existing_val"}) self.cb.touch_multi({k_missing : 1, k_existing : 3}) - time.sleep(2) + self.sleep(2) rvs = self.cb.get_multi([k_missing, k_existing], quiet=True) self.assertTrue(rvs[k_existing].success) self.assertFalse(rvs[k_missing].success) - time.sleep(2) + self.sleep(2) rv = self.cb.get(k_existing, quiet=True) self.assertFalse(rv.success) diff --git a/couchbase_version.py b/couchbase_version.py index f0411617d..e0869a9e6 100755 --- a/couchbase_version.py +++ b/couchbase_version.py @@ -20,7 +20,7 @@ class MalformedGitTag(Exception): pass -RE_XYZ = re.compile(r'(\d+)\.(\d+)\.(\d)(?:-(.*))?') +RE_XYZ = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:-(.*))?') VERSION_FILE = os.path.join( os.path.dirname(__file__), diff --git a/dev_requirements.txt b/dev_requirements.txt index 2a469dd6d..58e718376 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,8 +1,8 @@ requests 
nose==1.3.0 -pbr==1.8.1 -numpydoc==0.4 -sphinx==1.6.4 +pbr==3.0.0 +numpydoc==0.9.1 +sphinx==1.8.5 testresources>=0.2.7 jsonschema==2.6.0 configparser==3.5.0 @@ -14,3 +14,7 @@ configparser2 parameterized==0.6.1 utilspie==0.1.0 python-vagrant>=0.5.15 +beautifulsoup4==4.7.1 +jira==2.0.0 +flaky>=3.6.1 +coverage>=4.5.4,<5.0.0 diff --git a/docs/source/api/couchbase.rst b/docs/source/api/couchbase.rst index c4f7e3e6a..2f8a59eda 100644 --- a/docs/source/api/couchbase.rst +++ b/docs/source/api/couchbase.rst @@ -1,3 +1,12 @@ +================= +Cluster object +================= + +.. module:: couchbase.cluster + +.. autoclass:: Cluster + :members: + ================= Bucket object ================= @@ -303,6 +312,13 @@ N1QL Query Methods .. automethod:: n1ql_query +Analytics Query Methods +======================= + +.. currentmodule:: couchbase.bucket +.. class:: Bucket + + .. automethod:: analytics_query Full-Text Search Methods ======================== @@ -472,6 +488,10 @@ Attributes .. autoattribute:: compression + .. autoattribute:: compression_min_size + + .. autoattribute:: compression_min_ratio + .. attribute:: default_format Specify the default format (default: :const:`~couchbase.FMT_JSON`) diff --git a/docs/source/relnotes.py b/docs/source/relnotes.py new file mode 100644 index 000000000..dbf1c86d9 --- /dev/null +++ b/docs/source/relnotes.py @@ -0,0 +1,75 @@ +from collections import defaultdict +from bs4 import BeautifulSoup +import requests +import os.path +import re +from jira import JIRA +import argparse +import datetime + +server = "https://issues.couchbase.com" +options = dict(server=server) +jira = JIRA(options) +project_code = "PYCBC" +project = jira.project(project_code) +print("got project {}".format(project.versions)) +parser = argparse.ArgumentParser(description="Generate release notes in Asciidoc format") +parser.add_argument('version',type=str) +args=parser.parse_args() +ver_num = args.version +project_version = next(iter(filter(lambda x: x.name == ver_num, project.versions)), None) +relnotes_raw = requests.get( + "{}/secure/ReleaseNote.jspa?projectId={}&version={}".format(server, project.id, + project_version.id)) +soup = BeautifulSoup(relnotes_raw.text, 'html.parser') +content = soup.find("section", class_="aui-page-panel-content") +outputdir = os.path.join("build") + +date = datetime.date.today().strftime("%B {day} %Y").format(day=datetime.date.today().day) +try: + os.makedirs(outputdir) +except: + pass +with open(os.path.join(outputdir, "relnotes.adoc"), "w+") as outputfile: + section_type = None + result = defaultdict(lambda: []) + mapping = {"Task": "Enhancements", + "Improvement": "Enhancements", + "New Feature": "Enhancements", + "Bug": "Fixes"} + version = re.match(r'^(.*)Version ([0-9]+\.[0-9]+\.[0-9]+).*$', content.title.text).group(2) + print("got version {}".format(version)) + for entry in content.body.find_all(): + if re.match(r'h[0-9]+', entry.name): + print("Got section :{}".format(entry.text)) + section_type = mapping.get(entry.text.strip(), None) + if re.match("Edit/Copy Release Notes", entry.text): + break + else: + if section_type: + items = entry.find_all('li') + output = [] + for item in items: + link = item.find('a') + output.append(dict(link=link.get('href'), issuenumber=link.text, + description=re.sub(r'^(.*?)- ', '', item.text))) + result[section_type] += output + + output = """ +== Python SDK {version} Release Notes ({date}) + +[source,bash] +---- +pip install couchbase=={version} +---- + +*API Docs:* 
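Returning to the couchbase_version.py change above: widening the patch group from (\d) to (\d+) matters as soon as a patch level reaches two digits. A standalone illustration of the difference::

    import re

    OLD = re.compile(r'(\d+)\.(\d+)\.(\d)(?:-(.*))?')    # previous pattern
    NEW = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:-(.*))?')   # patched pattern

    tag = '2.5.10-beta1'                                 # hypothetical git tag
    print(OLD.match(tag).groups())   # ('2', '5', '1', None)  -- patch level truncated
    print(NEW.match(tag).groups())   # ('2', '5', '10', 'beta1')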
http://docs.couchbase.com/sdk-api/couchbase-python-client-{version}/ + +{contents} +""".format(version=version, date=date, contents='\n'.join( + "=== {type}\n\n{value}".format(type=type, value='\n'.join( + """* {link}[{issuenumber}]: +{description}\n""".format(**item) for item in value)) for type, value in result.items())) + + print(output) + outputfile.write(output) diff --git a/setup.py b/setup.py index a7085b781..f4c563980 100644 --- a/setup.py +++ b/setup.py @@ -181,14 +181,22 @@ def comp_option_pattern(prefix): cmake_build=os.environ.get("PYCBC_CMAKE_BUILD") # Dummy dependency to prevent installation of Python < 3 package on Windows. - +try: + pipversion = pip.__version__ +except: + pipversion = "0.0.0" pip_not_on_win_python_lt_3 = ( - ["pip>=9.0; (sys_platform != 'win32' and python_version >= '2.7') or (python_version >= '3.0')"] - if pip.__version__ >= "9.0.0" + ['pip>=9.0; (sys_platform != "win32" and python_version >= "2.7") or (python_version >= "3.0")'] + if pipversion >= "9.0.0" else []) conan_and_cmake_deps = (['conan', 'cmake>=3.0.2'] if cmake_build and sys.platform.startswith('darwin') else []) +gen_reqs = [] +if sys.version_info < (3, 7): + gen_reqs += ['typing'] +if sys.version_info < (3, 5): + gen_reqs += ['enum34'] setup( name = 'couchbase', @@ -230,8 +238,8 @@ def comp_option_pattern(prefix): 'acouchbase.py34only' ] if sys.version_info >= (3, 4) else []), package_data=pkgdata, - setup_requires=['typing'] + conan_and_cmake_deps, - install_requires=['typing'] + pip_not_on_win_python_lt_3, + setup_requires=gen_reqs + conan_and_cmake_deps, + install_requires=gen_reqs + pip_not_on_win_python_lt_3, tests_require=['nose', 'testresources>=0.2.7', 'basictracer==2.2.0'], test_suite='couchbase.tests.test_sync', **setup_kw diff --git a/src/bucket.c b/src/bucket.c index a1d8ec82e..120ef937d 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -617,6 +617,11 @@ static struct PyMemberDef Bucket_TABLE_members[] = { "This attribute can only be set from the constructor.\n") }, + { "check_type", T_UINT, offsetof(pycbc_Bucket, check_type), + 0, + PyDoc_STR("What sort of nremaining consistency check to do after a wait.") + }, + { "bucket", T_OBJECT_EX, offsetof(pycbc_Bucket, bucket), READONLY, PyDoc_STR("Name of the bucket this object is connected to") @@ -859,13 +864,13 @@ Bucket__init__(pycbc_Bucket *self, PyObject *dfl_fmt = NULL; PyObject *tc = NULL; struct lcb_create_st create_opts = { 0 }; - + self->check_type = PYCBC_CHECK_STRICT; /** * This xmacro enumerates the constructor keywords, targets, and types. * This was converted into an xmacro to ease the process of adding or * removing various parameters. 
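One caveat about the guard above: pipversion >= "9.0.0" is a lexicographic string comparison, so a pip release numbered 10.x or later compares lower than "9.0.0" and the conditional dependency is silently dropped. A hedged sketch of a numeric comparison, assuming pkg_resources (setuptools) is importable at build time::

    from pkg_resources import parse_version

    # pipversion is the string computed earlier in setup.py.
    pip_not_on_win_python_lt_3 = (
        ['pip>=9.0; (sys_platform != "win32" and python_version >= "2.7") or (python_version >= "3.0")']
        if parse_version(pipversion) >= parse_version("9.0.0")
        else [])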
*/ -#define XCTOR_ARGS_NOTRACING(X) \ +#define XCTOR_ARGS_NOTRACING(X) \ X("connection_string", &create_opts.v.v3.connstr, "z") \ X("connstr", &create_opts.v.v3.connstr, "z") \ X("username", &create_opts.v.v3.username, "z") \ @@ -878,7 +883,7 @@ Bucket__init__(pycbc_Bucket *self, X("_flags", &self->flags, "I") \ X("_conntype", &conntype, "i") \ X("_iops", &iops_O, "O") \ - + X("_check_inconsistent", &self->check_type, "I") #ifdef PYCBC_TRACING #define XCTOR_ARGS(X)\ XCTOR_ARGS_NOTRACING(X)\ diff --git a/src/constants.c b/src/constants.c index 1941005b8..ecabe39c6 100644 --- a/src/constants.c +++ b/src/constants.c @@ -201,6 +201,11 @@ do_all_constants(PyObject *module, pycbc_constant_handler handler) ADD_MACRO(LCB_CNTL_N1QL_TIMEOUT); ADD_MACRO(LCB_CNTL_COMPRESSION_OPTS); ADD_MACRO(LCB_CNTL_LOG_REDACTION); + + ADD_MACRO(PYCBC_CHECK_NONE); + ADD_MACRO(PYCBC_CHECK_STRICT); + ADD_MACRO(PYCBC_CHECK_FAIL); + ADD_STRING(LCB_LOG_MD_OTAG); ADD_STRING(LCB_LOG_MD_CTAG); ADD_STRING(LCB_LOG_SD_OTAG); @@ -233,6 +238,13 @@ do_all_constants(PyObject *module, pycbc_constant_handler handler) ADD_MACRO(LCBCRYPTO_KEY_ENCRYPT); ADD_MACRO(LCBCRYPTO_KEY_DECRYPT); +#define PYCBC_PING_STATUS(X) ADD_MACRO(LCB_PINGSTATUS_##X); +#define PYCBC_PP_PING_STATUS(X) \ + X(OK) \ + X(TIMEOUT) \ + X(ERROR) + + PYCBC_PP_PING_STATUS(PYCBC_PING_STATUS) LCB_CONSTANT(VERSION); ADD_MACRO(PYCBC_CRYPTO_VERSION); #ifdef PYCBC_TRACING @@ -242,6 +254,8 @@ do_all_constants(PyObject *module, pycbc_constant_handler handler) ADD_CONSTANT("PYCBC_TRACING",0); #endif setup_compression_map(module, public_constants, handler); + ADD_MACRO(LCB_CNTL_COMPRESSION_MIN_SIZE); + ADD_MACRO(LCB_CNTL_COMPRESSION_MIN_RATIO); setup_crypto_exceptions(module, handler); PyModule_AddObject( module, "CRYPTO_EXCEPTIONS", pycbc_gen_crypto_exception_map()); diff --git a/src/ext.c b/src/ext.c index 32c439686..16f7e4ff6 100644 --- a/src/ext.c +++ b/src/ext.c @@ -1688,7 +1688,6 @@ void pycbc_MultiResult_init_context(pycbc_MultiResult *self, PyObject *curkey, PYCBC_DEBUG_PYFORMAT_CONTEXT(context, "After insertion:[%R]", mres_dict); DONE: PYCBC_PYBUF_RELEASE(&keybuf); - PYCBC_XDECREF(curkey); } int pycbc_is_async_or_pipeline(const pycbc_Bucket *self) { return self->flags & PYCBC_CONN_F_ASYNC || self->pipeline_queue; } diff --git a/src/miscops.c b/src/miscops.c index e14fc3a6e..8cdae76ef 100644 --- a/src/miscops.c +++ b/src/miscops.c @@ -397,7 +397,7 @@ TRACED_FUNCTION_WRAPPER(_ping,LCBTRACE_OP_REQUEST_ENCODING,Bucket) struct pycbc_common_vars cv = PYCBC_COMMON_VARS_STATIC_INIT; lcb_CMDPING cmd = {0}; cmd.services = LCB_PINGSVC_F_KV | LCB_PINGSVC_F_N1QL | LCB_PINGSVC_F_VIEWS | - LCB_PINGSVC_F_FTS; + LCB_PINGSVC_F_FTS | LCB_PINGSVC_F_ANALYTICS; cmd.options = LCB_PINGOPT_F_JSON | LCB_PINGOPT_F_JSONPRETTY; if (1) { cmd.options |= LCB_PINGOPT_F_JSONDETAILS; diff --git a/src/multiresult.c b/src/multiresult.c index a0c0d068c..4fce74609 100644 --- a/src/multiresult.c +++ b/src/multiresult.c @@ -320,14 +320,11 @@ int pycbc_multiresult_maybe_raise2(pycbc_MultiResult *self, PyObject_SetAttrString(value, "result", (PyObject*)res); } - PyErr_Restore(type, value, traceback); - /** * This is needed since the exception object will later contain * a reference to ourselves. 
If we don't free the original exception, * then we'll be stuck with a circular reference */ - if (PyObject_IsInstance(value, pycbc_helpers.default_exception)) { PyObject_SetAttrString(value, "all_results", (PyObject*)self); Py_XDECREF(self->exceptions); @@ -336,6 +333,8 @@ int pycbc_multiresult_maybe_raise2(pycbc_MultiResult *self, self->exceptions = NULL; self->errop = NULL; + PyErr_Restore(type, value, traceback); + return 1; } diff --git a/src/oputil.c b/src/oputil.c index 994dc7f3d..ac97805b8 100644 --- a/src/oputil.c +++ b/src/oputil.c @@ -64,11 +64,18 @@ pycbc_common_vars_wait, struct pycbc_common_vars *cv, pycbc_Bucket *self) } pycbc_oputil_wait_common(self, context); - if (!pycbc_assert(self->nremaining == 0)) { + if (self->nremaining || self->check_type == PYCBC_CHECK_FAIL) { fprintf(stderr, "Remaining count %d!= 0. Adjusting", (int)self->nremaining); self->nremaining = 0; + if (self->check_type != PYCBC_CHECK_NONE) { + cv->ret = NULL; + PYCBC_EXC_WRAP(PYCBC_EXC_INTERNAL, + 0, + "self->nremaining!=0, resetting to 0"); + goto FAIL; + } } if (pycbc_multiresult_maybe_raise(cv->mres)) { @@ -76,6 +83,7 @@ pycbc_common_vars_wait, struct pycbc_common_vars *cv, pycbc_Bucket *self) } cv->ret = pycbc_multiresult_get_result(cv->mres); +FAIL: Py_DECREF(cv->mres); cv->mres = NULL; diff --git a/src/pycbc.h b/src/pycbc.h index c3c49b0bb..2886abc79 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -507,6 +507,12 @@ struct pycbc_Tracer; #endif #endif +typedef enum { + PYCBC_CHECK_STRICT, + PYCBC_CHECK_NONE, + PYCBC_CHECK_FAIL +} pycbc_check_type; + typedef struct { PyObject_HEAD @@ -579,6 +585,8 @@ typedef struct { pycbc_dur_params dur_global; unsigned long dur_timeout; + pycbc_check_type check_type; + } pycbc_Bucket; #ifdef PYCBC_TRACING diff --git a/tests.ini.sample b/tests.ini.sample index a0ceea8a6..0613ab82d 100644 --- a/tests.ini.sample +++ b/tests.ini.sample @@ -25,7 +25,7 @@ bucket_name = default bucket_password = ; Set this to true if there is a real cluster available -enabled = True +enabled = False [mock] ; Set this to enabled to use the mock enabled = True
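Finally, the check_type knob exposed by the C extension (default PYCBC_CHECK_STRICT) decides what happens when the remaining-operations counter is non-zero after a wait: PYCBC_CHECK_NONE only logs and resets it, PYCBC_CHECK_STRICT raises when the counter is genuinely inconsistent, and PYCBC_CHECK_FAIL forces the failure path, which is how the new misc_t test exercises it. A sketch with placeholder host and credentials::

    import couchbase._libcouchbase as _LCB
    from couchbase.cluster import Cluster, PasswordAuthenticator
    from couchbase.exceptions import InternalSDKError

    cluster = Cluster('couchbase://localhost')              # placeholder host
    cluster.authenticate(PasswordAuthenticator('Administrator', 'password'))
    cb = cluster.open_bucket('default')

    items = {'key-{}'.format(i): str(i) for i in range(5)}
    cb.upsert_multi(items)

    # Force the consistency-check failure path on the next operation.
    cb.check_type = _LCB.PYCBC_CHECK_FAIL
    try:
        cb.get_multi(items.keys())
    except InternalSDKError as e:
        print(e)    # "self->nremaining!=0, resetting to 0"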