
Commit 3e665f7

Pushing release 1.0.10 (release for 1.8.9 SDK).
1 parent d2ad2a8 commit 3e665f7

24 files changed, +601 -241 lines

RELEASE_NOTES

Lines changed: 6 additions & 0 deletions
@@ -1,3 +1,9 @@
+Release 1.0.10 (included in SDK 1.8.9)
+- Issue 239: Added support for verbose_name to ComputedProperty
+- Issue 238: Allow deferred library to defer @ndb.toplevel.
+- Issue 233: Fixed error for NoneType in repeated LocalStructuredProperty.
+- Fixed deserialization inefficiency for large repeated StructuredProperty.
+
 Release 1.0.9 (included in SDK 1.8.0)

 - Issue 229: Add support for 'distinct' queries
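
Of these notes, Issue 239 is the one visible API change; the sketch below illustrates it. The Account model and its fields are hypothetical and are not part of this commit; only the verbose_name keyword on ComputedProperty reflects what the release adds.

from google.appengine.ext import ndb

class Account(ndb.Model):  # hypothetical model, for illustration only
  first = ndb.StringProperty(verbose_name='First name')
  last = ndb.StringProperty(verbose_name='Last name')
  # Before 1.0.10, ComputedProperty did not accept verbose_name (Issue 239).
  full_name = ndb.ComputedProperty(
      lambda self: '%s %s' % (self.first, self.last),
      verbose_name='Full name')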

ndb/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 """NDB -- A new datastore API for the Google App Engine Python runtime."""

-__version__ = '1.0.9'
+__version__ = '1.0.10'

 __all__ = []


ndb/blobstore_test.py

Lines changed: 2 additions & 7 deletions
@@ -4,10 +4,10 @@
 import cStringIO
 import datetime
 import pickle
-import unittest

 from .google_imports import namespace_manager
 from .google_imports import datastore_types
+from .google_test_imports import unittest

 from . import blobstore
 from . import model
@@ -291,10 +291,5 @@ def testBlobReader(self):
     self.assertEqual(f.read(), 'abcde')
     self.assertEqual(f.blob_info, b)

-
-def main():
-  unittest.main()
-
-
 if __name__ == '__main__':
-  main()
+  unittest.main()

ndb/context.py

Lines changed: 26 additions & 11 deletions
@@ -585,6 +585,23 @@ def _get_memcache_deadline(self, options=None):
     # If this returns None, the system default (typically, 5) will apply.
     return ContextOptions.memcache_deadline(options, self._conn.config)

+
+  def _load_from_cache_if_available(self, key):
+    """Returns a cached Model instance given the entity key if available.
+
+    Args:
+      key: Key instance.
+
+    Returns:
+      A Model instance if the key exists in the cache.
+    """
+    if key in self._cache:
+      entity = self._cache[key]  # May be None, meaning "doesn't exist".
+      if entity is None or entity._key == key:
+        # If entity's key didn't change later, it is ok.
+        # See issue 13.  http://goo.gl/jxjOP
+        raise tasklets.Return(entity)
+
   # TODO: What about conflicting requests to different autobatchers,
   # e.g. tasklet A calls get() on a given key while tasklet B calls
   # delete()?  The outcome is nondeterministic, depending on which
@@ -604,17 +621,12 @@ def get(self, key, **ctx_options):
       **ctx_options: Context options.

     Returns:
-      A Model instance it the key exists in the datastore; None otherwise.
+      A Model instance if the key exists in the datastore; None otherwise.
     """
     options = _make_ctx_options(ctx_options)
     use_cache = self._use_cache(key, options)
     if use_cache:
-      if key in self._cache:
-        entity = self._cache[key]  # May be None, meaning "doesn't exist".
-        if entity is None or entity._key == key:
-          # If entity's key didn't change later, it is ok.
-          # See issue 13.  http://goo.gl/jxjOP
-          raise tasklets.Return(entity)
+      self._load_from_cache_if_available(key)

     use_datastore = self._use_datastore(key, options)
     if (use_datastore and
@@ -631,6 +643,9 @@ def get(self, key, **ctx_options):
       mvalue = yield self.memcache_get(mkey, for_cas=use_datastore,
                                        namespace=ns, use_cache=True,
                                        deadline=memcache_deadline)
+      # A value may have appeared while yielding.
+      if use_cache:
+        self._load_from_cache_if_available(key)
       if mvalue not in (_LOCKED, None):
         cls = model.Model._kind_map.get(key.kind())
         if cls is None:
@@ -794,7 +809,6 @@ def helper():
       try:
         inq = tasklets.SerialQueueFuture()
         query.run_to_queue(inq, self._conn, options)
-        is_ancestor_query = query.ancestor is not None
         while True:
           try:
             batch, i, ent = yield inq.getq()
@@ -903,10 +917,10 @@ def transaction(self, callback, **ctx_options):
           adapter=parent._conn.adapter,
           config=parent._conn.config,
           transaction=transaction)
-      old_ds_conn = datastore._GetConnection()
       tctx = parent.__class__(conn=tconn,
                               auto_batcher_class=parent._auto_batcher_class,
                               parent_context=parent)
+      tctx._old_ds_conn = datastore._GetConnection()
       ok = False
       try:
         # Copy memcache policies.  Note that get() will never use
@@ -929,7 +943,7 @@ def transaction(self, callback, **ctx_options):
          raise
        except Exception:
          t, e, tb = sys.exc_info()
-          yield tconn.async_rollback(options)  # TODO: Don't block???
+          tconn.async_rollback(options)  # Fire and forget.
          if issubclass(t, datastore_errors.Rollback):
            # TODO: Raise value using tasklets.get_return_value(t)?
            return
@@ -943,7 +957,8 @@ def transaction(self, callback, **ctx_options):
            raise tasklets.Return(result)
        # The finally clause will run the on-commit queue.
      finally:
-        datastore._SetConnection(old_ds_conn)
+        datastore._SetConnection(tctx._old_ds_conn)
+        del tctx._old_ds_conn
        if ok:
          # Call the callbacks collected in the transaction context's
          # on-commit queue.  If the transaction failed the queue is
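
Both the new _load_from_cache_if_available helper and Context.get() hand results back with raise tasklets.Return(...), the NDB idiom for returning a value from a generator-based tasklet (Python 2 generators cannot return a value directly). Below is a minimal sketch of that idiom using the public ndb aliases; get_or_default is a hypothetical tasklet, not part of this commit.

from google.appengine.ext import ndb

@ndb.tasklet
def get_or_default(key, default=None):
  # Yield the asynchronous get; the event loop resumes us with its result.
  entity = yield key.get_async()
  # A tasklet "returns" by raising ndb.Return with the value.
  raise ndb.Return(entity if entity is not None else default)

# Usage: calling the tasklet returns a Future right away.
# future = get_or_default(ndb.Key('Account', 1))
# account = future.get_result()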

ndb/context_test.py

Lines changed: 41 additions & 7 deletions
@@ -5,14 +5,14 @@
 import socket
 import threading
 import time
-import unittest

 from .google_imports import apiproxy_errors
 from .google_imports import datastore
 from .google_imports import datastore_errors
 from .google_imports import datastore_rpc
 from .google_imports import memcache
 from .google_imports import taskqueue
+from .google_test_imports import unittest

 from . import context
 from . import eventloop
@@ -717,6 +717,32 @@ def callback():
     foo().check_success()
     self.assertEqual(key.get(), None)

+  def testContext_TransactionRollbackException(self):
+    self.ExpectWarnings()
+    key = model.Key('Foo', 1)
+
+    class CustomException(Exception):
+      pass
+    def bad_transaction(*arg, **kwargs):
+      return datastore_rpc.datastore_pb.Transaction()
+    @tasklets.tasklet
+    def foo():
+      ent = model.Expando(key=key, bar=1)
+      @tasklets.tasklet
+      def callback():
+        # Cause rollback to return an exception
+        tasklets.get_context()._conn._end_transaction = bad_transaction
+        yield ent.put_async()
+        raise CustomException()
+      yield self.ctx.transaction(callback)
+    try:
+      foo().check_success()
+      self.fail()
+    except CustomException:
+      pass  # good
+
+    self.assertEqual(key.get(), None)
+
   def testContext_TransactionAddTask(self):
     self.ExpectWarnings()
     key = model.Key('Foo', 1)
@@ -1390,6 +1416,19 @@ def many_txns():
     conn_after = datastore._GetConnection()
     self.assertEqual(conn_before, conn_after)

+  def testMemcacheAndContextCache(self):
+    self.ctx.set_datastore_policy(True)
+    self.ctx.set_cache_policy(False)
+    self.ctx.set_memcache_policy(True)
+    class EmptyModel(model.Model):
+      pass
+    key = EmptyModel().put()
+    self.ctx.get(key).get_result()  # pull entity into memcache
+    self.ctx.set_cache_policy(True)
+    f1, f2 = self.ctx.get(key), self.ctx.get(key)
+    e1, e2 = f1.get_result(), f2.get_result()
+    self.assertTrue(e1 is e2)
+

 class ContextFutureCachingTests(test_utils.NDBTest):
   # See issue 62.  http://goo.gl/5zLkK
@@ -1463,10 +1502,5 @@ def testMemcacheSetFutureCaching(self):
     self.assertFalse(f1 is f4,
                      'Context memcache get future cached after result known.')

-
-def main():
-  unittest.main()
-
-
 if __name__ == '__main__':
-  main()
+  unittest.main()
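
The new testMemcacheAndContextCache drives the three caching layers through the Context policy setters. The sketch below is a rough equivalent written against the default context; it assumes an App Engine runtime (or a testbed-backed test), EmptyModel mirrors the model defined in the test, and everything else is illustrative only.

from google.appengine.ext import ndb

class EmptyModel(ndb.Model):  # mirrors the model defined in the new test
  pass

ctx = ndb.get_context()
ctx.set_datastore_policy(True)   # read and write the datastore
ctx.set_memcache_policy(True)    # mirror entities into memcache
ctx.set_cache_policy(False)      # skip the in-process cache for now

key = EmptyModel().put()         # write an entity
key.get()                        # pulls the entity into memcache only

ctx.set_cache_policy(True)       # re-enable the in-process cache
e1, e2 = key.get(), key.get()
assert e1 is e2                  # identical object, as the test asserts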

ndb/eventloop_test.py

Lines changed: 2 additions & 6 deletions
@@ -3,10 +3,10 @@
 import logging
 import os
 import time
-import unittest

 from .google_imports import apiproxy_stub_map
 from .google_imports import datastore_rpc
+from .google_test_imports import unittest

 from . import eventloop
 from . import test_utils
@@ -151,10 +151,6 @@ class M(model.Model): pass
     ev = eventloop.get_event_loop()  # A new event loop.
     self.assertEqual(len(ev.rpcs), 0)

-
-def main():
+if __name__ == '__main__':
   unittest.main()

-
-if __name__ == '__main__':
-  main()

ndb/google_imports.py

Lines changed: 5 additions & 2 deletions
@@ -5,10 +5,10 @@
 """

 try:
-  from google.appengine import api
+  from google.appengine.datastore import entity_pb
   normal_environment = True
 except ImportError:
-  from google3.apphosting import api
+  from google3.storage.onestore.v3 import entity_pb
   normal_environment = False

 if normal_environment:
@@ -27,6 +27,9 @@
   from google.appengine.api.prospective_search import prospective_search_pb
   from google.appengine.datastore import datastore_query
   from google.appengine.datastore import datastore_rpc
+  # This line will fail miserably for any app using auto_import_fixer
+  # because auto_import_fixer only set up simple alias between
+  # google and google3. But entity_pb is move to a different path completely.
   from google.appengine.datastore import entity_pb
   from google.appengine.ext.blobstore import blobstore as ext_blobstore
   from google.appengine.ext import db
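
The rest of the package reaches entity_pb through this shim rather than hard-coding either path (key_test.py below keeps its existing from .google_imports import entity_pb line). A minimal sketch of a consumer module, with a hypothetical helper name:

from .google_imports import entity_pb

def new_entity_proto():
  # EntityProto is the low-level wire format NDB serializes Model
  # instances to and from; both import paths expose an EntityProto
  # class with the same API.
  return entity_pb.EntityProto()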

ndb/google_test_imports.py

Lines changed: 2 additions & 0 deletions
@@ -9,6 +9,7 @@
   from google.appengine.api.prospective_search import prospective_search_stub
   from google.appengine.datastore import datastore_stub_util
   from google.appengine.ext import testbed
+  import unittest
 else:
   # Prospective search is optional.
   try:
@@ -17,3 +18,4 @@
     pass
   from google3.apphosting.datastore import datastore_stub_util
   from google3.apphosting.ext import testbed
+  from google3.testing.pybase import googletest as unittest

ndb/key_test.py

Lines changed: 2 additions & 7 deletions
@@ -2,11 +2,11 @@

 import base64
 import pickle
-import unittest

 from .google_imports import datastore_errors
 from .google_imports import datastore_types
 from .google_imports import entity_pb
+from .google_test_imports import unittest

 from . import eventloop
 from . import key
@@ -469,10 +469,5 @@ def testFromOldKey(self):
     old_key2 = new_key.to_old_key()
     self.assertEquals(old_key, old_key2)

-
-def main():
-  unittest.main()
-
-
 if __name__ == '__main__':
-  main()
+  unittest.main()

ndb/metadata_test.py

Lines changed: 3 additions & 7 deletions
@@ -1,9 +1,9 @@
 """Tests for metadata.py."""

-import unittest
-
 from .google_imports import namespace_manager

+from .google_test_imports import unittest
+
 from . import context
 from . import metadata
 from . import model
@@ -131,9 +131,5 @@ def testGetEntityGroupVersion(self):
     self.assertTrue(metadata.get_entity_group_version(foo_e.key) > 0)


-def main():
-  unittest.main()
-
-
 if __name__ == '__main__':
-  main()
+  unittest.main()
