40 import traceback |
40 import traceback |
41 from xml.sax import saxutils |
41 from xml.sax import saxutils |
42 |
42 |
43 from google.appengine.api import api_base_pb |
43 from google.appengine.api import api_base_pb |
44 from google.appengine.api import apiproxy_stub_map |
44 from google.appengine.api import apiproxy_stub_map |
|
45 from google.appengine.api import capabilities |
45 from google.appengine.api import datastore_errors |
46 from google.appengine.api import datastore_errors |
46 from google.appengine.api import datastore_types |
47 from google.appengine.api import datastore_types |
47 from google.appengine.datastore import datastore_index |
48 from google.appengine.datastore import datastore_index |
48 from google.appengine.datastore import datastore_pb |
49 from google.appengine.datastore import datastore_pb |
49 from google.appengine.runtime import apiproxy_errors |
50 from google.appengine.runtime import apiproxy_errors |
50 from google.appengine.datastore import entity_pb |
51 from google.appengine.datastore import entity_pb |
51 |
52 |
52 try: |
53 try: |
53 from google.appengine.api.labs.taskqueue import taskqueue_service_pb |
54 __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb') |
|
55 taskqueue_service_pb = sys.modules.get( |
|
56 'google.appengine.api.labs.taskqueue.taskqueue_service_pb') |
54 except ImportError: |
57 except ImportError: |
55 from google.appengine.api.taskqueue import taskqueue_service_pb |
58 from google.appengine.api.taskqueue import taskqueue_service_pb |
56 |
59 |
# Maximum number of subqueries a single (Multi)Query may expand into
# (e.g. via IN / != filters).
MAX_ALLOWABLE_QUERIES = 30

# Hard cap on the number of results a single fetch may return.
MAXIMUM_RESULTS = 1000

# Number of times a transaction function is retried before giving up.
DEFAULT_TRANSACTION_RETRIES = 3

# Capability sets used to probe datastore availability: reads need the
# base 'datastore_v3' capability, writes additionally need 'write'.
READ_CAPABILITY = capabilities.CapabilitySet('datastore_v3')
WRITE_CAPABILITY = capabilities.CapabilitySet(
    'datastore_v3',
    capabilities=['write'])

# Maximum number of indexed property values a single entity may carry.
_MAX_INDEXED_PROPERTIES = 5000

# Upper bound on how many datastore ids AllocateIds may hand out at once.
_MAX_ID_BATCH_SIZE = 1000 * 1000 * 1000
64 |
74 |
607 assert last_path.name() |
617 assert last_path.name() |
608 |
618 |
609 unindexed_properties = [p.name() for p in pb.raw_property_list()] |
619 unindexed_properties = [p.name() for p in pb.raw_property_list()] |
610 |
620 |
611 e = Entity(unicode(last_path.type().decode('utf-8')), |
621 e = Entity(unicode(last_path.type().decode('utf-8')), |
612 unindexed_properties=unindexed_properties) |
622 unindexed_properties=unindexed_properties, |
|
623 _app=pb.key().app()) |
613 ref = e.__key._Key__reference |
624 ref = e.__key._Key__reference |
614 ref.CopyFrom(pb.key()) |
625 ref.CopyFrom(pb.key()) |
615 |
626 |
616 temporary_values = {} |
627 temporary_values = {} |
617 |
628 |
749 __app = None |
760 __app = None |
750 __orderings = None |
761 __orderings = None |
751 __cached_count = None |
762 __cached_count = None |
752 __hint = None |
763 __hint = None |
753 __ancestor = None |
764 __ancestor = None |
|
765 __compile = None |
|
766 |
|
767 __cursor = None |
754 |
768 |
755 __filter_order = None |
769 __filter_order = None |
756 __filter_counter = 0 |
770 __filter_counter = 0 |
757 |
771 |
758 __inequality_prop = None |
772 __inequality_prop = None |
759 __inequality_count = 0 |
773 __inequality_count = 0 |
760 |
774 |
761 def __init__(self, kind=None, filters={}, _app=None, keys_only=False, |
775 def __init__(self, kind=None, filters={}, _app=None, keys_only=False, |
762 _namespace=None): |
776 compile=True, cursor=None, _namespace=None): |
763 """Constructor. |
777 """Constructor. |
764 |
778 |
765 Raises BadArgumentError if kind is not a string. Raises BadValueError or |
779 Raises BadArgumentError if kind is not a string. Raises BadValueError or |
766 BadFilterError if filters is not a dictionary of valid filters. |
780 BadFilterError if filters is not a dictionary of valid filters. |
767 |
781 |
782 self.update(filters) |
796 self.update(filters) |
783 |
797 |
784 self.__app = datastore_types.ResolveAppIdNamespace(_app, |
798 self.__app = datastore_types.ResolveAppIdNamespace(_app, |
785 _namespace).to_encoded() |
799 _namespace).to_encoded() |
786 self.__keys_only = keys_only |
800 self.__keys_only = keys_only |
|
801 self.__compile = compile |
|
802 self.__cursor = cursor |
787 |
803 |
788 def Order(self, *orderings): |
804 def Order(self, *orderings): |
789 """Specify how the query results should be sorted. |
805 """Specify how the query results should be sorted. |
790 |
806 |
791 Result entities will be sorted by the first property argument, then by the |
807 Result entities will be sorted by the first property argument, then by the |
933 |
949 |
def IsKeysOnly(self):
  """Indicates whether this query fetches only keys.

  Returns:
    True if this query was created keys-only, False otherwise.
  """
  keys_only = self.__keys_only
  return keys_only
937 |
953 |
|
def GetCompiledQuery(self):
  """Return the compiled form of this query, captured by its last run.

  The compiled query is only recorded when the query was executed with
  compilation enabled and the backend supplied one.

  Raises:
    AssertionError: if the query has not been executed yet, or if no
      compiled representation is available for this kind of query.
  """
  try:
    compiled = self.__compiled_query
  except AttributeError:
    raise AssertionError('No cursor available, either this query has not '
                         'been executed or there is no compilation '
                         'available for this kind of query')
  return compiled
961 |
938 def Run(self): |
962 def Run(self): |
939 """Runs this query. |
963 """Runs this query. |
940 |
964 |
941 If a filter string is invalid, raises BadFilterError. If a filter value is |
965 If a filter string is invalid, raises BadFilterError. If a filter value is |
942 invalid, raises BadValueError. If an IN filter is provided, and a sort |
966 invalid, raises BadValueError. If an IN filter is provided, and a sort |
947 |
971 |
948 Returns: |
972 Returns: |
949 # an iterator that provides access to the query results |
973 # an iterator that provides access to the query results |
950 Iterator |
974 Iterator |
951 """ |
975 """ |
|
976 self.__compile = False |
952 return self._Run() |
977 return self._Run() |
953 |
978 |
954 def _Run(self, limit=None, offset=None, |
979 def _Run(self, limit=None, offset=None, |
955 prefetch_count=None, next_count=None): |
980 prefetch_count=None, next_count=None): |
956 """Runs this query, with an optional result limit and an optional offset. |
981 """Runs this query, with an optional result limit and an optional offset. |
961 This is not intended to be used by application developers. Use Get() |
986 This is not intended to be used by application developers. Use Get() |
962 instead! |
987 instead! |
963 """ |
988 """ |
964 pb = self._ToPb(limit, offset, prefetch_count) |
989 pb = self._ToPb(limit, offset, prefetch_count) |
965 result = datastore_pb.QueryResult() |
990 result = datastore_pb.QueryResult() |
|
991 api_call = 'RunQuery' |
|
992 if self.__cursor: |
|
993 pb = self._ToCompiledPb(pb, self.__cursor, prefetch_count) |
|
994 api_call = 'RunCompiledQuery' |
966 |
995 |
967 try: |
996 try: |
968 apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', pb, result) |
997 apiproxy_stub_map.MakeSyncCall('datastore_v3', api_call, pb, result) |
969 except apiproxy_errors.ApplicationError, err: |
998 except apiproxy_errors.ApplicationError, err: |
970 try: |
999 try: |
971 _ToDatastoreError(err) |
1000 raise _ToDatastoreError(err) |
972 except datastore_errors.NeedIndexError, exc: |
1001 except datastore_errors.NeedIndexError, exc: |
973 yaml = datastore_index.IndexYamlForQuery( |
1002 yaml = datastore_index.IndexYamlForQuery( |
974 *datastore_index.CompositeIndexForQuery(pb)[1:-1]) |
1003 *datastore_index.CompositeIndexForQuery(pb)[1:-1]) |
975 raise datastore_errors.NeedIndexError( |
1004 raise datastore_errors.NeedIndexError( |
976 str(exc) + '\nThis query needs this index:\n' + yaml) |
1005 str(exc) + '\nThis query needs this index:\n' + yaml) |
|
1006 |
|
1007 if self.__compile: |
|
1008 if result.has_compiled_query(): |
|
1009 self.__compiled_query = result.compiled_query() |
|
1010 else: |
|
1011 self.__compiled_query = None |
977 |
1012 |
978 return Iterator(result, batch_size=next_count) |
1013 return Iterator(result, batch_size=next_count) |
979 |
1014 |
980 def Get(self, limit, offset=0): |
1015 def Get(self, limit, offset=0): |
981 """Fetches and returns a maximum number of results from the query. |
1016 """Fetches and returns a maximum number of results from the query. |
1037 just return this number. Providing this argument makes the count |
1072 just return this number. Providing this argument makes the count |
1038 operation more efficient. |
1073 operation more efficient. |
1039 Returns: |
1074 Returns: |
1040 The number of results. |
1075 The number of results. |
1041 """ |
1076 """ |
|
1077 self.__compile = False |
1042 if self.__cached_count: |
1078 if self.__cached_count: |
1043 return self.__cached_count |
1079 return self.__cached_count |
1044 |
1080 |
1045 resp = api_base_pb.Integer64Proto() |
1081 resp = api_base_pb.Integer64Proto() |
1046 try: |
1082 try: |
1226 if self.__kind is not None: |
1262 if self.__kind is not None: |
1227 pb.set_kind(self.__kind.encode('utf-8')) |
1263 pb.set_kind(self.__kind.encode('utf-8')) |
1228 pb.set_keys_only(bool(self.__keys_only)) |
1264 pb.set_keys_only(bool(self.__keys_only)) |
1229 if self.__app: |
1265 if self.__app: |
1230 pb.set_app(self.__app.encode('utf-8')) |
1266 pb.set_app(self.__app.encode('utf-8')) |
|
1267 if self.__compile: |
|
1268 pb.set_compile(True) |
1231 if limit is not None: |
1269 if limit is not None: |
1232 pb.set_limit(limit) |
1270 pb.set_limit(limit) |
1233 if offset is not None: |
1271 if offset is not None: |
1234 pb.set_offset(offset) |
1272 pb.set_offset(offset) |
1235 if count is not None: |
1273 if count is not None: |
1271 order.set_property(property.encode('utf-8')) |
1309 order.set_property(property.encode('utf-8')) |
1272 order.set_direction(direction) |
1310 order.set_direction(direction) |
1273 |
1311 |
1274 return pb |
1312 return pb |
1275 |
1313 |
|
def _ToCompiledPb(self, query_pb, cursor, count=None):
  """Build a RunCompiledQueryRequest from a query pb and a cursor.

  Args:
    query_pb: the datastore_pb.Query this request originated from.
    cursor: a compiled-query pb to resume from.
    count: optional number of results to request; omitted when None.

  Returns:
    A datastore_pb.RunCompiledQueryRequest wrapping both protos.
  """
  request = datastore_pb.RunCompiledQueryRequest()
  request.mutable_original_query().CopyFrom(query_pb)
  request.mutable_compiled_query().CopyFrom(cursor)
  if count is not None:
    request.set_count(count)
  return request
1276 |
1321 |
1277 def AllocateIds(model_key, size): |
1322 def AllocateIds(model_key, size): |
1278 """Allocates a range of IDs of size for the key defined by model_key |
1323 """Allocates a range of IDs of size for the key defined by model_key |
1279 |
1324 |
1280 Allocates a range of IDs in the datastore such that those IDs will not |
1325 Allocates a range of IDs in the datastore such that those IDs will not |
1296 'Cannot allocate IDs for more than one model key at a time') |
1341 'Cannot allocate IDs for more than one model key at a time') |
1297 |
1342 |
1298 if size > _MAX_ID_BATCH_SIZE: |
1343 if size > _MAX_ID_BATCH_SIZE: |
1299 raise datastore_errors.BadArgumentError( |
1344 raise datastore_errors.BadArgumentError( |
1300 'Cannot allocate more than %s ids at a time' % _MAX_ID_BATCH_SIZE) |
1345 'Cannot allocate more than %s ids at a time' % _MAX_ID_BATCH_SIZE) |
|
1346 if size <= 0: |
|
1347 raise datastore_errors.BadArgumentError( |
|
1348 'Cannot allocate less than 1 id') |
1301 |
1349 |
1302 req = datastore_pb.AllocateIdsRequest() |
1350 req = datastore_pb.AllocateIdsRequest() |
1303 req.mutable_model_key().CopyFrom(keys[0]._Key__reference) |
1351 req.mutable_model_key().CopyFrom(keys[0]._ToPb()) |
1304 req.set_size(size) |
1352 req.set_size(size) |
1305 |
1353 |
1306 resp = datastore_pb.AllocateIdsResponse() |
1354 resp = datastore_pb.AllocateIdsResponse() |
1307 try: |
1355 try: |
1308 apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AllocateIds', req, resp) |
1356 apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AllocateIds', req, resp) |
1336 raise datastore_errors.BadQueryError( |
1384 raise datastore_errors.BadQueryError( |
1337 'MultiQuery does not support keys_only.') |
1385 'MultiQuery does not support keys_only.') |
1338 |
1386 |
1339 self.__bound_queries = bound_queries |
1387 self.__bound_queries = bound_queries |
1340 self.__orderings = orderings |
1388 self.__orderings = orderings |
|
1389 self.__compile = False |
1341 |
1390 |
1342 def __str__(self): |
1391 def __str__(self): |
1343 res = 'MultiQuery: ' |
1392 res = 'MultiQuery: ' |
1344 for query in self.__bound_queries: |
1393 for query in self.__bound_queries: |
1345 res = '%s %s' % (res, str(query)) |
1394 res = '%s %s' % (res, str(query)) |
1450 return result |
1499 return result |
1451 return 0 |
1500 return 0 |
1452 |
1501 |
1453 def __GetValueForId(self, sort_order_entity, identifier, sort_order): |
1502 def __GetValueForId(self, sort_order_entity, identifier, sort_order): |
1454 value = _GetPropertyValue(sort_order_entity.__entity, identifier) |
1503 value = _GetPropertyValue(sort_order_entity.__entity, identifier) |
1455 entity_key = sort_order_entity.__entity.key() |
1504 if isinstance(value, list): |
1456 if (entity_key, identifier) in self.__min_max_value_cache: |
1505 entity_key = sort_order_entity.__entity.key() |
1457 value = self.__min_max_value_cache[(entity_key, identifier)] |
1506 if (entity_key, identifier) in self.__min_max_value_cache: |
1458 elif isinstance(value, list): |
1507 value = self.__min_max_value_cache[(entity_key, identifier)] |
1459 if sort_order == Query.DESCENDING: |
1508 elif sort_order == Query.DESCENDING: |
1460 value = min(value) |
1509 value = min(value) |
1461 else: |
1510 else: |
1462 value = max(value) |
1511 value = max(value) |
1463 self.__min_max_value_cache[(entity_key, identifier)] = value |
1512 self.__min_max_value_cache[(entity_key, identifier)] = value |
1464 |
1513 |
1559 for i in self.Run(): |
1608 for i in self.Run(): |
1560 count += 1 |
1609 count += 1 |
1561 return count |
1610 return count |
1562 else: |
1611 else: |
1563 return len(self.Get(limit)) |
1612 return len(self.Get(limit)) |
|
1613 |
|
def GetCompiledQuery(self):
  """Always fails: a MultiQuery has no single compiled representation.

  MultiQuery fans out into several subqueries (built from "IN" or "!="
  filters), so there is no one cursor to hand back.

  Raises:
    AssertionError: unconditionally.
  """
  raise AssertionError('No cursor available for a MultiQuery (queries '
                       'using "IN" or "!=" operators)')
1564 |
1617 |
1565 def __setitem__(self, query_filter, value): |
1618 def __setitem__(self, query_filter, value): |
1566 """Add a new filter by setting it on all subqueries. |
1619 """Add a new filter by setting it on all subqueries. |
1567 |
1620 |
1568 If any of the setting operations raise an exception, the ones |
1621 If any of the setting operations raise an exception, the ones |
1621 |
1674 |
def __iter__(self):
  """Iterate over the subqueries bound in this MultiQuery, in order."""
  subqueries = self.__bound_queries
  return iter(subqueries)
1624 |
1677 |
1625 |
1678 |
|
1679 |
1626 class Iterator(object): |
1680 class Iterator(object): |
1627 """An iterator over the results of a datastore query. |
1681 """An iterator over the results of a datastore query. |
1628 |
1682 |
1629 Iterators are used to access the results of a Query. An iterator is |
1683 Iterators are used to access the results of a Query. An iterator is |
1630 obtained by building a Query, then calling Run() on it. |
1684 obtained by building a Query, then calling Run() on it. |
1665 |
1719 |
1666 Returns: |
1720 Returns: |
1667 # a list of entities or keys |
1721 # a list of entities or keys |
1668 [Entity or Key, ...] |
1722 [Entity or Key, ...] |
1669 """ |
1723 """ |
|
1724 if count > MAXIMUM_RESULTS: |
|
1725 count = MAXIMUM_RESULTS |
1670 entity_list = self._Next(count) |
1726 entity_list = self._Next(count) |
1671 while len(entity_list) < count and self.__more_results: |
1727 while len(entity_list) < count and self.__more_results: |
1672 next_results = self._Next(count - len(entity_list), self.__batch_size) |
1728 next_results = self._Next(count - len(entity_list)) |
1673 if not next_results: |
1729 if not next_results: |
1674 break |
1730 break |
1675 entity_list += next_results |
1731 entity_list += next_results |
1676 return entity_list; |
1732 return entity_list; |
1677 |
1733 |
2163 datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError, |
2219 datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError, |
2164 datastore_pb.Error.TIMEOUT: datastore_errors.Timeout, |
2220 datastore_pb.Error.TIMEOUT: datastore_errors.Timeout, |
2165 } |
2221 } |
2166 |
2222 |
2167 if err.application_error in errors: |
2223 if err.application_error in errors: |
2168 raise errors[err.application_error](err.error_detail) |
2224 return errors[err.application_error](err.error_detail) |
2169 else: |
2225 else: |
2170 raise datastore_errors.Error(err.error_detail) |
2226 return datastore_errors.Error(err.error_detail) |