thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py
changeset 686:df109be0567c
parent 297:35211afcd563
child 828:f5fd65cc3bf3
--- a/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Sat Dec 06 16:52:21 2008 +0000
@@ -38,14 +38,16 @@
 import datetime
 import logging
 import os
-import pickle
 import struct
 import sys
 import tempfile
 import threading
 import warnings
 
+import cPickle as pickle
+
 from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub
 from google.appengine.api import datastore
 from google.appengine.api import datastore_admin
 from google.appengine.api import datastore_errors
@@ -64,12 +66,18 @@
 datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
 
 
+# Largest number of results a single query is allowed to return.
+_MAXIMUM_RESULTS = 1000
+
+
+# Largest offset a query may specify; anything bigger is rejected as
+# BAD_REQUEST in _Dynamic_RunQuery.
+_MAX_QUERY_OFFSET = 4000
+
+
 class _StoredEntity(object):
   """Simple wrapper around an entity stored by the stub.
 
   Public properties:
-    native: Native protobuf Python object, entity_pb.EntityProto.
-    encoded: Encoded binary representation of above protobuf.
+    protobuf: Native protobuf Python object, entity_pb.EntityProto.
+    encoded_protobuf: Encoded binary representation of the above protobuf.
+    native: datastore.Entity instance.
   """
 
   def __init__(self, entity):
@@ -78,12 +86,14 @@
     Args:
       entity: entity_pb.EntityProto to store.
     """
-    self.native = entity
+    self.protobuf = entity
 
-    self.encoded = entity.Encode()
+    self.encoded_protobuf = entity.Encode()
+
+    self.native = datastore.Entity._FromPb(entity)
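
The wrapper now caches all three representations so later RPCs can reuse whichever form they need: _Dynamic_Get returns protobuf, Write pickles encoded_protobuf, and _Dynamic_RunQuery filters and sorts native. A minimal standalone sketch of the idea, using a stand-in protobuf class instead of entity_pb.EntityProto:

class _FakeProto(object):
  """Stand-in for entity_pb.EntityProto; only Encode() matters here."""
  def __init__(self, data):
    self.data = data
  def Encode(self):
    return repr(self.data)

class _StoredThing(object):
  def __init__(self, pb):
    self.protobuf = pb                   # returned as-is by Get
    self.encoded_protobuf = pb.Encode()  # written as-is by Write
    self.native = dict(pb.data)          # filtered/sorted by RunQuery

stored = _StoredThing(_FakeProto({'content': 'hello'}))
assert stored.native['content'] == 'hello'
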
 
 
-class DatastoreFileStub(object):
+class DatastoreFileStub(apiproxy_stub.APIProxyStub):
   """ Persistent stub for the Python datastore API.
 
   Stores all entities in memory, and persists them to a file as pickled
@@ -114,8 +124,12 @@
     users.User: entity_pb.PropertyValue.kUserValueGroup,
     }
 
-  def __init__(self, app_id, datastore_file, history_file,
-               require_indexes=False):
+  def __init__(self,
+               app_id,
+               datastore_file,
+               history_file,
+               require_indexes=False,
+               service_name='datastore_v3'):
     """Constructor.
 
     Initializes and loads the datastore from the backing files, if they exist.
@@ -128,7 +142,10 @@
           datastore_file.
       require_indexes: bool, default False.  If True, composite indexes must
           exist in index.yaml for queries that need them.
+      service_name: Service name expected for all calls.
     """
+    super(DatastoreFileStub, self).__init__(service_name)
+
 
     assert isinstance(app_id, basestring) and app_id != ''
     self.__app_id = app_id
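
With the stub now built on apiproxy_stub.APIProxyStub, it gets registered with the API proxy under its service name in the usual way. A usage sketch, assuming the SDK is importable; the app id and file paths are placeholders, not taken from this changeset:

from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_file_stub

# Start from a fresh stub map, as tests commonly do.
apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
stub = datastore_file_stub.DatastoreFileStub(
    'myapp',                          # app_id (placeholder)
    '/tmp/myapp.datastore',           # datastore_file (placeholder)
    '/tmp/myapp.datastore.history',   # history_file (placeholder)
    require_indexes=False)            # service_name defaults to 'datastore_v3'
apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
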
@@ -137,6 +154,8 @@
 
     self.__entities = {}
 
+    self.__schema_cache = {}
+
     self.__tx_snapshot = {}
 
     self.__queries = {}
@@ -170,6 +189,37 @@
     self.__queries = {}
     self.__transactions = {}
     self.__query_history = {}
+    self.__schema_cache = {}
+
+  def _AppKindForKey(self, key):
+    """ Get the (app, kind) tuple for the given key.
+
+    The (app, kind) tuple is used as an index into several internal
+    dictionaries, e.g. __entities.
+
+    Args:
+      key: entity_pb.Reference
+
+    Returns:
+      Tuple (app, kind); both elements are unicode strings.
+    """
+    last_path = key.path().element_list()[-1]
+    return key.app(), last_path.type()
+
+  def _StoreEntity(self, entity):
+    """ Store the given entity.
+
+    Args:
+      entity: entity_pb.EntityProto
+    """
+    key = entity.key()
+    app_kind = self._AppKindForKey(key)
+    if app_kind not in self.__entities:
+      self.__entities[app_kind] = {}
+    self.__entities[app_kind][key] = _StoredEntity(entity)
+
+    if app_kind in self.__schema_cache:
+      del self.__schema_cache[app_kind]
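
A standalone sketch (plain dicts, no SDK types) of the bookkeeping these two helpers centralize: entities are bucketed by (app, kind), and storing into a kind drops that kind's cached schema entry:

entities = {}
schema_cache = {}

def store_entity(app, kind, key, entity):
  entities.setdefault((app, kind), {})[key] = entity
  schema_cache.pop((app, kind), None)   # schema for this kind is now stale

store_entity('myapp', 'Greeting', 'key1', {'content': 'hello'})
assert ('myapp', 'Greeting') in entities and not schema_cache
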
 
   def Read(self):
     """ Reads the datastore and history files into memory.
@@ -198,11 +248,9 @@
           raise datastore_errors.InternalError(error_msg %
                                                (self.__datastore_file, e))
 
+        self._StoreEntity(entity)
+
         last_path = entity.key().path().element_list()[-1]
-        app_kind = (entity.key().app(), last_path.type())
-        kind_dict = self.__entities.setdefault(app_kind, {})
-        kind_dict[entity.key()] = _StoredEntity(entity)
-
         if last_path.has_id() and last_path.id() >= self.__next_id:
           self.__next_id = last_path.id() + 1
 
@@ -234,7 +282,7 @@
       encoded = []
       for kind_dict in self.__entities.values():
         for entity in kind_dict.values():
-          encoded.append(entity.encoded)
+          encoded.append(entity.encoded_protobuf)
 
       self.__WritePickled(encoded, self.__datastore_file)
 
@@ -276,7 +324,11 @@
       return
 
     tmpfile = openfile(os.tempnam(os.path.dirname(filename)), 'wb')
-    pickle.dump(obj, tmpfile, 1)
+
+    pickler = pickle.Pickler(tmpfile, protocol=1)
+    pickler.fast = True
+    pickler.dump(obj)
+
     tmpfile.close()
 
     self.__file_lock.acquire()
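
The switch to a cPickle Pickler in fast mode skips the pickler's memo table, which speeds up dumping the flat list of encoded entity strings at the cost of not supporting self-referential objects. A standalone illustration of the same calls against an in-memory buffer:

import cStringIO
import cPickle as pickle

buf = cStringIO.StringIO()
pickler = pickle.Pickler(buf, protocol=1)
pickler.fast = True                     # no memo table: faster, but no cycles
pickler.dump(['encoded-entity-1', 'encoded-entity-2'])
assert pickle.loads(buf.getvalue()) == ['encoded-entity-1', 'encoded-entity-2']
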
@@ -296,14 +348,12 @@
     """ The main RPC entry point. service must be 'datastore_v3'. So far, the
     supported calls are 'Get', 'Put', 'RunQuery', 'Next', and 'Count'.
     """
-
-    assert service == 'datastore_v3'
+    super(DatastoreFileStub, self).MakeSyncCall(service,
+                                                call,
+                                                request,
+                                                response)
 
     explanation = []
-    assert request.IsInitialized(explanation), explanation
-
-    (getattr(self, "_Dynamic_" + call))(request, response)
-
     assert response.IsInitialized(explanation), explanation
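
The hand-rolled dispatch is gone because the shared base class now performs it. Roughly paraphrased (a sketch, not the apiproxy_stub source), the base MakeSyncCall checks the service name and request, then routes to the matching _Dynamic_<call> method:

class _APIProxyStubSketch(object):
  """Paraphrase of the dispatch the base class is assumed to provide."""

  def __init__(self, service_name):
    self._service_name = service_name

  def MakeSyncCall(self, service, call, request, response):
    assert service == self._service_name
    getattr(self, '_Dynamic_' + call)(request, response)
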
 
   def QueryHistory(self):
@@ -322,7 +372,6 @@
       assert clone.has_key()
       assert clone.key().path().element_size() > 0
 
-      app = clone.key().app()
       last_path = clone.key().path().element_list()[-1]
       if last_path.id() == 0 and not last_path.has_name():
         self.__id_lock.acquire()
@@ -343,9 +392,7 @@
 
     try:
       for clone in clones:
-        last_path = clone.key().path().element_list()[-1]
-        kind_dict = self.__entities.setdefault((app, last_path.type()), {})
-        kind_dict[clone.key()] = _StoredEntity(clone)
+        self._StoreEntity(clone)
     finally:
       self.__entities_lock.release()
 
@@ -357,12 +404,11 @@
 
   def _Dynamic_Get(self, get_request, get_response):
     for key in get_request.key_list():
-        app = key.app()
-        last_path = key.path().element_list()[-1]
+        app_kind = self._AppKindForKey(key)
 
         group = get_response.add_entity()
         try:
-          entity = self.__entities[app, last_path.type()][key].native
+          entity = self.__entities[app_kind][key].protobuf
         except KeyError:
           entity = None
 
@@ -374,12 +420,13 @@
     self.__entities_lock.acquire()
     try:
       for key in delete_request.key_list():
+        app_kind = self._AppKindForKey(key)
         try:
-          app = key.app()
-          kind = key.path().element_list()[-1].type()
-          del self.__entities[app, kind][key]
-          if not self.__entities[app, kind]:
-            del self.__entities[app, kind]
+          del self.__entities[app_kind][key]
+          if not self.__entities[app_kind]:
+            del self.__entities[app_kind]
+
+          del self.__schema_cache[app_kind]
         except KeyError:
           pass
 
@@ -396,12 +443,15 @@
     else:
       self.__tx_lock.release()
 
+    if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
+      raise apiproxy_errors.ApplicationError(
+          datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')
+
     app = query.app()
 
     if self.__require_indexes:
-      required_index = datastore_index.CompositeIndexForQuery(query)
-      if required_index is not None:
-        kind, ancestor, props, num_eq_filters = required_index
+      required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
+      if required:
         required_key = kind, ancestor, props
         indexes = self.__indexes.get(app)
         if not indexes:
@@ -432,7 +482,7 @@
     try:
       query.set_app(app)
       results = self.__entities[app, query.kind()].values()
-      results = [datastore.Entity._FromPb(entity.native) for entity in results]
+      results = [entity.native for entity in results]
     except KeyError:
       results = []
 
@@ -456,24 +506,24 @@
       prop = filt.property(0).name().decode('utf-8')
       op = operators[filt.op()]
 
+      filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
+                         for filter_prop in filt.property_list()]
+
       def passes(entity):
         """ Returns True if the entity passes the filter, False otherwise. """
-        entity_vals = entity.get(prop, [])
+        if prop in datastore_types._SPECIAL_PROPERTIES:
+          entity_vals = self.__GetSpecialPropertyValue(entity, prop)
+        else:
+          entity_vals = entity.get(prop, [])
+
         if not isinstance(entity_vals, list):
           entity_vals = [entity_vals]
 
-        entity_property_list = [datastore_types.ToPropertyPb(prop, value)
-                                for value in entity_vals]
-
-        for entity_prop in entity_property_list:
-          fixed_entity_val = datastore_types.FromPropertyPb(entity_prop)
-
+        for fixed_entity_val in entity_vals:
           if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
             continue
 
-          for filter_prop in filt.property_list():
-            filter_val = datastore_types.FromPropertyPb(filter_prop)
-
+          for filter_val in filter_val_list:
             fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
               fixed_entity_val.__class__)
             filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
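
The reworked filter decodes the query's operand values once, outside the per-entity predicate, and compares the cached native entity values directly instead of round-tripping each one through ToPropertyPb/FromPropertyPb. A simplified standalone version of that closure pattern (plain dicts and int values, no type-tag checks):

import operator

_OPS = {'<': operator.lt, '=': operator.eq, '>': operator.gt}

def make_filter(prop, op, raw_vals, decode):
  filter_vals = [decode(v) for v in raw_vals]   # decoded once, up front
  def passes(entity):
    entity_vals = entity.get(prop, [])
    if not isinstance(entity_vals, list):
      entity_vals = [entity_vals]
    return any(_OPS[op](ev, fv) for ev in entity_vals for fv in filter_vals)
  return passes

passes = make_filter('count', '>', ['3'], int)
assert passes({'count': 5}) and not passes({'count': [1, 2]})
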
@@ -499,7 +549,11 @@
       results = filter(passes, results)
 
     def has_prop_indexed(entity, prop):
-      """Returns True if prop is in the entity and is not a raw property."""
+      """Returns True if prop is a special property, or if it is present in
+      the entity and is not a raw property."""
+      if prop in datastore_types._SPECIAL_PROPERTIES:
+        return True
+
       values = entity.get(prop, [])
       if not isinstance(values, (tuple, list)):
         values = [values]
@@ -523,13 +577,17 @@
 
         reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
 
-        a_val = a[prop]
-        if isinstance(a_val, list):
-          a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
+        if prop in datastore_types._SPECIAL_PROPERTIES:
+          a_val = self.__GetSpecialPropertyValue(a, prop)
+          b_val = self.__GetSpecialPropertyValue(b, prop)
+        else:
+          a_val = a[prop]
+          if isinstance(a_val, list):
+            a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
 
-        b_val = b[prop]
-        if isinstance(b_val, list):
-          b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
+          b_val = b[prop]
+          if isinstance(b_val, list):
+            b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
 
         cmped = order_compare_properties(a_val, b_val)
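
For ordering, a multi-valued property is still reduced to a single comparison key: the smallest value for an ascending sort, the largest for a descending one (reverse=True puts it at index 0). A simplified illustration without the order_compare_properties comparator:

values = [4, 1, 9]
ascending_key = sorted(values)[0]                 # 1: smallest value wins
descending_key = sorted(values, reverse=True)[0]  # 9: largest value wins
assert (ascending_key, descending_key) == (1, 9)
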
 
@@ -573,6 +631,8 @@
       offset = query.offset()
     if query.has_limit():
       limit = query.limit()
+    if limit > _MAXIMUM_RESULTS:
+      limit = _MAXIMUM_RESULTS
     results = results[offset:limit + offset]
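
Together with the earlier offset guard, the result window is now bounded on both sides: offsets above _MAX_QUERY_OFFSET are rejected and limits are clamped to _MAXIMUM_RESULTS before slicing. A toy paraphrase with plain ints in place of the query protobuf fields:

_MAXIMUM_RESULTS = 1000
_MAX_QUERY_OFFSET = 4000

def result_window(results, offset, limit):
  if offset > _MAX_QUERY_OFFSET:
    raise ValueError('Too big query offset.')
  if limit > _MAXIMUM_RESULTS:
    limit = _MAXIMUM_RESULTS
  return results[offset:limit + offset]

assert result_window(range(5000), 10, 2000) == range(10, 1010)
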
 
     clone = datastore_pb.Query()
@@ -584,7 +644,6 @@
       self.__query_history[clone] = 1
     self.__WriteHistory()
 
-    results = [e._ToPb() for e in results]
     self.__cursor_lock.acquire()
     cursor = self.__next_cursor
     self.__next_cursor += 1
@@ -594,7 +653,6 @@
     query_result.mutable_cursor().set_cursor(cursor)
     query_result.set_more_results(len(results) > 0)
 
-
   def _Dynamic_Next(self, next_request, query_result):
     cursor = next_request.cursor().cursor()
 
@@ -605,13 +663,12 @@
                                              'Cursor %d not found' % cursor)
 
     count = next_request.count()
-    for r in results[:count]:
-      query_result.add_result().CopyFrom(r)
+    results_pb = [r._ToPb() for r in results[:count]]
+    query_result.result_list().extend(results_pb)
     del results[:count]
 
     query_result.set_more_results(len(results) > 0)
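
Query results now stay in their cached native form until a client actually asks for them; only the batch returned by each Next call is converted back to protobufs. A standalone sketch, with a lambda standing in for datastore.Entity._ToPb:

results = [{'n': i} for i in range(10)]   # pending results for one cursor
to_pb = lambda e: ('pb', e['n'])          # placeholder for Entity._ToPb
count = 3                                 # next_request.count()
batch = [to_pb(r) for r in results[:count]]
del results[:count]
assert batch == [('pb', 0), ('pb', 1), ('pb', 2)] and len(results) == 7
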
 
-
   def _Dynamic_Count(self, query, integer64proto):
     query_result = datastore_pb.QueryResult()
     self._Dynamic_RunQuery(query, query_result)
@@ -620,7 +677,6 @@
     integer64proto.set_value(count)
     del self.__queries[cursor]
 
-
   def _Dynamic_BeginTransaction(self, request, transaction):
     self.__tx_handle_lock.acquire()
     handle = self.__next_tx_handle
@@ -670,16 +726,20 @@
 
     for app, kind in self.__entities:
       if app == app_str:
+        app_kind = (app, kind)
+        if app_kind in self.__schema_cache:
+          kinds.append(self.__schema_cache[app_kind])
+          continue
+
         kind_pb = entity_pb.EntityProto()
         kind_pb.mutable_key().set_app('')
         kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
         kind_pb.mutable_entity_group()
-        kinds.append(kind_pb)
 
         props = {}
 
-        for entity in self.__entities[(app, kind)].values():
-          for prop in entity.native.property_list():
+        for entity in self.__entities[app_kind].values():
+          for prop in entity.protobuf.property_list():
             if prop.name() not in props:
               props[prop.name()] = entity_pb.PropertyValue()
             props[prop.name()].MergeFrom(prop.value())
@@ -710,7 +770,11 @@
           prop_pb.set_name(name)
           prop_pb.mutable_value().CopyFrom(value_pb)
 
-    schema.kind_list().extend(kinds)
+        kinds.append(kind_pb)
+        self.__schema_cache[app_kind] = kind_pb
+
+    for kind_pb in kinds:
+      schema.add_kind().CopyFrom(kind_pb)
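
GetSchema is now memoized per (app, kind): the kind protobuf is built once from a full scan, reused on later calls, and discarded whenever _StoreEntity or _Dynamic_Delete touches that kind again. A dict-based sketch of the same cache shape:

schema_cache = {}

def get_kind_schema(app_kind, build_kind_pb):
  if app_kind not in schema_cache:
    schema_cache[app_kind] = build_kind_pb()   # expensive scan happens once
  return schema_cache[app_kind]

calls = []
build = lambda: calls.append(1) or 'kind_pb'
assert get_kind_schema(('myapp', 'Greeting'), build) == 'kind_pb'
assert get_kind_schema(('myapp', 'Greeting'), build) == 'kind_pb'
assert len(calls) == 1                         # second call hit the cache
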
 
   def _Dynamic_CreateIndex(self, index, id_response):
     if index.id() != 0:
@@ -790,3 +854,23 @@
           return stored_index
 
     return None
+
+  @classmethod
+  def __GetSpecialPropertyValue(cls, entity, property):
+    """Returns an entity's value for a special property.
+
+    Right now, the only special property is __key__, whose value is the
+    entity's key.
+
+    Args:
+      entity: datastore.Entity
+      property: name of the special property, e.g. '__key__'.
+
+    Returns:
+      The property value. For __key__, a datastore_types.Key.
+
+    Raises:
+      AssertionError if the given property is not special.
+    """
+    assert property in datastore_types._SPECIAL_PROPERTIES
+    if property == datastore_types._KEY_SPECIAL_PROPERTY:
+      return entity.key()
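
With __key__ handled by the filter and order paths above, key-range queries can be served by the stub. A hypothetical usage sketch at the db layer; the Greeting model and key are invented for illustration, and it assumes an SDK new enough for __key__ queries plus a stub registered as in the earlier constructor sketch:

from google.appengine.ext import db

class Greeting(db.Expando):
  pass

start = db.Key.from_path('Greeting', 1)      # hypothetical starting key
q = Greeting.all().filter('__key__ >=', start).order('__key__')
batch = q.fetch(10)                          # served by _Dynamic_RunQuery/Next
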