Update Google App Engine to 1.2.2 in thirdparty folder.
author Pawel Solyga <Pawel.Solyga@gmail.com>
date Tue, 12 May 2009 15:39:52 +0200
changeset 2309 be1b94099f2d
parent 2307 81c128f487e6
child 2310 d51331eaec15
thirdparty/google_appengine/RELEASE_NOTES
thirdparty/google_appengine/VERSION
thirdparty/google_appengine/dev_appserver.py
thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py
thirdparty/google_appengine/google/appengine/api/datastore.py
thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py
thirdparty/google_appengine/google/appengine/api/images/images_stub.py
thirdparty/google_appengine/google/appengine/api/mail_service_pb.py
thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py
thirdparty/google_appengine/google/appengine/api/quota.py
thirdparty/google_appengine/google/appengine/api/urlfetch.py
thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py
thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py
thirdparty/google_appengine/google/appengine/api/user_service_pb.py
thirdparty/google_appengine/google/appengine/cron/GrocLexer.py
thirdparty/google_appengine/google/appengine/cron/GrocParser.py
thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py
thirdparty/google_appengine/google/appengine/datastore/entity_pb.py
thirdparty/google_appengine/google/appengine/dist/socket.py
thirdparty/google_appengine/google/appengine/ext/admin/__init__.py
thirdparty/google_appengine/google/appengine/ext/db/__init__.py
thirdparty/google_appengine/google/appengine/ext/gql/__init__.py
thirdparty/google_appengine/google/appengine/ext/remote_api/__init__.py
thirdparty/google_appengine/google/appengine/ext/search/__init__.py
thirdparty/google_appengine/google/appengine/tools/appcfg.py
thirdparty/google_appengine/google/appengine/tools/bulkloader.py
thirdparty/google_appengine/google/appengine/tools/dev_appserver.py
--- a/thirdparty/google_appengine/RELEASE_NOTES	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/RELEASE_NOTES	Tue May 12 15:39:52 2009 +0200
@@ -3,6 +3,40 @@
 
 App Engine Python SDK - Release Notes
 
+Version 1.2.2 - April 22, 2009
+==============================
+
+  - New quota API which returns the CPU usage of the current request.
+      from google.appengine.api import quota
+      cpu_usage_so_far = quota.get_request_cpu_usage()
+  - Urlfetch fetch now supports user-configurable deadlines.
+      http://code.google.com/p/googleappengine/issues/detail?id=79
+  - Urlfetch in the SDK allows the Accept-Encoding header to match App Engine.
+      http://code.google.com/p/googleappengine/issues/detail?id=1071
+  - urllib now supports HTTPS in addition to HTTP.
+      http://code.google.com/p/googleappengine/issues/detail?id=1156
+  - Datastore indexes on single properties can now be disabled by setting
+    indexed=False on the property constructor.
+  - Datastore now supports Key-only queries, using either SELECT __key__ or
+    db.Query(Model, keys_only=True).
+  - Fixed issues with Datastore IN filters and sorting: sort order is now
+    correct, and can be used with __key__.
+      http://code.google.com/p/googleappengine/issues/detail?id=1100
+      http://code.google.com/p/googleappengine/issues/detail?id=1016
+  - Cron supports additional time specification formats.
+      http://code.google.com/p/googleappengine/issues/detail?id=1261
+  - Fixed an issue in the dev_appserver admin console datastore viewer
+    (/_ah/admin/datastore) with sorting columns containing None types.
+      http://code.google.com/p/googleappengine/issues/detail?id=1007
+  - Bulk Loader improvements: New appcfg download_data command.
+    Better backoff support and debugging output for long requests.
+  - New --vhost flag on appcfg.py request_logs command to select logs for
+    a particular host.
+  - Python _ast module is now available for import.
+      http://code.google.com/p/googleappengine/issues/detail?id=779
+  - Fixed issue with the color argument of the Images API composite method.
+
+
 Version 1.2.1 - April 13, 2009
 =============================
 
@@ -33,12 +67,12 @@
       http://code.google.com/p/googleappengine/issues/detail?id=1017
 
 
-
 Version 1.2.0 - March 24, 2009
 ==============================
   - Cron support. Appcfg.py will upload the schedule to App Engine.
       The dev_appserver console at /_ah/admin describes your schedule but does
       not automatically run scheduled jobs. Learn more at
+      http://code.google.com/appengine/docs/python/config/cron.html
   - New allow_skipped_files flag in dev_appserver to allow it to read files
     which are not available in App Engine.
       http://code.google.com/p/googleappengine/issues/detail?id=550
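
The Datastore changes in these release notes (keys-only queries and the new
indexed=False property option) can be exercised together; a minimal sketch,
assuming a hypothetical Person model that is not part of the SDK:

    from google.appengine.ext import db

    class Person(db.Model):
      name = db.StringProperty()
      # New in 1.2.2: disable the built-in single-property index.
      nickname = db.StringProperty(indexed=False)

    # Keys-only query: the result contains Key objects, not full entities.
    keys = db.Query(Person, keys_only=True).filter('name =', 'Guido').fetch(10)

    # Equivalent GQL form using the new SELECT __key__ syntax.
    keys_gql = db.GqlQuery(
        'SELECT __key__ FROM Person WHERE name = :1', 'Guido').fetch(10)
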
--- a/thirdparty/google_appengine/VERSION	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/VERSION	Tue May 12 15:39:52 2009 +0200
@@ -1,3 +1,3 @@
-release: "1.2.1"
-timestamp: 1238791978
+release: "1.2.2"
+timestamp: 1240438569
 api_versions: ['1']
--- a/thirdparty/google_appengine/dev_appserver.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/dev_appserver.py	Tue May 12 15:39:52 2009 +0200
@@ -21,21 +21,17 @@
 import sys
 
 if not hasattr(sys, 'version_info'):
-  sys.stderr.write('Error: Very old versions of Python are not supported. Please '
-                   'use version 2.5.\n')
+  sys.stderr.write('Very old versions of Python are not supported. Please '
+                   'use version 2.5 or greater.\n')
   sys.exit(1)
 version_tuple = tuple(sys.version_info[:2])
 if version_tuple < (2, 4):
   sys.stderr.write('Error: Python %d.%d is not supported. Please use '
-                   'version 2.5.\n' % version_tuple)
+                   'version 2.5 or greater.\n' % version_tuple)
   sys.exit(1)
 if version_tuple == (2, 4):
   sys.stderr.write('Warning: Python 2.4 is not supported; this program may '
-                   'break. Please use version 2.5.\n')
-if version_tuple > (2, 5):
-  sys.stderr.write('Error: Python %d.%d and is not supported; '
-		   'Please use version 2.5, not greater.\n' % version_tuple)
-  sys.exit(1)
+                   'break. Please use version 2.5 or greater.\n')
 
 DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
 SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
--- a/thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py	Tue May 12 15:39:52 2009 +0200
@@ -89,11 +89,12 @@
     assert self.__state is RPC.IDLE, ('RPC for %s.%s has already been started' %
                                       (self.package, self.call))
     assert self.callback is None or callable(self.callback)
-
     self._MakeCallImpl()
 
   def Wait(self):
     """Waits on the API call associated with this RPC."""
+    assert self.__state is not RPC.IDLE, ('RPC for %s.%s has not been started' %
+                                          (self.package, self.call))
     rpc_completed = self._WaitImpl()
 
     assert rpc_completed, ('RPC for %s.%s was not completed, and no other ' +
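
The new assertion makes the RPC lifecycle explicit: Wait() may only be called
after MakeCall(). A minimal sketch of the expected call order, using urlfetch
as one concrete service (the URL is arbitrary):

    from google.appengine.api import apiproxy_stub_map
    from google.appengine.api import urlfetch_service_pb

    request = urlfetch_service_pb.URLFetchRequest()
    request.set_url('http://example.com/')
    request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
    response = urlfetch_service_pb.URLFetchResponse()

    rpc = apiproxy_stub_map.CreateRPC('urlfetch')
    rpc.MakeCall('urlfetch', 'Fetch', request, response)  # IDLE -> RUNNING
    rpc.Wait()         # fine here; calling Wait() first now trips the assert
    rpc.CheckSuccess()
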
--- a/thirdparty/google_appengine/google/appengine/api/datastore.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore.py	Tue May 12 15:39:52 2009 +0200
@@ -274,7 +274,8 @@
   Includes read-only accessors for app id, kind, and primary key. Also
   provides dictionary-style access to properties.
   """
-  def __init__(self, kind, parent=None, _app=None, name=None):
+  def __init__(self, kind, parent=None, _app=None, name=None,
+               unindexed_properties=[]):
     """Constructor. Takes the kind and transaction root, which cannot be
     changed after the entity is constructed, and an optional parent. Raises
     BadArgumentError or BadKeyError if kind is invalid or parent is not an
@@ -287,6 +288,9 @@
       parent: Entity or Key
       # if provided, this entity's name.
       name: string
+      # if provided, a sequence of property names that should not be indexed
+      # by the built-in single property indices.
+      unindexed_properties: list or tuple of strings
     """
     ref = entity_pb.Reference()
     _app = datastore_types.ResolveAppId(_app)
@@ -311,6 +315,15 @@
         raise datastore_errors.BadValueError('name cannot begin with a digit')
       last_path.set_name(name.encode('utf-8'))
 
+    unindexed_properties, multiple = NormalizeAndTypeCheck(unindexed_properties, basestring)
+    if not multiple:
+      raise datastore_errors.BadArgumentError(
+        'unindexed_properties must be a sequence; received %s (a %s).' %
+        (unindexed_properties, typename(unindexed_properties)))
+    for prop in unindexed_properties:
+      datastore_types.ValidateProperty(prop, None)
+    self.__unindexed_properties = frozenset(unindexed_properties)
+
     self.__key = Key._FromPb(ref)
 
   def app(self):
@@ -336,13 +349,17 @@
     return self.key().parent()
 
   def entity_group(self):
-    """Returns this entitys's entity group as a Key.
+    """Returns this entity's entity group as a Key.
 
     Note that the returned Key will be incomplete if this is a root entity
     and its key is incomplete.
     """
     return self.key().entity_group()
 
+  def unindexed_properties(self):
+    """Returns this entity's unindexed properties, as a frozenset of strings."""
+    return self.__unindexed_properties
+
   def __setitem__(self, name, value):
     """Implements the [] operator. Used to set property value(s).
 
@@ -492,7 +509,8 @@
       if isinstance(sample, list):
         sample = values[0]
 
-      if isinstance(sample, datastore_types._RAW_PROPERTY_TYPES):
+      if (isinstance(sample, datastore_types._RAW_PROPERTY_TYPES) or
+          name in self.__unindexed_properties):
         pb.raw_property_list().extend(properties)
       else:
         pb.property_list().extend(properties)
@@ -530,7 +548,10 @@
       assert last_path.has_name()
       assert last_path.name()
 
-    e = Entity(unicode(last_path.type().decode('utf-8')))
+    unindexed_properties = [p.name() for p in pb.raw_property_list()]
+
+    e = Entity(unicode(last_path.type().decode('utf-8')),
+               unindexed_properties=unindexed_properties)
     ref = e.__key._Key__reference
     ref.CopyFrom(pb.key())
 
@@ -538,11 +559,6 @@
 
     for prop_list in (pb.property_list(), pb.raw_property_list()):
       for prop in prop_list:
-        if not prop.has_multiple():
-          raise datastore_errors.Error(
-            'Property %s is corrupt in the datastore; it\'s missing the '
-            'multiple valued field.' % prop.name())
-
         try:
           value = datastore_types.FromPropertyPb(prop)
         except (AssertionError, AttributeError, TypeError, ValueError), e:
@@ -684,7 +700,7 @@
   __inequality_prop = None
   __inequality_count = 0
 
-  def __init__(self, kind, filters={}, _app=None):
+  def __init__(self, kind, filters={}, _app=None, keys_only=False):
     """Constructor.
 
     Raises BadArgumentError if kind is not a string. Raises BadValueError or
@@ -692,9 +708,10 @@
 
     Args:
       # kind is required. filters is optional; if provided, it's used
-      # as an initial set of property filters.
+      # as an initial set of property filters. keys_only defaults to False.
       kind: string
       filters: dict
+      keys_only: boolean
     """
     datastore_types.ValidateString(kind, 'kind',
                                    datastore_errors.BadArgumentError)
@@ -705,6 +722,7 @@
     self.update(filters)
 
     self.__app = datastore_types.ResolveAppId(_app)
+    self.__keys_only = keys_only
 
   def Order(self, *orderings):
     """Specify how the query results should be sorted.
@@ -847,6 +865,10 @@
     self.__ancestor.CopyFrom(key._Key__reference)
     return self
 
+  def IsKeysOnly(self):
+    """Returns True if this query is keys only, false otherwise."""
+    return self.__keys_only
+
   def Run(self):
     """Runs this query.
 
@@ -890,7 +912,7 @@
         raise datastore_errors.NeedIndexError(
           str(exc) + '\nThis query needs this index:\n' + yaml)
 
-    return Iterator._FromPb(result.cursor())
+    return Iterator._FromPb(result)
 
   def Get(self, limit, offset=0):
     """Fetches and returns a maximum number of results from the query.
@@ -1120,6 +1142,7 @@
     pb = datastore_pb.Query()
 
     pb.set_kind(self.__kind.encode('utf-8'))
+    pb.set_keys_only(bool(self.__keys_only))
     if self.__app:
       pb.set_app(self.__app.encode('utf-8'))
     if limit is not None:
@@ -1171,6 +1194,11 @@
 
   This class is actually a subclass of datastore.Query as it is intended to act
   like a normal Query object (supporting the same interface).
+
+  Does not support keys-only queries, since it needs whole entities in order
+  to merge-sort them. (Strictly, that only applies when there is a sort order
+  on something other than __key__, but allowing keys-only queries in some
+  cases and not others would be confusing.)
   """
 
   def __init__(self, bound_queries, orderings):
@@ -1179,6 +1207,12 @@
           'Cannot satisfy query -- too many subqueries (max: %d, got %d).'
           ' Probable cause: too many IN/!= filters in query.' %
           (MAX_ALLOWABLE_QUERIES, len(bound_queries)))
+
+    for query in bound_queries:
+      if query.IsKeysOnly():
+        raise datastore_errors.BadQueryError(
+            'MultiQuery does not support keys_only.')
+
     self.__bound_queries = bound_queries
     self.__orderings = orderings
 
@@ -1294,7 +1328,7 @@
       return 0
 
     def __GetValueForId(self, sort_order_entity, identifier, sort_order):
-      value = sort_order_entity.__entity[identifier]
+      value = _GetPropertyValue(sort_order_entity.__entity, identifier)
       entity_key = sort_order_entity.__entity.key()
       if (entity_key, identifier) in self.__min_max_value_cache:
         value = self.__min_max_value_cache[(entity_key, identifier)]
@@ -1479,10 +1513,11 @@
   > for person in it:
   >   print 'Hi, %s!' % person['name']
   """
-  def __init__(self, cursor):
+  def __init__(self, cursor, keys_only=False):
     self.__cursor = cursor
     self.__buffer = []
     self.__more_results = True
+    self.__keys_only = keys_only
 
   def _Next(self, count):
     """Returns the next result(s) of the query.
@@ -1490,31 +1525,29 @@
     Not intended to be used by application developers. Use the python
     iterator protocol instead.
 
-    This method returns the next entities from the list of resulting
-    entities that matched the query. If the query specified a sort
-    order, entities are returned in that order. Otherwise, the order
-    is undefined.
+    This method returns the next entities or keys from the list of matching
+    results. If the query specified a sort order, results are returned in that
+    order. Otherwise, the order is undefined.
 
-    The argument specifies the number of entities to return. If it's
-    greater than the number of remaining entities, all of the
-    remaining entities are returned. In that case, the length of the
-    returned list will be smaller than count.
+    The argument specifies the number of results to return. If it's greater
+    than the number of remaining results, all of the remaining results are
+    returned. In that case, the length of the returned list will be smaller
+    than count.
 
-    There is an internal buffer for use with the next() method.  If
-    this buffer is not empty, up to 'count' values are removed from
-    this buffer and returned.  It's best not to mix _Next() and
-    next().
+    There is an internal buffer for use with the next() method. If this buffer
+    is not empty, up to 'count' values are removed from this buffer and
+    returned. It's best not to mix _Next() and next().
 
-    The results are always returned as a list. If there are no results
-    left, an empty list is returned.
+    The results are always returned as a list. If there are no results left,
+    an empty list is returned.
 
     Args:
-      # the number of entities to return; must be >= 1
+      # the number of results to return; must be >= 1
       count: int or long
 
     Returns:
-      # a list of entities
-      [Entity, ...]
+      # a list of entities or keys
+      [Entity or Key, ...]
     """
     if not isinstance(count, (int, long)) or count <= 0:
       raise datastore_errors.BadArgumentError(
@@ -1539,8 +1572,10 @@
 
     self.__more_results = result.more_results()
 
-    ret = [Entity._FromPb(r) for r in result.result_list()]
-    return ret
+    if self.__keys_only:
+      return [Key._FromPb(e.key()) for e in result.result_list()]
+    else:
+      return [Entity._FromPb(e) for e in result.result_list()]
 
   _BUFFER_SIZE = 20
 
@@ -1570,18 +1605,16 @@
   @staticmethod
   def _FromPb(pb):
     """Static factory method. Returns the Iterator representation of the given
-    protocol buffer (datastore_pb.Cursor). Not intended to be used by
-    application developers. Enforced by not hiding the datastore_pb classes.
+    protocol buffer (datastore_pb.QueryResult). Not intended to be used by
+    application developers. Enforced by hiding the datastore_pb classes.
 
     Args:
-      # a protocol buffer Cursor
-      pb: datastore_pb.Cursor
+      pb: datastore_pb.QueryResult
 
     Returns:
-      # the Iterator representation of the argument
       Iterator
     """
-    return Iterator(pb.cursor())
+    return Iterator(pb.cursor().cursor(), keys_only=pb.keys_only())
 
 
 class _Transaction(object):
@@ -1920,6 +1953,28 @@
   return key
 
 
+def _GetPropertyValue(entity, property):
+  """Returns an entity's value for a given property name.
+
+  Handles special properties like __key__ as well as normal properties.
+
+  Args:
+    entity: datastore.Entity
+    property: str; the property name
+
+  Returns:
+    property value. For __key__, a datastore_types.Key.
+
+  Raises:
+    KeyError, if the entity does not have the given property.
+  """
+  if property in datastore_types._SPECIAL_PROPERTIES:
+    assert property == datastore_types._KEY_SPECIAL_PROPERTY
+    return entity.key()
+  else:
+    return entity[property]
+
+
 def _AddOrAppend(dictionary, key, value):
   """Adds the value to the existing values in the dictionary, if any.
 
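
A sketch of the two user-visible datastore.py additions above at the low-level
API; the 'Person' kind and the property names are illustrative:

    from google.appengine.api import datastore

    # unindexed_properties keeps 'bio' out of the single-property indices.
    entity = datastore.Entity('Person', unindexed_properties=['bio'])
    entity['name'] = 'Guido'
    entity['bio'] = 'a long biography that need not be indexed'
    datastore.Put(entity)
    print entity.unindexed_properties()   # frozenset(['bio'])

    # Keys-only query: Run() yields Key objects instead of Entities.
    query = datastore.Query('Person', {'name =': 'Guido'}, keys_only=True)
    for key in query.Run():
      print key
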
--- a/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Tue May 12 15:39:52 2009 +0200
@@ -98,6 +98,44 @@
     self.native = datastore.Entity._FromPb(entity)
 
 
+class _Cursor(object):
+  """A query cursor.
+
+  Public properties:
+    cursor: the integer cursor
+    count: the original total number of results
+    keys_only: whether the query is keys_only
+  """
+  def __init__(self, results, keys_only):
+    """Constructor.
+
+    Args:
+      # the query results, in order, such that pop(0) is the next result
+      results: list of entity_pb.EntityProto
+      keys_only: integer
+    """
+    self.__results = results
+    self.count = len(results)
+    self.keys_only = keys_only
+    self.cursor = id(self)
+
+  def PopulateQueryResult(self, result, count):
+    """Populates a QueryResult with this cursor and the given number of results.
+
+    Args:
+      result: datastore_pb.QueryResult
+      count: integer
+    """
+    result.mutable_cursor().set_cursor(self.cursor)
+    result.set_keys_only(self.keys_only)
+
+    results_pbs = [r._ToPb() for r in self.__results[:count]]
+    result.result_list().extend(results_pbs)
+    del self.__results[:count]
+
+    result.set_more_results(len(self.__results) > 0)
+
+
 class DatastoreFileStub(apiproxy_stub.APIProxyStub):
   """ Persistent stub for the Python datastore API.
 
@@ -189,11 +227,9 @@
     self.__query_history = {}
 
     self.__next_id = 1
-    self.__next_cursor = 1
     self.__next_tx_handle = 1
     self.__next_index_id = 1
     self.__id_lock = threading.Lock()
-    self.__cursor_lock = threading.Lock()
     self.__tx_handle_lock = threading.Lock()
     self.__index_id_lock = threading.Lock()
     self.__tx_lock = threading.Lock()
@@ -581,6 +617,22 @@
                  datastore_pb.Query_Filter.EQUAL:                 '==',
                  }
 
+    def has_prop_indexed(entity, prop):
+      """Returns True if prop is in the entity and is indexed."""
+      if prop in datastore_types._SPECIAL_PROPERTIES:
+        return True
+      elif prop in entity.unindexed_properties():
+        return False
+
+      values = entity.get(prop, [])
+      if not isinstance(values, (tuple, list)):
+        values = [values]
+
+      for value in values:
+        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
+          return True
+      return False
+
     for filt in query.filter_list():
       assert filt.op() != datastore_pb.Query_Filter.IN
 
@@ -590,20 +642,24 @@
       filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                          for filter_prop in filt.property_list()]
 
-      def passes(entity):
-        """ Returns True if the entity passes the filter, False otherwise. """
-        if prop in datastore_types._SPECIAL_PROPERTIES:
-          entity_vals = self.__GetSpecialPropertyValue(entity, prop)
-        else:
-          entity_vals = entity.get(prop, [])
+      def passes_filter(entity):
+        """Returns True if the entity passes the filter, False otherwise.
+
+        The filter being evaluated is filt, the current filter that we're on
+        in the list of filters in the query.
+        """
+        if not has_prop_indexed(entity, prop):
+          return False
+
+        try:
+          entity_vals = datastore._GetPropertyValue(entity, prop)
+        except KeyError:
+          entity_vals = []
 
         if not isinstance(entity_vals, list):
           entity_vals = [entity_vals]
 
         for fixed_entity_val in entity_vals:
-          if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
-            continue
-
           for filter_val in filter_val_list:
             fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
               fixed_entity_val.__class__)
@@ -627,22 +683,7 @@
 
         return False
 
-      results = filter(passes, results)
-
-    def has_prop_indexed(entity, prop):
-      """Returns True if prop is in the entity and is not a raw property, or
-      is a special property."""
-      if prop in datastore_types._SPECIAL_PROPERTIES:
-        return True
-
-      values = entity.get(prop, [])
-      if not isinstance(values, (tuple, list)):
-        values = [values]
-
-      for value in values:
-        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
-          return True
-      return False
+      results = filter(passes_filter, results)
 
     for order in query.order_list():
       prop = order.property().decode('utf-8')
@@ -658,17 +699,13 @@
 
         reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
 
-        if prop in datastore_types._SPECIAL_PROPERTIES:
-          a_val = self.__GetSpecialPropertyValue(a, prop)
-          b_val = self.__GetSpecialPropertyValue(b, prop)
-        else:
-          a_val = a[prop]
-          if isinstance(a_val, list):
-            a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
+        a_val = datastore._GetPropertyValue(a, prop)
+        if isinstance(a_val, list):
+          a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
 
-          b_val = b[prop]
-          if isinstance(b_val, list):
-            b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
+        b_val = datastore._GetPropertyValue(b, prop)
+        if isinstance(b_val, list):
+          b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
 
         cmped = order_compare_properties(a_val, b_val)
 
@@ -725,39 +762,27 @@
       self.__query_history[clone] = 1
     self.__WriteHistory()
 
-    self.__cursor_lock.acquire()
-    cursor = self.__next_cursor
-    self.__next_cursor += 1
-    self.__cursor_lock.release()
-    self.__queries[cursor] = (results, len(results))
-
-    query_result.mutable_cursor().set_cursor(cursor)
-    query_result.set_more_results(len(results) > 0)
+    cursor = _Cursor(results, query.keys_only())
+    self.__queries[cursor.cursor] = cursor
+    cursor.PopulateQueryResult(query_result, 0)
 
   def _Dynamic_Next(self, next_request, query_result):
-    cursor = next_request.cursor().cursor()
+    cursor_handle = next_request.cursor().cursor()
 
     try:
-      results, orig_count = self.__queries[cursor]
+      cursor = self.__queries[cursor_handle]
     except KeyError:
-      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
-                                             'Cursor %d not found' % cursor)
+      raise apiproxy_errors.ApplicationError(
+          datastore_pb.Error.BAD_REQUEST, 'Cursor %d not found' % cursor_handle)
 
-    count = next_request.count()
-
-    results_pb = [r._ToPb() for r in results[:count]]
-    query_result.result_list().extend(results_pb)
-    del results[:count]
-
-    query_result.set_more_results(len(results) > 0)
+    cursor.PopulateQueryResult(query_result, next_request.count())
 
   def _Dynamic_Count(self, query, integer64proto):
     self.__ValidateAppId(query.app())
     query_result = datastore_pb.QueryResult()
     self._Dynamic_RunQuery(query, query_result)
     cursor = query_result.cursor().cursor()
-    results, count = self.__queries[cursor]
-    integer64proto.set_value(count)
+    integer64proto.set_value(self.__queries[cursor].count)
     del self.__queries[cursor]
 
   def _Dynamic_BeginTransaction(self, request, transaction):
@@ -945,23 +970,3 @@
           return stored_index
 
     return None
-
-  @classmethod
-  def __GetSpecialPropertyValue(cls, entity, property):
-    """Returns an entity's value for a special property.
-
-    Right now, the only special property is __key__, whose value is the
-    entity's key.
-
-    Args:
-      entity: datastore.Entity
-
-    Returns:
-      property value. For __key__, a datastore_types.Key.
-
-    Raises:
-      AssertionError, if the given property is not special.
-    """
-    assert property in datastore_types._SPECIAL_PROPERTIES
-    if property == datastore_types._KEY_SPECIAL_PROPERTY:
-      return entity.key()
--- a/thirdparty/google_appengine/google/appengine/api/images/images_stub.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/images/images_stub.py	Tue May 12 15:39:52 2009 +0200
@@ -36,6 +36,22 @@
 from google.appengine.runtime import apiproxy_errors
 
 
+def _ArgbToRgbaTuple(argb):
+  """Convert from a single ARGB value to a tuple containing RGBA.
+
+  Args:
+    argb: Signed 32 bit integer containing an ARGB value.
+
+  Returns:
+    RGBA tuple.
+  """
+  unsigned_argb = argb % 0x100000000
+  return ((unsigned_argb >> 16) & 0xFF,
+          (unsigned_argb >> 8) & 0xFF,
+          unsigned_argb & 0xFF,
+          (unsigned_argb >> 24) & 0xFF)
+
+
 class ImagesServiceStub(apiproxy_stub.APIProxyStub):
   """Stub version of images API to be used with the dev_appserver."""
 
@@ -60,10 +76,8 @@
     """
     width = request.canvas().width()
     height = request.canvas().height()
-    color = request.canvas().color() % 0x100000000
-    reordered_color = int((color & 0xff000000) | ((color >> 16) & 0xff) |
-                          (color & 0xff00) | (color & 0xff) << 16)
-    canvas = Image.new("RGBA", (width, height), reordered_color)
+    color = _ArgbToRgbaTuple(request.canvas().color())
+    canvas = Image.new("RGBA", (width, height), color)
     sources = []
     if (not request.canvas().width() or request.canvas().width() > 4000 or
         not request.canvas().height() or request.canvas().height() > 4000):
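
The channel reordering in _ArgbToRgbaTuple can be checked by hand; a worked
example with semi-transparent red (0x80FF0000 in ARGB), which the proto may
deliver as a negative signed 32-bit value:

    argb = 0x80FF0000 - 0x100000000     # -2130771968, signed representation
    unsigned_argb = argb % 0x100000000  # back to 0x80FF0000
    rgba = ((unsigned_argb >> 16) & 0xFF,   # R = 0xFF
            (unsigned_argb >> 8) & 0xFF,    # G = 0x00
            unsigned_argb & 0xFF,           # B = 0x00
            (unsigned_argb >> 24) & 0xFF)   # A = 0x80
    assert rgba == (255, 0, 0, 128)
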
--- a/thirdparty/google_appengine/google/appengine/api/mail_service_pb.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/mail_service_pb.py	Tue May 12 15:39:52 2009 +0200
@@ -22,7 +22,7 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import VoidProto
+from google.appengine.api.api_base_pb import *
 class MailServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
--- a/thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py	Tue May 12 15:39:52 2009 +0200
@@ -22,7 +22,7 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import VoidProto
+from google.appengine.api.api_base_pb import *
 class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/quota.py	Tue May 12 15:39:52 2009 +0200
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Access to quota usage for this application."""
+
+
+
+
+try:
+  from google3.apphosting.runtime import _apphosting_runtime___python__apiproxy
+except ImportError:
+  _apphosting_runtime___python__apiproxy = None
+
+def get_request_cpu_usage():
+  """Get the amount of CPU used so far for the current request.
+
+  Returns the number of megacycles used so far for the current
+  request. Does not include CPU used by API calls.
+
+  Always returns 0 in the dev_appserver.
+  """
+
+  if _apphosting_runtime___python__apiproxy:
+    return _apphosting_runtime___python__apiproxy.get_request_cpu_usage()
+  return 0
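
A minimal usage sketch for the new module; expensive_computation() is a
hypothetical stand-in for real request work:

    import logging
    from google.appengine.api import quota

    def expensive_computation():
      return sum(i * i for i in xrange(100000))

    start = quota.get_request_cpu_usage()
    expensive_computation()
    used = quota.get_request_cpu_usage() - start
    # Megacycles, excluding CPU spent inside API calls; always 0 in the
    # dev_appserver.
    logging.info('computation cost: %d megacycles', used)
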
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch.py	Tue May 12 15:39:52 2009 +0200
@@ -30,6 +30,7 @@
 import urllib2
 import urlparse
 
+from google.appengine.api import apiproxy_rpc
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.api import urlfetch_service_pb
 from google.appengine.api.urlfetch_errors import *
@@ -186,13 +187,29 @@
   return False
 
 
+def __create_rpc(deadline=None, callback=None):
+  """DO NOT USE.  WILL CHANGE AND BREAK YOUR CODE.
+
+  Creates an RPC object for use with the urlfetch API.
+
+  Args:
+    deadline: deadline in seconds for the operation.
+    callback: callable to invoke on completion.
+
+  Returns:
+    A _URLFetchRPC object.
+  """
+  return _URLFetchRPC(deadline, callback)
+
+
 def fetch(url, payload=None, method=GET, headers={}, allow_truncated=False,
-          follow_redirects=True):
+          follow_redirects=True, deadline=None):
   """Fetches the given HTTP URL, blocking until the result is returned.
 
   Other optional parameters are:
      method: GET, POST, HEAD, PUT, or DELETE
-     payload: POST or PUT payload (implies method is not GET, HEAD, or DELETE)
+     payload: POST or PUT payload (implies method is not GET, HEAD, or DELETE).
+       This is ignored if the method is not POST or PUT.
      headers: dictionary of HTTP headers to send with the request
      allow_truncated: if true, truncate large responses and return them without
        error. otherwise, ResponseTooLargeError will be thrown when a response is
@@ -204,6 +221,7 @@
        information.  If false, you see the HTTP response yourself,
        including the 'Location' header, and redirects are not
        followed.
+     deadline: deadline in seconds for the operation.
 
   We use a HTTP/1.1 compliant proxy to fetch the result.
 
@@ -218,73 +236,173 @@
   of the returned structure, so HTTP errors like 404 do not result in an
   exception.
   """
-  if isinstance(method, basestring):
-    method = method.upper()
-  method = _URL_STRING_MAP.get(method, method)
-  if method not in _VALID_METHODS:
-    raise InvalidMethodError('Invalid method %s.' % str(method))
+  rpc = __create_rpc(deadline=deadline)
+  rpc.make_call(url, payload, method, headers, follow_redirects)
+  return rpc.get_result(allow_truncated)
+
+
+class _URLFetchRPC(object):
+  """A RPC object that manages the urlfetch RPC.
+
+  Its primary functions are the following:
+  1. Convert error codes to the URLFetchServiceError namespace and raise them
+     when get_result is called.
+  2. Wrap the urlfetch response with a _URLFetchResult object.
+  """
 
-  if _is_fetching_self(url, method):
-    raise InvalidURLError("App cannot fetch the same URL as the one used for "
-                          "the request.")
+  def __init__(self, deadline=None, callback=None):
+    """Construct a new url fetch RPC.
 
-  request = urlfetch_service_pb.URLFetchRequest()
-  response = urlfetch_service_pb.URLFetchResponse()
-  request.set_url(url)
+    Args:
+      deadline: deadline in seconds for the operation.
+      callback: callable to invoke on completion.
+    """
+    self.__rpc = apiproxy_stub_map.CreateRPC('urlfetch')
+    self.__rpc.deadline = deadline
+    self.__rpc.callback = callback
+    self.__called_hooks = False
+
+  def make_call(self, url, payload=None, method=GET, headers={},
+                follow_redirects=True):
+    """Executes the RPC call to fetch a given HTTP URL.
 
-  if method == GET:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
-  elif method == POST:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.POST)
-  elif method == HEAD:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.HEAD)
-  elif method == PUT:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.PUT)
-  elif method == DELETE:
-    request.set_method(urlfetch_service_pb.URLFetchRequest.DELETE)
+    See urlfetch.fetch for a thorough description of arguments.
+    """
+    assert self.__rpc.state is apiproxy_rpc.RPC.IDLE
+    if isinstance(method, basestring):
+      method = method.upper()
+    method = _URL_STRING_MAP.get(method, method)
+    if method not in _VALID_METHODS:
+      raise InvalidMethodError('Invalid method %s.' % str(method))
+
+    if _is_fetching_self(url, method):
+      raise InvalidURLError("App cannot fetch the same URL as the one used for "
+                            "the request.")
+
+    self.__request = urlfetch_service_pb.URLFetchRequest()
+    self.__response = urlfetch_service_pb.URLFetchResponse()
+    self.__result = None
+    self.__request.set_url(url)
 
-  if payload and (method == POST or method == PUT):
-    request.set_payload(payload)
+    if method == GET:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
+    elif method == POST:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.POST)
+    elif method == HEAD:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.HEAD)
+    elif method == PUT:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.PUT)
+    elif method == DELETE:
+      self.__request.set_method(urlfetch_service_pb.URLFetchRequest.DELETE)
+
+    if payload and (method == POST or method == PUT):
+      self.__request.set_payload(payload)
+
+    for key, value in headers.iteritems():
+      header_proto = self.__request.add_header()
+      header_proto.set_key(key)
+      header_proto.set_value(str(value))
+
+    self.__request.set_followredirects(follow_redirects)
+    if self.__rpc.deadline:
+      self.__request.set_deadline(self.__rpc.deadline)
+
+    apiproxy_stub_map.apiproxy.GetPreCallHooks().Call(
+        'urlfetch', 'Fetch', self.__request, self.__response)
+    self.__rpc.MakeCall('urlfetch', 'Fetch', self.__request, self.__response)
 
-  for key, value in headers.iteritems():
-    header_proto = request.add_header()
-    header_proto.set_key(key)
-    header_proto.set_value(str(value))
+  def wait(self):
+    """Waits for the urlfetch RPC to finish.  Idempotent.
+    """
+    assert self.__rpc.state is not apiproxy_rpc.RPC.IDLE
+    if self.__rpc.state is apiproxy_rpc.RPC.RUNNING:
+      self.__rpc.Wait()
+
+  def check_success(self, allow_truncated=False):
+    """Check success and convert RPC exceptions to urlfetch exceptions.
+
+    This method waits for the RPC if it has not yet finished, and calls the
+    post-call hooks on the first invocation.
 
-  request.set_followredirects(follow_redirects)
+    Args:
+      allow_truncated: if False, an error is raised if the response was
+        truncated.
+
+    Raises:
+      InvalidURLError if the url was invalid.
+      DownloadError if there was a problem fetching the url.
+      ResponseTooLargeError if the response was either truncated (and
+        allow_truncated is false) or if it was too big for us to download.
+    """
+    assert self.__rpc.state is not apiproxy_rpc.RPC.IDLE
+    if self.__rpc.state is apiproxy_rpc.RPC.RUNNING:
+      self.wait()
 
-  try:
-    apiproxy_stub_map.MakeSyncCall('urlfetch', 'Fetch', request, response)
-  except apiproxy_errors.ApplicationError, e:
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.INVALID_URL):
-      raise InvalidURLError(str(e))
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR):
-      raise DownloadError(str(e))
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR):
-      raise DownloadError(str(e))
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
-      raise ResponseTooLargeError(None)
-    if (e.application_error ==
-        urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
-      raise DownloadError(str(e))
-    raise e
-  result = _URLFetchResult(response)
+    try:
+      self.__rpc.CheckSuccess()
+      if not self.__called_hooks:
+        self.__called_hooks = True
+        apiproxy_stub_map.apiproxy.GetPostCallHooks().Call(
+            'urlfetch', 'Fetch', self.__request, self.__response)
+    except apiproxy_errors.ApplicationError, e:
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.INVALID_URL):
+        raise InvalidURLError(str(e))
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR):
+        raise DownloadError(str(e))
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR):
+        raise DownloadError(str(e))
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
+        raise ResponseTooLargeError(None)
+      if (e.application_error ==
+          urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
+        raise DownloadError(str(e))
+      raise e
 
-  if not allow_truncated and response.contentwastruncated():
-    raise ResponseTooLargeError(result)
+    if self.__response.contentwastruncated() and not allow_truncated:
+      raise ResponseTooLargeError(_URLFetchResult(self.__response))
+
+  def get_result(self, allow_truncated=False):
+    """Returns the RPC result or raises an exception if the rpc failed.
+
+    This method waits for the RPC if not completed, and checks success.
+
+    Args:
+      allow_truncated: if False, an error is raised if the response was
+        truncated.
 
-  return result
+    Returns:
+      The urlfetch result.
+
+    Raises:
+      Error if the rpc has not yet finished.
+      InvalidURLError if the url was invalid.
+      DownloadError if there was a problem fetching the url.
+      ResponseTooLargeError if the response was either truncated (and
+        allow_truncated is false) or if it was too big for us to download.
+    """
+    if self.__result is None:
+      self.check_success(allow_truncated)
+      self.__result = _URLFetchResult(self.__response)
+    return self.__result
+
 
 Fetch = fetch
 
 
 class _URLFetchResult(object):
-  """A Pythonic representation of our fetch response protocol buffer."""
+  """A Pythonic representation of our fetch response protocol buffer.
+  """
+
   def __init__(self, response_proto):
+    """Constructor.
+
+    Args:
+      response_proto: the URLFetchResponse proto buffer to wrap.
+    """
     self.__pb = response_proto
     self.content = response_proto.content()
     self.status_code = response_proto.statuscode()
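
A sketch of the new deadline parameter from the caller's side; the URL and
the 10-second value are arbitrary:

    from google.appengine.api import urlfetch

    try:
      result = urlfetch.fetch('http://example.com/slow', deadline=10)
      print result.status_code
    except urlfetch.DownloadError:
      # DEADLINE_EXCEEDED is surfaced as DownloadError by check_success above.
      print 'fetch failed or exceeded the deadline'
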
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Tue May 12 15:39:52 2009 +0200
@@ -217,6 +217,8 @@
   payload_ = ""
   has_followredirects_ = 0
   followredirects_ = 1
+  has_deadline_ = 0
+  deadline_ = 0.0
 
   def __init__(self, contents=None):
     self.header_ = []
@@ -290,6 +292,19 @@
 
   def has_followredirects(self): return self.has_followredirects_
 
+  def deadline(self): return self.deadline_
+
+  def set_deadline(self, x):
+    self.has_deadline_ = 1
+    self.deadline_ = x
+
+  def clear_deadline(self):
+    if self.has_deadline_:
+      self.has_deadline_ = 0
+      self.deadline_ = 0.0
+
+  def has_deadline(self): return self.has_deadline_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -298,6 +313,7 @@
     for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
     if (x.has_payload()): self.set_payload(x.payload())
     if (x.has_followredirects()): self.set_followredirects(x.followredirects())
+    if (x.has_deadline()): self.set_deadline(x.deadline())
 
   def Equals(self, x):
     if x is self: return 1
@@ -312,6 +328,8 @@
     if self.has_payload_ and self.payload_ != x.payload_: return 0
     if self.has_followredirects_ != x.has_followredirects_: return 0
     if self.has_followredirects_ and self.followredirects_ != x.followredirects_: return 0
+    if self.has_deadline_ != x.has_deadline_: return 0
+    if self.has_deadline_ and self.deadline_ != x.deadline_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -336,6 +354,7 @@
     for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
     if (self.has_payload_): n += 1 + self.lengthString(len(self.payload_))
     if (self.has_followredirects_): n += 2
+    if (self.has_deadline_): n += 9
     return n + 2
 
   def Clear(self):
@@ -344,6 +363,7 @@
     self.clear_header()
     self.clear_payload()
     self.clear_followredirects()
+    self.clear_deadline()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(8)
@@ -360,6 +380,9 @@
     if (self.has_followredirects_):
       out.putVarInt32(56)
       out.putBoolean(self.followredirects_)
+    if (self.has_deadline_):
+      out.putVarInt32(65)
+      out.putDouble(self.deadline_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -379,6 +402,9 @@
       if tt == 56:
         self.set_followredirects(d.getBoolean())
         continue
+      if tt == 65:
+        self.set_deadline(d.getDouble())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -397,6 +423,7 @@
       cnt+=1
     if self.has_payload_: res+=prefix+("Payload: %s\n" % self.DebugFormatString(self.payload_))
     if self.has_followredirects_: res+=prefix+("FollowRedirects: %s\n" % self.DebugFormatBool(self.followredirects_))
+    if self.has_deadline_: res+=prefix+("Deadline: %s\n" % self.DebugFormat(self.deadline_))
     return res
 
   kMethod = 1
@@ -406,6 +433,7 @@
   kHeaderValue = 5
   kPayload = 6
   kFollowRedirects = 7
+  kDeadline = 8
 
   _TEXT = (
    "ErrorCode",
@@ -416,6 +444,7 @@
    "Value",
    "Payload",
    "FollowRedirects",
+   "Deadline",
   )
 
   _TYPES = (
@@ -434,6 +463,8 @@
 
    ProtocolBuffer.Encoder.NUMERIC,
 
+   ProtocolBuffer.Encoder.DOUBLE,
+
   )
 
   _STYLE = """"""
@@ -542,6 +573,10 @@
   statuscode_ = 0
   has_contentwastruncated_ = 0
   contentwastruncated_ = 0
+  has_externalbytessent_ = 0
+  externalbytessent_ = 0
+  has_externalbytesreceived_ = 0
+  externalbytesreceived_ = 0
 
   def __init__(self, contents=None):
     self.header_ = []
@@ -602,6 +637,32 @@
 
   def has_contentwastruncated(self): return self.has_contentwastruncated_
 
+  def externalbytessent(self): return self.externalbytessent_
+
+  def set_externalbytessent(self, x):
+    self.has_externalbytessent_ = 1
+    self.externalbytessent_ = x
+
+  def clear_externalbytessent(self):
+    if self.has_externalbytessent_:
+      self.has_externalbytessent_ = 0
+      self.externalbytessent_ = 0
+
+  def has_externalbytessent(self): return self.has_externalbytessent_
+
+  def externalbytesreceived(self): return self.externalbytesreceived_
+
+  def set_externalbytesreceived(self, x):
+    self.has_externalbytesreceived_ = 1
+    self.externalbytesreceived_ = x
+
+  def clear_externalbytesreceived(self):
+    if self.has_externalbytesreceived_:
+      self.has_externalbytesreceived_ = 0
+      self.externalbytesreceived_ = 0
+
+  def has_externalbytesreceived(self): return self.has_externalbytesreceived_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -609,6 +670,8 @@
     if (x.has_statuscode()): self.set_statuscode(x.statuscode())
     for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
     if (x.has_contentwastruncated()): self.set_contentwastruncated(x.contentwastruncated())
+    if (x.has_externalbytessent()): self.set_externalbytessent(x.externalbytessent())
+    if (x.has_externalbytesreceived()): self.set_externalbytesreceived(x.externalbytesreceived())
 
   def Equals(self, x):
     if x is self: return 1
@@ -621,6 +684,10 @@
       if e1 != e2: return 0
     if self.has_contentwastruncated_ != x.has_contentwastruncated_: return 0
     if self.has_contentwastruncated_ and self.contentwastruncated_ != x.contentwastruncated_: return 0
+    if self.has_externalbytessent_ != x.has_externalbytessent_: return 0
+    if self.has_externalbytessent_ and self.externalbytessent_ != x.externalbytessent_: return 0
+    if self.has_externalbytesreceived_ != x.has_externalbytesreceived_: return 0
+    if self.has_externalbytesreceived_ and self.externalbytesreceived_ != x.externalbytesreceived_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -640,6 +707,8 @@
     n += 2 * len(self.header_)
     for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
     if (self.has_contentwastruncated_): n += 2
+    if (self.has_externalbytessent_): n += 1 + self.lengthVarInt64(self.externalbytessent_)
+    if (self.has_externalbytesreceived_): n += 1 + self.lengthVarInt64(self.externalbytesreceived_)
     return n + 1
 
   def Clear(self):
@@ -647,6 +716,8 @@
     self.clear_statuscode()
     self.clear_header()
     self.clear_contentwastruncated()
+    self.clear_externalbytessent()
+    self.clear_externalbytesreceived()
 
   def OutputUnchecked(self, out):
     if (self.has_content_):
@@ -661,6 +732,12 @@
     if (self.has_contentwastruncated_):
       out.putVarInt32(48)
       out.putBoolean(self.contentwastruncated_)
+    if (self.has_externalbytessent_):
+      out.putVarInt32(56)
+      out.putVarInt64(self.externalbytessent_)
+    if (self.has_externalbytesreceived_):
+      out.putVarInt32(64)
+      out.putVarInt64(self.externalbytesreceived_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -677,6 +754,12 @@
       if tt == 48:
         self.set_contentwastruncated(d.getBoolean())
         continue
+      if tt == 56:
+        self.set_externalbytessent(d.getVarInt64())
+        continue
+      if tt == 64:
+        self.set_externalbytesreceived(d.getVarInt64())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -694,6 +777,8 @@
       res+=prefix+"}\n"
       cnt+=1
     if self.has_contentwastruncated_: res+=prefix+("ContentWasTruncated: %s\n" % self.DebugFormatBool(self.contentwastruncated_))
+    if self.has_externalbytessent_: res+=prefix+("ExternalBytesSent: %s\n" % self.DebugFormatInt64(self.externalbytessent_))
+    if self.has_externalbytesreceived_: res+=prefix+("ExternalBytesReceived: %s\n" % self.DebugFormatInt64(self.externalbytesreceived_))
     return res
 
   kContent = 1
@@ -702,6 +787,8 @@
   kHeaderKey = 4
   kHeaderValue = 5
   kContentWasTruncated = 6
+  kExternalBytesSent = 7
+  kExternalBytesReceived = 8
 
   _TEXT = (
    "ErrorCode",
@@ -711,6 +798,8 @@
    "Key",
    "Value",
    "ContentWasTruncated",
+   "ExternalBytesSent",
+   "ExternalBytesReceived",
   )
 
   _TYPES = (
@@ -727,6 +816,10 @@
 
    ProtocolBuffer.Encoder.NUMERIC,
 
+   ProtocolBuffer.Encoder.NUMERIC,
+
+   ProtocolBuffer.Encoder.NUMERIC,
+
   )
 
   _STYLE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Tue May 12 15:39:52 2009 +0200
@@ -51,7 +51,6 @@
 
 
 _UNTRUSTED_REQUEST_HEADERS = frozenset([
-  'accept-encoding',
   'content-length',
   'host',
   'referer',
@@ -112,13 +111,17 @@
                                                   request.header_list())
     request.clear_header()
     request.header_list().extend(sanitized_headers)
+    deadline = _API_CALL_DEADLINE
+    if request.has_deadline():
+      deadline = request.deadline()
 
     self._RetrieveURL(request.url(), payload, method,
                       request.header_list(), response,
-                      follow_redirects=request.followredirects())
+                      follow_redirects=request.followredirects(),
+                      deadline=deadline)
 
   def _RetrieveURL(self, url, payload, method, headers, response,
-                   follow_redirects=True):
+                   follow_redirects=True, deadline=_API_CALL_DEADLINE):
     """Retrieves a URL.
 
     Args:
@@ -129,6 +132,7 @@
       response: Response object
       follow_redirects: optional setting (defaulting to True) for whether or not
         we should transparently follow redirects (up to MAX_REDIRECTS)
+      deadline: Number of seconds to wait for the urlfetch to finish.
 
     Raises:
       Raises an apiproxy_errors.ApplicationError exception with FETCH_ERROR
@@ -195,7 +199,7 @@
 
         orig_timeout = socket.getdefaulttimeout()
         try:
-          socket.setdefaulttimeout(_API_CALL_DEADLINE)
+          socket.setdefaulttimeout(deadline)
           connection.request(method, full_path, payload, adjusted_headers)
           http_response = connection.getresponse()
           http_response_data = http_response.read()
@@ -238,4 +242,9 @@
       untrusted_headers: set of untrusted headers names
       headers: list of string pairs, first is header name and the second is header's value
     """
+    prohibited_headers = [h.key() for h in headers
+                          if h.key().lower() in untrusted_headers]
+    if prohibited_headers:
+      logging.warn("Stripped prohibited headers from URLFetch request: %s",
+                   prohibited_headers)
     return (h for h in headers if h.key().lower() not in untrusted_headers)
--- a/thirdparty/google_appengine/google/appengine/api/user_service_pb.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/user_service_pb.py	Tue May 12 15:39:52 2009 +0200
@@ -22,7 +22,7 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import StringProto
+from google.appengine.api.api_base_pb import *
 class UserServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
--- a/thirdparty/google_appengine/google/appengine/cron/GrocLexer.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/cron/GrocLexer.py	Tue May 12 15:39:52 2009 +0200
@@ -23,39 +23,40 @@
 HIDDEN = BaseRecognizer.HIDDEN
 
 THIRD=12
-SEPTEMBER=34
+SEPTEMBER=35
 FOURTH=13
 SECOND=11
-WEDNESDAY=20
-NOVEMBER=36
-SATURDAY=23
-JULY=32
-APRIL=29
+WEDNESDAY=21
+NOVEMBER=37
+SATURDAY=24
+JULY=33
+APRIL=30
 DIGITS=8
-OCTOBER=35
-MAY=30
+OCTOBER=36
+MAY=31
 EVERY=6
-FEBRUARY=27
-MONDAY=18
-SUNDAY=24
-JUNE=31
+FEBRUARY=28
+MONDAY=19
+SUNDAY=25
+DAY=18
+JUNE=32
 OF=4
-MARCH=28
+MARCH=29
 EOF=-1
-JANUARY=26
-MONTH=25
-FRIDAY=22
+JANUARY=27
+MONTH=26
+FRIDAY=23
 MINUTES=17
 FIFTH=14
 TIME=5
-WS=39
-QUARTER=38
-THURSDAY=21
+WS=40
+QUARTER=39
+THURSDAY=22
 COMMA=9
-DECEMBER=37
-AUGUST=33
+DECEMBER=38
+AUGUST=34
 DIGIT=7
-TUESDAY=19
+TUESDAY=20
 HOURS=16
 FOURTH_OR_FIFTH=15
 FIRST=10
@@ -100,10 +101,10 @@
             if LA1 == 48:
                 LA1_1 = self.input.LA(2)
 
-                if ((48 <= LA1_1 <= 57)) :
+                if (LA1_1 == 58) :
+                    alt1 = 1
+                elif ((48 <= LA1_1 <= 57)) :
                     alt1 = 2
-                elif (LA1_1 == 58) :
-                    alt1 = 1
                 else:
                     nvae = NoViableAltException("", 1, 1, self.input)
 
@@ -112,10 +113,10 @@
             elif LA1 == 49:
                 LA1_2 = self.input.LA(2)
 
-                if ((48 <= LA1_2 <= 57)) :
+                if (LA1_2 == 58) :
+                    alt1 = 1
+                elif ((48 <= LA1_2 <= 57)) :
                     alt1 = 3
-                elif (LA1_2 == 58) :
-                    alt1 = 1
                 else:
                     nvae = NoViableAltException("", 1, 2, self.input)
 
@@ -124,7 +125,7 @@
             elif LA1 == 50:
                 LA1_3 = self.input.LA(2)
 
-                if ((48 <= LA1_3 <= 52)) :
+                if ((48 <= LA1_3 <= 51)) :
                     alt1 = 4
                 elif (LA1_3 == 58) :
                     alt1 = 1
@@ -169,7 +170,7 @@
                 pass
                 pass
                 self.match(50)
-                self.matchRange(48, 52)
+                self.matchRange(48, 51)
 
 
 
@@ -436,6 +437,27 @@
 
 
 
+    def mDAY(self, ):
+
+        try:
+            _type = DAY
+            _channel = DEFAULT_CHANNEL
+
+            pass
+            self.match("day")
+
+
+
+            self._state.type = _type
+            self._state.channel = _channel
+
+        finally:
+
+            pass
+
+
+
+
     def mMONDAY(self, ):
 
         try:
@@ -1330,7 +1352,7 @@
 
 
     def mTokens(self):
-        alt25 = 36
+        alt25 = 37
         alt25 = self.dfa25.predict(self.input)
         if alt25 == 1:
             pass
@@ -1369,146 +1391,151 @@
 
         elif alt25 == 8:
             pass
-            self.mMONDAY()
+            self.mDAY()
 
 
         elif alt25 == 9:
             pass
-            self.mTUESDAY()
+            self.mMONDAY()
 
 
         elif alt25 == 10:
             pass
-            self.mWEDNESDAY()
+            self.mTUESDAY()
 
 
         elif alt25 == 11:
             pass
-            self.mTHURSDAY()
+            self.mWEDNESDAY()
 
 
         elif alt25 == 12:
             pass
-            self.mFRIDAY()
+            self.mTHURSDAY()
 
 
         elif alt25 == 13:
             pass
-            self.mSATURDAY()
+            self.mFRIDAY()
 
 
         elif alt25 == 14:
             pass
-            self.mSUNDAY()
+            self.mSATURDAY()
 
 
         elif alt25 == 15:
             pass
-            self.mJANUARY()
+            self.mSUNDAY()
 
 
         elif alt25 == 16:
             pass
-            self.mFEBRUARY()
+            self.mJANUARY()
 
 
         elif alt25 == 17:
             pass
-            self.mMARCH()
+            self.mFEBRUARY()
 
 
         elif alt25 == 18:
             pass
-            self.mAPRIL()
+            self.mMARCH()
 
 
         elif alt25 == 19:
             pass
-            self.mMAY()
+            self.mAPRIL()
 
 
         elif alt25 == 20:
             pass
-            self.mJUNE()
+            self.mMAY()
 
 
         elif alt25 == 21:
             pass
-            self.mJULY()
+            self.mJUNE()
 
 
         elif alt25 == 22:
             pass
-            self.mAUGUST()
+            self.mJULY()
 
 
         elif alt25 == 23:
             pass
-            self.mSEPTEMBER()
+            self.mAUGUST()
 
 
         elif alt25 == 24:
             pass
-            self.mOCTOBER()
+            self.mSEPTEMBER()
 
 
         elif alt25 == 25:
             pass
-            self.mNOVEMBER()
+            self.mOCTOBER()
 
 
         elif alt25 == 26:
             pass
-            self.mDECEMBER()
+            self.mNOVEMBER()
 
 
         elif alt25 == 27:
             pass
-            self.mMONTH()
+            self.mDECEMBER()
 
 
         elif alt25 == 28:
             pass
-            self.mQUARTER()
+            self.mMONTH()
 
 
         elif alt25 == 29:
             pass
-            self.mEVERY()
+            self.mQUARTER()
 
 
         elif alt25 == 30:
             pass
-            self.mHOURS()
+            self.mEVERY()
 
 
         elif alt25 == 31:
             pass
-            self.mMINUTES()
+            self.mHOURS()
 
 
         elif alt25 == 32:
             pass
-            self.mCOMMA()
+            self.mMINUTES()
 
 
         elif alt25 == 33:
             pass
-            self.mOF()
+            self.mCOMMA()
 
 
         elif alt25 == 34:
             pass
-            self.mWS()
+            self.mOF()
 
 
         elif alt25 == 35:
             pass
-            self.mDIGIT()
+            self.mWS()
 
 
         elif alt25 == 36:
             pass
+            self.mDIGIT()
+
+
+        elif alt25 == 37:
+            pass
             self.mDIGITS()
 
 
@@ -1519,73 +1546,74 @@
 
 
     DFA25_eot = DFA.unpack(
-        u"\1\uffff\4\27\2\uffff\1\27\1\uffff\2\27\16\uffff\1\36\1\uffff\2"
-        u"\36\31\uffff\1\74\6\uffff"
+        u"\1\uffff\4\30\2\uffff\1\30\1\uffff\2\30\14\uffff\1\36\3\uffff\2"
+        u"\36\33\uffff\1\76\6\uffff"
         )
 
     DFA25_eof = DFA.unpack(
-        u"\75\uffff"
+        u"\77\uffff"
         )
 
     DFA25_min = DFA.unpack(
-        u"\1\11\4\60\1\145\1\141\1\60\1\150\2\60\1\141\1\uffff\1\141\1\160"
-        u"\1\143\11\uffff\1\72\1\uffff\2\72\3\uffff\1\146\3\uffff\1\143\3"
-        u"\uffff\1\151\2\uffff\1\156\1\162\2\uffff\1\154\6\uffff\1\164\6"
+        u"\1\11\4\60\1\145\1\141\1\60\1\150\2\60\2\141\1\uffff\1\141\1\160"
+        u"\1\143\6\uffff\1\72\3\uffff\2\72\3\uffff\1\146\3\uffff\1\143\3"
+        u"\uffff\1\151\4\uffff\1\156\1\162\2\uffff\1\154\6\uffff\1\164\6"
         u"\uffff"
         )
 
     DFA25_max = DFA.unpack(
-        u"\1\167\1\72\1\163\1\156\2\162\1\165\1\164\1\165\1\164\1\72\1\157"
-        u"\1\uffff\2\165\1\146\11\uffff\1\72\1\uffff\2\72\3\uffff\1\162\3"
-        u"\uffff\1\160\3\uffff\1\165\2\uffff\1\156\1\171\2\uffff\1\156\6"
-        u"\uffff\1\164\6\uffff"
+        u"\1\167\1\72\1\163\1\156\2\162\1\165\1\164\1\165\1\164\1\72\1\145"
+        u"\1\157\1\uffff\2\165\1\146\6\uffff\1\72\3\uffff\2\72\3\uffff\1"
+        u"\162\3\uffff\1\160\3\uffff\1\165\4\uffff\1\156\1\171\2\uffff\1"
+        u"\156\6\uffff\1\164\6\uffff"
         )
 
     DFA25_accept = DFA.unpack(
-        u"\14\uffff\1\12\3\uffff\1\31\1\32\1\34\1\35\1\36\1\40\1\42\1\43"
-        u"\1\1\1\uffff\1\2\2\uffff\1\3\1\44\1\4\1\uffff\1\7\1\14\1\20\1\uffff"
-        u"\1\15\1\16\1\5\1\uffff\1\11\1\6\2\uffff\1\37\1\17\1\uffff\1\22"
-        u"\1\26\1\30\1\41\1\27\1\13\1\uffff\1\21\1\23\1\24\1\25\1\33\1\10"
+        u"\15\uffff\1\13\3\uffff\1\32\1\35\1\36\1\37\1\41\1\43\1\uffff\1"
+        u"\44\1\1\1\2\2\uffff\1\3\1\45\1\4\1\uffff\1\7\1\15\1\21\1\uffff"
+        u"\1\16\1\17\1\5\1\uffff\1\12\1\6\1\10\1\33\2\uffff\1\40\1\20\1\uffff"
+        u"\1\23\1\27\1\31\1\42\1\30\1\14\1\uffff\1\22\1\24\1\25\1\26\1\34"
+        u"\1\11"
         )
 
     DFA25_special = DFA.unpack(
-        u"\75\uffff"
+        u"\77\uffff"
         )
 
 
     DFA25_transition = [
         DFA.unpack(u"\2\26\2\uffff\1\26\22\uffff\1\26\13\uffff\1\25\3\uffff"
-        u"\1\1\1\2\1\3\1\4\1\7\1\11\4\12\47\uffff\1\16\2\uffff\1\21\1\23"
-        u"\1\5\1\uffff\1\24\1\uffff\1\15\2\uffff\1\13\1\20\1\17\1\uffff\1"
-        u"\22\1\uffff\1\6\1\10\2\uffff\1\14"),
-        DFA.unpack(u"\12\31\1\30"),
-        DFA.unpack(u"\12\33\1\30\70\uffff\1\32"),
-        DFA.unpack(u"\5\34\5\36\1\30\63\uffff\1\35"),
-        DFA.unpack(u"\12\36\1\30\67\uffff\1\37"),
+        u"\1\1\1\2\1\3\1\4\1\7\1\11\4\12\47\uffff\1\17\2\uffff\1\13\1\23"
+        u"\1\5\1\uffff\1\24\1\uffff\1\16\2\uffff\1\14\1\21\1\20\1\uffff\1"
+        u"\22\1\uffff\1\6\1\10\2\uffff\1\15"),
+        DFA.unpack(u"\12\27\1\31"),
+        DFA.unpack(u"\12\33\1\31\70\uffff\1\32"),
+        DFA.unpack(u"\4\34\6\36\1\31\63\uffff\1\35"),
+        DFA.unpack(u"\12\36\1\31\67\uffff\1\37"),
         DFA.unpack(u"\1\43\3\uffff\1\40\5\uffff\1\41\2\uffff\1\42"),
         DFA.unpack(u"\1\45\3\uffff\1\44\17\uffff\1\46"),
-        DFA.unpack(u"\12\36\1\30\71\uffff\1\47"),
+        DFA.unpack(u"\12\36\1\31\71\uffff\1\47"),
         DFA.unpack(u"\1\50\14\uffff\1\51"),
-        DFA.unpack(u"\12\36\1\30\71\uffff\1\52"),
-        DFA.unpack(u"\12\36\1\30"),
-        DFA.unpack(u"\1\54\7\uffff\1\55\5\uffff\1\53"),
+        DFA.unpack(u"\12\36\1\31\71\uffff\1\52"),
+        DFA.unpack(u"\12\36\1\31"),
+        DFA.unpack(u"\1\53\3\uffff\1\54"),
+        DFA.unpack(u"\1\56\7\uffff\1\57\5\uffff\1\55"),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\56\23\uffff\1\57"),
-        DFA.unpack(u"\1\60\4\uffff\1\61"),
-        DFA.unpack(u"\1\62\2\uffff\1\63"),
+        DFA.unpack(u"\1\60\23\uffff\1\61"),
+        DFA.unpack(u"\1\62\4\uffff\1\63"),
+        DFA.unpack(u"\1\64\2\uffff\1\65"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
+        DFA.unpack(u"\1\31"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\30"),
-        DFA.unpack(u""),
-        DFA.unpack(u"\1\30"),
-        DFA.unpack(u"\1\30"),
+        DFA.unpack(u"\1\31"),
+        DFA.unpack(u"\1\31"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
@@ -1593,25 +1621,27 @@
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\35\14\uffff\1\64"),
+        DFA.unpack(u"\1\35\14\uffff\1\66"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\37\13\uffff\1\65"),
+        DFA.unpack(u"\1\37\13\uffff\1\67"),
+        DFA.unpack(u""),
+        DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\66"),
-        DFA.unpack(u"\1\67\6\uffff\1\70"),
+        DFA.unpack(u"\1\70"),
+        DFA.unpack(u"\1\71\6\uffff\1\72"),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\72\1\uffff\1\71"),
+        DFA.unpack(u"\1\74\1\uffff\1\73"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
-        DFA.unpack(u"\1\73"),
+        DFA.unpack(u"\1\75"),
         DFA.unpack(u""),
         DFA.unpack(u""),
         DFA.unpack(u""),
--- a/thirdparty/google_appengine/google/appengine/cron/GrocParser.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/cron/GrocParser.py	Tue May 12 15:39:52 2009 +0200
@@ -32,39 +32,40 @@
 HIDDEN = BaseRecognizer.HIDDEN
 
 THIRD=12
-SEPTEMBER=34
+SEPTEMBER=35
 FOURTH=13
 SECOND=11
-WEDNESDAY=20
-NOVEMBER=36
-SATURDAY=23
-JULY=32
-APRIL=29
+WEDNESDAY=21
+NOVEMBER=37
+SATURDAY=24
+JULY=33
+APRIL=30
 DIGITS=8
-OCTOBER=35
-MAY=30
+OCTOBER=36
+MAY=31
 EVERY=6
-FEBRUARY=27
-MONDAY=18
-SUNDAY=24
-JUNE=31
-MARCH=28
+FEBRUARY=28
+MONDAY=19
+SUNDAY=25
+JUNE=32
+DAY=18
+MARCH=29
 OF=4
 EOF=-1
-JANUARY=26
-MONTH=25
-FRIDAY=22
+JANUARY=27
+MONTH=26
+FRIDAY=23
 FIFTH=14
 MINUTES=17
 TIME=5
-WS=39
-QUARTER=38
-THURSDAY=21
+WS=40
+QUARTER=39
+THURSDAY=22
 COMMA=9
-DECEMBER=37
-AUGUST=33
+DECEMBER=38
+AUGUST=34
 DIGIT=7
-TUESDAY=19
+TUESDAY=20
 HOURS=16
 FIRST=10
 FOURTH_OR_FIFTH=15
@@ -72,10 +73,10 @@
 tokenNames = [
     "<invalid>", "<EOR>", "<DOWN>", "<UP>",
     "OF", "TIME", "EVERY", "DIGIT", "DIGITS", "COMMA", "FIRST", "SECOND",
-    "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES", "MONDAY",
-    "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY", "SUNDAY",
-    "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE", "JULY",
-    "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
+    "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES", "DAY",
+    "MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY",
+    "SUNDAY", "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE",
+    "JULY", "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
     "WS"
 ]
 
@@ -95,6 +96,17 @@
         Parser.__init__(self, input, state)
 
 
+        self.dfa3 = self.DFA3(
+            self, 3,
+            eot = self.DFA3_eot,
+            eof = self.DFA3_eof,
+            min = self.DFA3_min,
+            max = self.DFA3_max,
+            accept = self.DFA3_accept,
+            special = self.DFA3_special,
+            transition = self.DFA3_transition
+            )
+
 
 
 
@@ -160,7 +172,7 @@
 
                     if ((DIGIT <= LA1_1 <= DIGITS)) :
                         alt1 = 2
-                    elif ((MONDAY <= LA1_1 <= SUNDAY)) :
+                    elif ((DAY <= LA1_1 <= SUNDAY)) :
                         alt1 = 1
                     else:
                         nvae = NoViableAltException("", 1, 1, self.input)
@@ -214,57 +226,77 @@
             try:
                 pass
                 pass
-                pass
-                pass
-                pass
-                self._state.following.append(self.FOLLOW_ordinals_in_specifictime69)
-                self.ordinals()
+                alt3 = 2
+                alt3 = self.dfa3.predict(self.input)
+                if alt3 == 1:
+                    pass
+                    pass
+                    pass
+                    self._state.following.append(self.FOLLOW_ordinals_in_specifictime69)
+                    self.ordinals()
+
+                    self._state.following.pop()
+                    self._state.following.append(self.FOLLOW_weekdays_in_specifictime71)
+                    self.weekdays()
+
+                    self._state.following.pop()
+
+
+
+                    self.match(self.input, OF, self.FOLLOW_OF_in_specifictime74)
+                    alt2 = 2
+                    LA2_0 = self.input.LA(1)
 
-                self._state.following.pop()
-                self._state.following.append(self.FOLLOW_weekdays_in_specifictime71)
-                self.weekdays()
+                    if ((MONTH <= LA2_0 <= DECEMBER)) :
+                        alt2 = 1
+                    elif ((FIRST <= LA2_0 <= THIRD) or LA2_0 == QUARTER) :
+                        alt2 = 2
+                    else:
+                        nvae = NoViableAltException("", 2, 0, self.input)
+
+                        raise nvae
 
-                self._state.following.pop()
+                    if alt2 == 1:
+                        pass
+                        self._state.following.append(self.FOLLOW_monthspec_in_specifictime77)
+                        self.monthspec()
+
+                        self._state.following.pop()
+
+
+                    elif alt2 == 2:
+                        pass
+                        self._state.following.append(self.FOLLOW_quarterspec_in_specifictime79)
+                        self.quarterspec()
+
+                        self._state.following.pop()
 
 
 
 
 
 
-                self.match(self.input, OF, self.FOLLOW_OF_in_specifictime75)
-                alt2 = 2
-                LA2_0 = self.input.LA(1)
+
 
-                if ((MONTH <= LA2_0 <= DECEMBER)) :
-                    alt2 = 1
-                elif ((FIRST <= LA2_0 <= THIRD) or LA2_0 == QUARTER) :
-                    alt2 = 2
-                else:
-                    nvae = NoViableAltException("", 2, 0, self.input)
-
-                    raise nvae
-
-                if alt2 == 1:
+                elif alt3 == 2:
+                    pass
                     pass
-                    self._state.following.append(self.FOLLOW_monthspec_in_specifictime78)
-                    self.monthspec()
+                    self._state.following.append(self.FOLLOW_ordinals_in_specifictime96)
+                    self.ordinals()
 
                     self._state.following.pop()
-
-
-                elif alt2 == 2:
-                    pass
-                    self._state.following.append(self.FOLLOW_quarterspec_in_specifictime80)
-                    self.quarterspec()
+                    self._state.following.append(self.FOLLOW_weekdays_in_specifictime98)
+                    self.weekdays()
 
                     self._state.following.pop()
+                    self.month_set = set(range(1,13))
 
 
 
 
 
 
-                TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime93)
+                TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime112)
                 self.time_string = TIME1.text
 
 
@@ -294,7 +326,7 @@
             try:
                 pass
                 pass
-                self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval112)
+                self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval131)
                 intervalnum = self.input.LT(1)
                 if (DIGIT <= self.input.LA(1) <= DIGITS):
                     self.input.consume()
@@ -308,7 +340,7 @@
 
                 self.interval_mins = int(intervalnum.text)
 
-                self._state.following.append(self.FOLLOW_period_in_interval138)
+                self._state.following.append(self.FOLLOW_period_in_interval157)
                 period2 = self.period()
 
                 self._state.following.pop()
@@ -341,43 +373,43 @@
         try:
             try:
                 pass
-                alt4 = 2
-                LA4_0 = self.input.LA(1)
+                alt5 = 2
+                LA5_0 = self.input.LA(1)
 
-                if (LA4_0 == EVERY) :
-                    alt4 = 1
-                elif ((FIRST <= LA4_0 <= FOURTH_OR_FIFTH)) :
-                    alt4 = 2
+                if (LA5_0 == EVERY) :
+                    alt5 = 1
+                elif ((FIRST <= LA5_0 <= FOURTH_OR_FIFTH)) :
+                    alt5 = 2
                 else:
-                    nvae = NoViableAltException("", 4, 0, self.input)
+                    nvae = NoViableAltException("", 5, 0, self.input)
 
                     raise nvae
 
-                if alt4 == 1:
+                if alt5 == 1:
                     pass
-                    self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals157)
+                    self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals176)
                     self.ordinal_set = self.ordinal_set.union(allOrdinals)
 
 
-                elif alt4 == 2:
+                elif alt5 == 2:
                     pass
                     pass
-                    self._state.following.append(self.FOLLOW_ordinal_in_ordinals173)
+                    self._state.following.append(self.FOLLOW_ordinal_in_ordinals192)
                     self.ordinal()
 
                     self._state.following.pop()
                     while True:
-                        alt3 = 2
-                        LA3_0 = self.input.LA(1)
+                        alt4 = 2
+                        LA4_0 = self.input.LA(1)
 
-                        if (LA3_0 == COMMA) :
-                            alt3 = 1
+                        if (LA4_0 == COMMA) :
+                            alt4 = 1
 
 
-                        if alt3 == 1:
+                        if alt4 == 1:
                             pass
-                            self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals176)
-                            self._state.following.append(self.FOLLOW_ordinal_in_ordinals178)
+                            self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals195)
+                            self._state.following.append(self.FOLLOW_ordinal_in_ordinals197)
                             self.ordinal()
 
                             self._state.following.pop()
@@ -489,30 +521,58 @@
         try:
             try:
                 pass
-                pass
-                self._state.following.append(self.FOLLOW_weekday_in_weekdays261)
-                self.weekday()
+                alt7 = 2
+                LA7_0 = self.input.LA(1)
+
+                if (LA7_0 == DAY) :
+                    alt7 = 1
+                elif ((MONDAY <= LA7_0 <= SUNDAY)) :
+                    alt7 = 2
+                else:
+                    nvae = NoViableAltException("", 7, 0, self.input)
 
-                self._state.following.pop()
-                while True:
-                    alt5 = 2
-                    LA5_0 = self.input.LA(1)
+                    raise nvae
+
+                if alt7 == 1:
+                    pass
+                    self.match(self.input, DAY, self.FOLLOW_DAY_in_weekdays280)
 
-                    if (LA5_0 == COMMA) :
-                        alt5 = 1
+                    self.weekday_set = set([self.ValueOf(SUNDAY), self.ValueOf(MONDAY),
+                            self.ValueOf(TUESDAY), self.ValueOf(WEDNESDAY),
+                            self.ValueOf(THURSDAY), self.ValueOf(FRIDAY),
+                            self.ValueOf(SATURDAY)])
+
 
 
-                    if alt5 == 1:
-                        pass
-                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays264)
-                        self._state.following.append(self.FOLLOW_weekday_in_weekdays266)
-                        self.weekday()
+                elif alt7 == 2:
+                    pass
+                    pass
+                    self._state.following.append(self.FOLLOW_weekday_in_weekdays288)
+                    self.weekday()
 
-                        self._state.following.pop()
+                    self._state.following.pop()
+                    while True:
+                        alt6 = 2
+                        LA6_0 = self.input.LA(1)
+
+                        if (LA6_0 == COMMA) :
+                            alt6 = 1
 
 
-                    else:
-                        break
+                        if alt6 == 1:
+                            pass
+                            self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays291)
+                            self._state.following.append(self.FOLLOW_weekday_in_weekdays293)
+                            self.weekday()
+
+                            self._state.following.pop()
+
+
+                        else:
+                            break
+
+
+
 
 
 
@@ -573,21 +633,21 @@
         try:
             try:
                 pass
-                alt6 = 2
-                LA6_0 = self.input.LA(1)
+                alt8 = 2
+                LA8_0 = self.input.LA(1)
 
-                if (LA6_0 == MONTH) :
-                    alt6 = 1
-                elif ((JANUARY <= LA6_0 <= DECEMBER)) :
-                    alt6 = 2
+                if (LA8_0 == MONTH) :
+                    alt8 = 1
+                elif ((JANUARY <= LA8_0 <= DECEMBER)) :
+                    alt8 = 2
                 else:
-                    nvae = NoViableAltException("", 6, 0, self.input)
+                    nvae = NoViableAltException("", 8, 0, self.input)
 
                     raise nvae
 
-                if alt6 == 1:
+                if alt8 == 1:
                     pass
-                    self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec344)
+                    self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec373)
 
                     self.month_set = self.month_set.union(set([
                         self.ValueOf(JANUARY), self.ValueOf(FEBRUARY), self.ValueOf(MARCH),
@@ -598,9 +658,9 @@
 
 
 
-                elif alt6 == 2:
+                elif alt8 == 2:
                     pass
-                    self._state.following.append(self.FOLLOW_months_in_monthspec354)
+                    self._state.following.append(self.FOLLOW_months_in_monthspec383)
                     self.months()
 
                     self._state.following.pop()
@@ -628,22 +688,22 @@
             try:
                 pass
                 pass
-                self._state.following.append(self.FOLLOW_month_in_months371)
+                self._state.following.append(self.FOLLOW_month_in_months400)
                 self.month()
 
                 self._state.following.pop()
                 while True:
-                    alt7 = 2
-                    LA7_0 = self.input.LA(1)
+                    alt9 = 2
+                    LA9_0 = self.input.LA(1)
 
-                    if (LA7_0 == COMMA) :
-                        alt7 = 1
+                    if (LA9_0 == COMMA) :
+                        alt9 = 1
 
 
-                    if alt7 == 1:
+                    if alt9 == 1:
                         pass
-                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months374)
-                        self._state.following.append(self.FOLLOW_month_in_months376)
+                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months403)
+                        self._state.following.append(self.FOLLOW_month_in_months405)
                         self.month()
 
                         self._state.following.pop()
@@ -709,37 +769,37 @@
         try:
             try:
                 pass
-                alt8 = 2
-                LA8_0 = self.input.LA(1)
+                alt10 = 2
+                LA10_0 = self.input.LA(1)
 
-                if (LA8_0 == QUARTER) :
-                    alt8 = 1
-                elif ((FIRST <= LA8_0 <= THIRD)) :
-                    alt8 = 2
+                if (LA10_0 == QUARTER) :
+                    alt10 = 1
+                elif ((FIRST <= LA10_0 <= THIRD)) :
+                    alt10 = 2
                 else:
-                    nvae = NoViableAltException("", 8, 0, self.input)
+                    nvae = NoViableAltException("", 10, 0, self.input)
 
                     raise nvae
 
-                if alt8 == 1:
+                if alt10 == 1:
                     pass
-                    self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec468)
+                    self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec497)
 
                     self.month_set = self.month_set.union(set([
                         self.ValueOf(JANUARY), self.ValueOf(APRIL), self.ValueOf(JULY),
                         self.ValueOf(OCTOBER)]))
 
 
-                elif alt8 == 2:
+                elif alt10 == 2:
                     pass
                     pass
-                    self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec480)
+                    self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec509)
                     self.quarter_ordinals()
 
                     self._state.following.pop()
-                    self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec482)
-                    self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec484)
-                    self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec486)
+                    self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec511)
+                    self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec513)
+                    self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec515)
 
 
 
@@ -767,22 +827,22 @@
             try:
                 pass
                 pass
-                self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals505)
+                self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals534)
                 self.month_of_quarter_ordinal()
 
                 self._state.following.pop()
                 while True:
-                    alt9 = 2
-                    LA9_0 = self.input.LA(1)
+                    alt11 = 2
+                    LA11_0 = self.input.LA(1)
 
-                    if (LA9_0 == COMMA) :
-                        alt9 = 1
+                    if (LA11_0 == COMMA) :
+                        alt11 = 1
 
 
-                    if alt9 == 1:
+                    if alt11 == 1:
                         pass
-                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals508)
-                        self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals510)
+                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals537)
+                        self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals539)
                         self.month_of_quarter_ordinal()
 
                         self._state.following.pop()
@@ -850,43 +910,88 @@
 
 
 
+    DFA3_eot = DFA.unpack(
+        u"\13\uffff"
+        )
+
+    DFA3_eof = DFA.unpack(
+        u"\13\uffff"
+        )
+
+    DFA3_min = DFA.unpack(
+        u"\1\6\1\22\1\11\2\4\1\12\2\uffff\1\23\1\11\1\4"
+        )
+
+    DFA3_max = DFA.unpack(
+        u"\1\17\2\31\1\5\1\11\1\17\2\uffff\2\31\1\11"
+        )
+
+    DFA3_accept = DFA.unpack(
+        u"\6\uffff\1\1\1\2\3\uffff"
+        )
+
+    DFA3_special = DFA.unpack(
+        u"\13\uffff"
+        )
+
+
+    DFA3_transition = [
+        DFA.unpack(u"\1\1\3\uffff\6\2"),
+        DFA.unpack(u"\1\3\7\4"),
+        DFA.unpack(u"\1\5\10\uffff\1\3\7\4"),
+        DFA.unpack(u"\1\6\1\7"),
+        DFA.unpack(u"\1\6\1\7\3\uffff\1\10"),
+        DFA.unpack(u"\6\11"),
+        DFA.unpack(u""),
+        DFA.unpack(u""),
+        DFA.unpack(u"\7\12"),
+        DFA.unpack(u"\1\5\10\uffff\1\3\7\4"),
+        DFA.unpack(u"\1\6\1\7\3\uffff\1\10")
+    ]
+
+
+    DFA3 = DFA
+
 
     FOLLOW_specifictime_in_timespec44 = frozenset([1])
     FOLLOW_interval_in_timespec48 = frozenset([1])
-    FOLLOW_ordinals_in_specifictime69 = frozenset([18, 19, 20, 21, 22, 23, 24])
+    FOLLOW_ordinals_in_specifictime69 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
     FOLLOW_weekdays_in_specifictime71 = frozenset([4])
-    FOLLOW_OF_in_specifictime75 = frozenset([10, 11, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
-    FOLLOW_monthspec_in_specifictime78 = frozenset([5])
-    FOLLOW_quarterspec_in_specifictime80 = frozenset([5])
-    FOLLOW_TIME_in_specifictime93 = frozenset([1])
-    FOLLOW_EVERY_in_interval112 = frozenset([7, 8])
-    FOLLOW_set_in_interval122 = frozenset([16, 17])
-    FOLLOW_period_in_interval138 = frozenset([1])
-    FOLLOW_EVERY_in_ordinals157 = frozenset([1])
-    FOLLOW_ordinal_in_ordinals173 = frozenset([1, 9])
-    FOLLOW_COMMA_in_ordinals176 = frozenset([10, 11, 12, 13, 14, 15])
-    FOLLOW_ordinal_in_ordinals178 = frozenset([1, 9])
-    FOLLOW_set_in_ordinal199 = frozenset([1])
-    FOLLOW_set_in_period238 = frozenset([1])
-    FOLLOW_weekday_in_weekdays261 = frozenset([1, 9])
-    FOLLOW_COMMA_in_weekdays264 = frozenset([18, 19, 20, 21, 22, 23, 24])
-    FOLLOW_weekday_in_weekdays266 = frozenset([1, 9])
-    FOLLOW_set_in_weekday285 = frozenset([1])
-    FOLLOW_MONTH_in_monthspec344 = frozenset([1])
-    FOLLOW_months_in_monthspec354 = frozenset([1])
-    FOLLOW_month_in_months371 = frozenset([1, 9])
-    FOLLOW_COMMA_in_months374 = frozenset([25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37])
-    FOLLOW_month_in_months376 = frozenset([1, 9])
-    FOLLOW_set_in_month395 = frozenset([1])
-    FOLLOW_QUARTER_in_quarterspec468 = frozenset([1])
-    FOLLOW_quarter_ordinals_in_quarterspec480 = frozenset([25])
-    FOLLOW_MONTH_in_quarterspec482 = frozenset([4])
-    FOLLOW_OF_in_quarterspec484 = frozenset([38])
-    FOLLOW_QUARTER_in_quarterspec486 = frozenset([1])
-    FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals505 = frozenset([1, 9])
-    FOLLOW_COMMA_in_quarter_ordinals508 = frozenset([10, 11, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
-    FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals510 = frozenset([1, 9])
-    FOLLOW_set_in_month_of_quarter_ordinal529 = frozenset([1])
+    FOLLOW_OF_in_specifictime74 = frozenset([10, 11, 12, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39])
+    FOLLOW_monthspec_in_specifictime77 = frozenset([5])
+    FOLLOW_quarterspec_in_specifictime79 = frozenset([5])
+    FOLLOW_ordinals_in_specifictime96 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
+    FOLLOW_weekdays_in_specifictime98 = frozenset([5])
+    FOLLOW_TIME_in_specifictime112 = frozenset([1])
+    FOLLOW_EVERY_in_interval131 = frozenset([7, 8])
+    FOLLOW_set_in_interval141 = frozenset([16, 17])
+    FOLLOW_period_in_interval157 = frozenset([1])
+    FOLLOW_EVERY_in_ordinals176 = frozenset([1])
+    FOLLOW_ordinal_in_ordinals192 = frozenset([1, 9])
+    FOLLOW_COMMA_in_ordinals195 = frozenset([10, 11, 12, 13, 14, 15])
+    FOLLOW_ordinal_in_ordinals197 = frozenset([1, 9])
+    FOLLOW_set_in_ordinal218 = frozenset([1])
+    FOLLOW_set_in_period257 = frozenset([1])
+    FOLLOW_DAY_in_weekdays280 = frozenset([1])
+    FOLLOW_weekday_in_weekdays288 = frozenset([1, 9])
+    FOLLOW_COMMA_in_weekdays291 = frozenset([18, 19, 20, 21, 22, 23, 24, 25])
+    FOLLOW_weekday_in_weekdays293 = frozenset([1, 9])
+    FOLLOW_set_in_weekday314 = frozenset([1])
+    FOLLOW_MONTH_in_monthspec373 = frozenset([1])
+    FOLLOW_months_in_monthspec383 = frozenset([1])
+    FOLLOW_month_in_months400 = frozenset([1, 9])
+    FOLLOW_COMMA_in_months403 = frozenset([26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
+    FOLLOW_month_in_months405 = frozenset([1, 9])
+    FOLLOW_set_in_month424 = frozenset([1])
+    FOLLOW_QUARTER_in_quarterspec497 = frozenset([1])
+    FOLLOW_quarter_ordinals_in_quarterspec509 = frozenset([26])
+    FOLLOW_MONTH_in_quarterspec511 = frozenset([4])
+    FOLLOW_OF_in_quarterspec513 = frozenset([39])
+    FOLLOW_QUARTER_in_quarterspec515 = frozenset([1])
+    FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals534 = frozenset([1, 9])
+    FOLLOW_COMMA_in_quarter_ordinals537 = frozenset([10, 11, 12, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39])
+    FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals539 = frozenset([1, 9])
+    FOLLOW_set_in_month_of_quarter_ordinal558 = frozenset([1])
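
Together with the lexer change, the parser now understands schedules phrased
with "day": the new DFA3 splits specifictime into an "ordinals weekdays of
month/quarter TIME" alternative and a bare "ordinals weekdays TIME" one that
defaults month_set to all twelve months, and the weekdays rule expands a DAY
token into the full weekday set. A hedged usage sketch through the wrapper
that appcfg.py itself calls further below (assumes the SDK is importable; the
schedule strings accepted are exactly those this grammar defines):

    import datetime
    from google.appengine.cron import groctimespecification

    # GetMatches is the same call CronInfo() makes in appcfg.py below.
    spec = groctimespecification.GrocTimeSpecification('every day 00:00')
    for match in spec.GetMatches(datetime.datetime.utcnow(), 3):
        print match
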
 
 
 
--- a/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Tue May 12 15:39:52 2009 +0200
@@ -380,6 +380,8 @@
   limit_ = 0
   has_require_perfect_plan_ = 0
   require_perfect_plan_ = 0
+  has_keys_only_ = 0
+  keys_only_ = 0
 
   def __init__(self, contents=None):
     self.filter_ = []
@@ -545,6 +547,19 @@
 
   def has_require_perfect_plan(self): return self.has_require_perfect_plan_
 
+  def keys_only(self): return self.keys_only_
+
+  def set_keys_only(self, x):
+    self.has_keys_only_ = 1
+    self.keys_only_ = x
+
+  def clear_keys_only(self):
+    if self.has_keys_only_:
+      self.has_keys_only_ = 0
+      self.keys_only_ = 0
+
+  def has_keys_only(self): return self.has_keys_only_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -559,6 +574,7 @@
     if (x.has_limit()): self.set_limit(x.limit())
     for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
     if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
+    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
 
   def Equals(self, x):
     if x is self: return 1
@@ -587,6 +603,8 @@
       if e1 != e2: return 0
     if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
     if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
+    if self.has_keys_only_ != x.has_keys_only_: return 0
+    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -620,6 +638,7 @@
     n += 2 * len(self.composite_index_)
     for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
     if (self.has_require_perfect_plan_): n += 3
+    if (self.has_keys_only_): n += 3
     return n + 1
 
   def Clear(self):
@@ -634,6 +653,7 @@
     self.clear_limit()
     self.clear_composite_index()
     self.clear_require_perfect_plan()
+    self.clear_keys_only()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -672,6 +692,9 @@
     if (self.has_require_perfect_plan_):
       out.putVarInt32(160)
       out.putBoolean(self.require_perfect_plan_)
+    if (self.has_keys_only_):
+      out.putVarInt32(168)
+      out.putBoolean(self.keys_only_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -715,6 +738,9 @@
       if tt == 160:
         self.set_require_perfect_plan(d.getBoolean())
         continue
+      if tt == 168:
+        self.set_keys_only(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -756,6 +782,7 @@
       res+=prefix+">\n"
       cnt+=1
     if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
+    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
     return res
 
   kapp = 1
@@ -773,6 +800,7 @@
   klimit = 16
   kcomposite_index = 19
   krequire_perfect_plan = 20
+  kkeys_only = 21
 
   _TEXT = (
    "ErrorCode",
@@ -796,6 +824,7 @@
    "hint",
    "composite_index",
    "require_perfect_plan",
+   "keys_only",
   )
 
   _TYPES = (
@@ -840,6 +869,8 @@
 
    ProtocolBuffer.Encoder.NUMERIC,
 
+   ProtocolBuffer.Encoder.NUMERIC,
+
   )
 
   _STYLE = """"""
@@ -2416,6 +2447,8 @@
   cursor_ = None
   has_more_results_ = 0
   more_results_ = 0
+  has_keys_only_ = 0
+  keys_only_ = 0
 
   def __init__(self, contents=None):
     self.result_ = []
@@ -2469,12 +2502,26 @@
 
   def has_more_results(self): return self.has_more_results_
 
+  def keys_only(self): return self.keys_only_
+
+  def set_keys_only(self, x):
+    self.has_keys_only_ = 1
+    self.keys_only_ = x
+
+  def clear_keys_only(self):
+    if self.has_keys_only_:
+      self.has_keys_only_ = 0
+      self.keys_only_ = 0
+
+  def has_keys_only(self): return self.has_keys_only_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
     for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
     if (x.has_more_results()): self.set_more_results(x.more_results())
+    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
 
   def Equals(self, x):
     if x is self: return 1
@@ -2485,6 +2532,8 @@
       if e1 != e2: return 0
     if self.has_more_results_ != x.has_more_results_: return 0
     if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
+    if self.has_keys_only_ != x.has_keys_only_: return 0
+    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2503,12 +2552,14 @@
     if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
     n += 1 * len(self.result_)
     for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
+    if (self.has_keys_only_): n += 2
     return n + 2
 
   def Clear(self):
     self.clear_cursor()
     self.clear_result()
     self.clear_more_results()
+    self.clear_keys_only()
 
   def OutputUnchecked(self, out):
     if (self.has_cursor_):
@@ -2521,6 +2572,9 @@
       self.result_[i].OutputUnchecked(out)
     out.putVarInt32(24)
     out.putBoolean(self.more_results_)
+    if (self.has_keys_only_):
+      out.putVarInt32(32)
+      out.putBoolean(self.keys_only_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2540,6 +2594,9 @@
       if tt == 24:
         self.set_more_results(d.getBoolean())
         continue
+      if tt == 32:
+        self.set_keys_only(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -2559,17 +2616,20 @@
       res+=prefix+">\n"
       cnt+=1
     if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
+    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
     return res
 
   kcursor = 1
   kresult = 2
   kmore_results = 3
+  kkeys_only = 4
 
   _TEXT = (
    "ErrorCode",
    "cursor",
    "result",
    "more_results",
+   "keys_only",
   )
 
   _TYPES = (
@@ -2580,6 +2640,8 @@
 
    ProtocolBuffer.Encoder.NUMERIC,
 
+   ProtocolBuffer.Encoder.NUMERIC,
+
   )
 
   _STYLE = """"""
--- a/thirdparty/google_appengine/google/appengine/datastore/entity_pb.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/entity_pb.py	Tue May 12 15:39:52 2009 +0200
@@ -1019,6 +1019,10 @@
       if debug_strs is not None:
         debug_strs.append('Required field: value not set.')
     elif not self.value_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_multiple_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: multiple not set.')
     return initialized
 
   def ByteSize(self):
@@ -1027,8 +1031,7 @@
     if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
     n += self.lengthString(len(self.name_))
     n += self.lengthString(self.value_.ByteSize())
-    if (self.has_multiple_): n += 2
-    return n + 2
+    return n + 4
 
   def Clear(self):
     self.clear_meaning()
@@ -1046,9 +1049,8 @@
       out.putPrefixedString(self.meaning_uri_)
     out.putVarInt32(26)
     out.putPrefixedString(self.name_)
-    if (self.has_multiple_):
-      out.putVarInt32(32)
-      out.putBoolean(self.multiple_)
+    out.putVarInt32(32)
+    out.putBoolean(self.multiple_)
     out.putVarInt32(42)
     out.putVarInt32(self.value_.ByteSize())
     self.value_.OutputUnchecked(out)
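
Here Property.multiple changes from optional to required, so OutputUnchecked
always emits it and ByteSize can fold the former conditional into a constant.
A short accounting sketch of why the trailing "+ 2" becomes "+ 4" (same tag
arithmetic as above):

    # Property.multiple is field 4, varint: its tag byte is (4 << 3) | 0 == 32.
    # One tag byte plus one boolean byte is 2 bytes, now always present, so
    # the old conditional "n += 2" merges into the unconditional "n + 4".
    assert (4 << 3) | 0 == 32
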
--- a/thirdparty/google_appengine/google/appengine/dist/socket.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/dist/socket.py	Tue May 12 15:39:52 2009 +0200
@@ -41,3 +41,5 @@
   if not hasattr(fp, 'fileno'):
     fp.fileno = lambda: None
   return fp
+
+ssl = None
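
This two-line stub change is what enables HTTPS in urllib: Python 2's urllib
only defines its https handling when hasattr(socket, 'ssl') is true, and an
attribute set to None satisfies that check (presumably while still failing
loudly if anything tries to call it directly). A standalone illustration of
the gating, plain Python rather than App Engine code:

    class _FakeSocketModule(object):
        pass

    print hasattr(_FakeSocketModule, 'ssl')   # False: no https path
    _FakeSocketModule.ssl = None
    print hasattr(_FakeSocketModule, 'ssl')   # True: the urllib check passes
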
--- a/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Tue May 12 15:39:52 2009 +0200
@@ -1101,6 +1101,9 @@
   def parse(self, value):
     return None
 
+  def python_type(self):
+    return None
+
   def format(self, value):
     return 'None'
 
--- a/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Tue May 12 15:39:52 2009 +0200
@@ -269,6 +269,9 @@
       model_class._properties[attr_name] = attr
       attr.__property_config__(model_class, attr_name)
 
+  model_class._unindexed_properties = frozenset(
+    name for name, prop in model_class._properties.items() if not prop.indexed)
+
 
 class PropertiedClass(type):
   """Meta-class for initializing Model classes properties.
@@ -336,8 +339,14 @@
 
   creation_counter = 0
 
-  def __init__(self, verbose_name=None, name=None, default=None,
-               required=False, validator=None, choices=None):
+  def __init__(self,
+               verbose_name=None,
+               name=None,
+               default=None,
+               required=False,
+               validator=None,
+               choices=None,
+               indexed=True):
     """Initializes this Property with the given options.
 
     Args:
@@ -348,6 +357,7 @@
       required: Whether property is required.
       validator: User provided method used for validation.
       choices: User provided set of valid property values.
+      indexed: Whether property is indexed.
     """
     self.verbose_name = verbose_name
     self.name = name
@@ -355,6 +365,7 @@
     self.required = required
     self.validator = validator
     self.choices = choices
+    self.indexed = indexed
     self.creation_counter = Property.creation_counter
     Property.creation_counter += 1
 
@@ -489,6 +500,21 @@
     """
     return value
 
+  def _require_parameter(self, kwds, parameter, value):
+    """Sets kwds[parameter] to value.
+
+    If kwds[parameter] exists and is not value, raises ConfigurationError.
+
+    Args:
+      kwds: The parameter dict, which maps parameter names (strings) to values.
+      parameter: The name of the parameter to set.
+      value: The value to set it to.
+    """
+    if parameter in kwds and kwds[parameter] != value:
+      raise ConfigurationError('%s must be %s.' % (parameter, value))
+
+    kwds[parameter] = value
+
   def _attr_name(self):
     """Attribute name we use for this property in model instances.
 
@@ -685,20 +711,15 @@
     if self.is_saved():
       entity = self._entity
     else:
+      kwds = {'_app': self._app,
+              'name': self._key_name,
+              'unindexed_properties': self._unindexed_properties}
+
       if self._parent_key is not None:
-        entity = _entity_class(self.kind(),
-                               parent=self._parent_key,
-                               name=self._key_name,
-                               _app=self._app)
+        kwds['parent'] = self._parent_key
       elif self._parent is not None:
-        entity = _entity_class(self.kind(),
-                               parent=self._parent._entity,
-                               name=self._key_name,
-                               _app=self._app)
-      else:
-        entity = _entity_class(self.kind(),
-                               name=self._key_name,
-                               _app=self._app)
+        kwds['parent'] = self._parent._entity
+      entity = _entity_class(self.kind(), **kwds)
 
     self._to_entity(entity)
     return entity
@@ -932,13 +953,13 @@
     return run_in_transaction(txn)
 
   @classmethod
-  def all(cls):
+  def all(cls, **kwds):
     """Returns a query over all instances of this model from the datastore.
 
     Returns:
       Query that will retrieve all instances from entity collection.
     """
-    return Query(cls)
+    return Query(cls, **kwds)
 
   @classmethod
   def gql(cls, query_string, *args, **kwds):
@@ -1300,13 +1321,23 @@
 class _BaseQuery(object):
   """Base class for both Query and GqlQuery."""
 
-  def __init__(self, model_class):
-    """Constructor."
-
-      Args:
-        model_class: Model class from which entities are constructed.
+  def __init__(self, model_class, keys_only=False):
+    """Constructor.
+
+    Args:
+      model_class: Model class from which entities are constructed.
+      keys_only: Whether the query should return full entities or only keys.
     """
     self._model_class = model_class
+    self._keys_only = keys_only
+
+  def is_keys_only(self):
+    """Returns whether this query is keys only.
+
+    Returns:
+      True if this query returns keys, False if it returns entities.
+    """
+    return self._keys_only
 
   def _get_query(self):
     """Subclass must override (and not call their super method).
@@ -1325,7 +1356,11 @@
     Returns:
       Iterator for this query.
     """
-    return _QueryIterator(self._model_class, iter(self._get_query().Run()))
+    iterator = self._get_query().Run()
+    if self._keys_only:
+      return iterator
+    else:
+      return _QueryIterator(self._model_class, iter(iterator))
 
   def __iter__(self):
     """Iterator for this query.
@@ -1388,7 +1423,11 @@
     if limit == 0:
       return []
     raw = self._get_query().Get(limit, offset)
-    return map(self._model_class.from_entity, raw)
+
+    if self._keys_only:
+      return raw
+    else:
+      return [self._model_class.from_entity(e) for e in raw]
 
   def __getitem__(self, arg):
     """Support for query[index] and query[start:stop].
@@ -1529,13 +1568,14 @@
        print story.title
   """
 
-  def __init__(self, model_class):
+  def __init__(self, model_class, keys_only=False):
     """Constructs a query over instances of the given Model.
 
     Args:
       model_class: Model class to build query for.
+      keys_only: Whether the query should return full entities or only keys.
     """
-    super(Query, self).__init__(model_class)
+    super(Query, self).__init__(model_class, keys_only)
     self.__query_sets = [{}]
     self.__orderings = []
     self.__ancestor = None
@@ -1545,7 +1585,10 @@
                  _multi_query_class=datastore.MultiQuery):
     queries = []
     for query_set in self.__query_sets:
-      query = _query_class(self._model_class.kind(), query_set)
+      query = _query_class(self._model_class.kind(),
+                           query_set,
+                           keys_only=self._keys_only)
+      query.Order(*self.__orderings)
       if self.__ancestor is not None:
         query.Ancestor(self.__ancestor)
       queries.append(query)
@@ -1566,7 +1609,6 @@
                              ' _multi_query_class is overridden.')
 
     if len(queries) == 1:
-      queries[0].Order(*self.__orderings)
       return queries[0]
     else:
       return _multi_query_class(queries, self.__orderings)
@@ -1611,6 +1653,9 @@
 
     Returns:
       Self to support method chaining.
+
+    Raises:
+      PropertyError if invalid property is provided.
     """
     match = _FILTER_REGEX.match(property_operator)
     prop = match.group(1)
@@ -1619,8 +1664,13 @@
     else:
       operator = '=='
 
+    if prop in self._model_class._unindexed_properties:
+      raise PropertyError('Property \'%s\' is not indexed' % prop)
+
     if operator.lower() == 'in':
-      if not isinstance(value, (list, tuple)):
+      if self._keys_only:
+        raise BadQueryError('Keys only queries do not support IN filters.')
+      elif not isinstance(value, (list, tuple)):
         raise BadValueError('Argument to the "in" operator must be a list')
       values = [_normalize_query_parameter(v) for v in value]
       self.__filter_disjunction(prop + ' =', values)
@@ -1628,6 +1678,8 @@
       if isinstance(value, (list, tuple)):
         raise BadValueError('Filtering on lists is not supported')
       if operator == '!=':
+        if self._keys_only:
+          raise BadQueryError('Keys only queries do not support != filters.')
         self.__filter_disjunction([prop + ' <', prop + ' >'],
                                   _normalize_query_parameter(value))
       else:
@@ -1650,7 +1702,7 @@
       Self to support method chaining.
 
     Raises:
-      PropertyError if invalid property name is provided.
+      PropertyError if invalid property is provided.
     """
     if property.startswith('-'):
       property = property[1:]
@@ -1663,6 +1715,9 @@
           property not in datastore_types._SPECIAL_PROPERTIES):
         raise PropertyError('Invalid property name \'%s\'' % property)
 
+    if property in self._model_class._unindexed_properties:
+      raise PropertyError('Property \'%s\' is not indexed' % property)
+
     self.__orderings.append((property, order))
     return self
 
@@ -1709,11 +1764,24 @@
       query_string: Properly formatted GQL query string.
       *args: Positional arguments used to bind numeric references in the query.
       **kwds: Dictionary-based arguments for named references.
+
+    Raises:
+      PropertyError if the query filters or sorts on a property that's not
+      indexed.
     """
     from google.appengine.ext import gql
     app = kwds.pop('_app', None)
+
     self._proto_query = gql.GQL(query_string, _app=app)
-    super(GqlQuery, self).__init__(class_for_kind(self._proto_query._entity))
+    model_class = class_for_kind(self._proto_query._entity)
+    super(GqlQuery, self).__init__(model_class,
+                                   keys_only=self._proto_query._keys_only)
+
+    for property, unused in (self._proto_query.filters().keys() +
+                             self._proto_query.orderings()):
+      if property in model_class._unindexed_properties:
+        raise PropertyError('Property \'%s\' is not indexed' % property)
+
     self.bind(*args, **kwds)
 
   def bind(self, *args, **kwds):
@@ -1740,39 +1808,56 @@
   def run(self):
     """Override _BaseQuery.run() so the LIMIT clause is handled properly."""
     query_run = self._proto_query.Run(*self._args, **self._kwds)
-    return _QueryIterator(self._model_class, iter(query_run))
+    if self._keys_only:
+      return query_run
+    else:
+      return _QueryIterator(self._model_class, iter(query_run))
 
   def _get_query(self):
     return self._proto_query.Bind(self._args, self._kwds)
 
 
-class TextProperty(Property):
-  """A string that can be longer than 500 bytes.
-
-  This type should be used for large text values to make sure the datastore
-  has good performance for queries.
+class UnindexedProperty(Property):
+  """A property that isn't indexed by either built-in or composite indices.
+
+  TextProperty and BlobProperty derive from this class.
   """
+  def __init__(self, *args, **kwds):
+    """Construct property. See the Property class for details.
+
+    Raises:
+      ConfigurationError if indexed=True.
+    """
+    self._require_parameter(kwds, 'indexed', False)
+    kwds['indexed'] = True
+    super(UnindexedProperty, self).__init__(*args, **kwds)
 
   def validate(self, value):
-    """Validate text property.
+    """Validate property.
 
     Returns:
       A valid value.
 
     Raises:
-      BadValueError if property is not instance of 'Text'.
+      BadValueError if property is not an instance of data_type.
     """
-    if value is not None and not isinstance(value, Text):
+    if value is not None and not isinstance(value, self.data_type):
       try:
-        value = Text(value)
+        value = self.data_type(value)
       except TypeError, err:
         raise BadValueError('Property %s must be convertible '
-                            'to a Text instance (%s)' % (self.name, err))
-    value = super(TextProperty, self).validate(value)
-    if value is not None and not isinstance(value, Text):
-      raise BadValueError('Property %s must be a Text instance' % self.name)
+                            'to a %s instance (%s)' %
+                            (self.name, self.data_type.__name__, err))
+    value = super(UnindexedProperty, self).validate(value)
+    if value is not None and not isinstance(value, self.data_type):
+      raise BadValueError('Property %s must be a %s instance' %
+                          (self.name, self.data_type.__name__))
     return value
 
+
+class TextProperty(UnindexedProperty):
+  """A string that can be longer than 500 bytes."""
+
   data_type = Text
 
 
@@ -1886,32 +1971,8 @@
   data_type = PostalAddress
 
 
-class BlobProperty(Property):
-  """A string that can be longer than 500 bytes.
-
-  This type should be used for large binary values to make sure the datastore
-  has good performance for queries.
-  """
-
-  def validate(self, value):
-    """Validate blob property.
-
-    Returns:
-      A valid value.
-
-    Raises:
-      BadValueError if property is not instance of 'Blob'.
-    """
-    if value is not None and not isinstance(value, Blob):
-      try:
-        value = Blob(value)
-      except TypeError, err:
-        raise BadValueError('Property %s must be convertible '
-                            'to a Blob instance (%s)' % (self.name, err))
-    value = super(BlobProperty, self).validate(value)
-    if value is not None and not isinstance(value, Blob):
-      raise BadValueError('Property %s must be a Blob instance' % self.name)
-    return value
+class BlobProperty(UnindexedProperty):
+  """A byte string that can be longer than 500 bytes."""
 
   data_type = Blob
 
@@ -2266,9 +2327,15 @@
 class UserProperty(Property):
   """A user property."""
 
-  def __init__(self, verbose_name=None, name=None,
-               required=False, validator=None, choices=None,
-               auto_current_user=False, auto_current_user_add=False):
+  def __init__(self,
+               verbose_name=None,
+               name=None,
+               required=False,
+               validator=None,
+               choices=None,
+               auto_current_user=False,
+               auto_current_user_add=False,
+               indexed=True):
     """Initializes this Property with the given options.
 
     Note: this does *not* support the 'default' keyword argument.
@@ -2285,11 +2352,13 @@
         each time the entity is written to the datastore.
       auto_current_user_add: If true, the value is set to the current user
         the first time the entity is written to the datastore.
+      indexed: Whether property is indexed.
     """
     super(UserProperty, self).__init__(verbose_name, name,
                                        required=required,
                                        validator=validator,
-                                       choices=choices)
+                                       choices=choices,
+                                       indexed=indexed)
     self.auto_current_user = auto_current_user
     self.auto_current_user_add = auto_current_user_add
 
@@ -2360,13 +2429,14 @@
       raise TypeError('Item type should be a type object')
     if item_type not in _ALLOWED_PROPERTY_TYPES:
       raise ValueError('Item type %s is not acceptable' % item_type.__name__)
-    if 'required' in kwds and kwds['required'] is not True:
-      raise ValueError('List values must be required')
+    if issubclass(item_type, (Blob, Text)):
+      self._require_parameter(kwds, 'indexed', False)
+      kwds['indexed'] = True
+    self._require_parameter(kwds, 'required', True)
     if default is None:
       default = []
     self.item_type = item_type
     super(ListProperty, self).__init__(verbose_name,
-                                       required=True,
                                        default=default,
                                        **kwds)
 
--- a/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py	Tue May 12 15:39:52 2009 +0200
@@ -77,7 +77,7 @@
 
   The syntax for SELECT is fairly straightforward:
 
-  SELECT * FROM <entity>
+  SELECT [* | __key__ ] FROM <entity>
     [WHERE <condition> [AND <condition> ...]]
     [ORDER BY <property> [ASC | DESC] [, <property> [ASC | DESC] ...]]
     [LIMIT [<offset>,]<count>]
@@ -144,9 +144,8 @@
       simple types (strings, integers, floats).
 
 
-  SELECT * will return an iterable set of entries, but other operations (schema
-  queries, updates, inserts or field selections) will return alternative
-  result types.
+  SELECT * will return an iterable set of entities; SELECT __key__ will return
+  an iterable set of Keys.
   """
 
   TOKENIZE_REGEX = re.compile(r"""
@@ -229,7 +228,8 @@
       query_count = 1
 
     for i in xrange(query_count):
-      queries.append(datastore.Query(self._entity, _app=self.__app))
+      queries.append(datastore.Query(self._entity, _app=self.__app,
+                                     keys_only=self._keys_only))
 
     logging.log(LOG_LEVEL,
                 'Binding with %i positional args %s and %i keywords %s'
@@ -552,6 +552,9 @@
     Raises:
       BadArgumentError if the filter is invalid (namely non-list with IN)
     """
+    if condition.lower() in ('!=', 'in') and self._keys_only:
+      raise datastore_errors.BadQueryError(
+        'Keys only queries do not support IN or != filters.')
 
     def CloneQueries(queries, n):
       """Do a full copy of the queries and append to the end of the queries.
@@ -675,6 +678,7 @@
 
   __iter__ = Run
 
+  __result_type_regex = re.compile(r'(\*|__key__)')
   __quoted_string_regex = re.compile(r'((?:\'[^\'\n\r]*\')+)')
   __ordinal_regex = re.compile(r':(\d+)$')
   __named_regex = re.compile(r':(\w+)$')
@@ -783,7 +787,8 @@
       True if parsing completed okay.
     """
     self.__Expect('SELECT')
-    self.__Expect('*')
+    result_type = self.__AcceptRegex(self.__result_type_regex)
+    self._keys_only = (result_type == '__key__')
     return self.__From()
 
   def __From(self):
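
GQL gains the same capability through SELECT __key__: the parser accepts
either * or __key__ after SELECT, sets _keys_only accordingly, and refuses
IN and != filters in keys-only mode, since those fan out into multiple
underlying queries (see CloneQueries above). A sketch using the same invented
Story kind as before:

    from google.appengine.ext import db

    for key in db.GqlQuery("SELECT __key__ FROM Story WHERE title = 'foo'"):
        print key

    # Raises BadQueryError ('Keys only queries do not support IN or !=
    # filters.'):
    #   db.GqlQuery("SELECT __key__ FROM Story WHERE title IN ('a', 'b')")
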
--- a/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Tue May 12 15:39:52 2009 +0200
@@ -143,6 +143,7 @@
     self._word_delimiter_regex = word_delimiter_regex
     if isinstance(kind_or_entity, datastore.Entity):
       self._Entity__key = kind_or_entity._Entity__key
+      self._Entity__unindexed_properties = frozenset(kind_or_entity.unindexed_properties())
       self.update(kind_or_entity)
     else:
       super(SearchableEntity, self).__init__(kind_or_entity, *args, **kwargs)
--- a/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Tue May 12 15:39:52 2009 +0200
@@ -674,7 +674,7 @@
   """Provide facilities to export request logs."""
 
   def __init__(self, server, config, output_file,
-               num_days, append, severity, now):
+               num_days, append, severity, now, vhost):
     """Constructor.
 
     Args:
@@ -686,6 +686,7 @@
       append: True if appending to an existing file.
       severity: App log severity to request (0-4); None for no app logs.
       now: POSIX timestamp used for calculating valid dates for num_days.
+      vhost: The virtual host of log messages to get. None for all hosts.
     """
     self.server = server
     self.config = config
@@ -693,6 +694,7 @@
     self.append = append
     self.num_days = num_days
     self.severity = severity
+    self.vhost = vhost
     self.version_id = self.config.version + ".1"
     self.sentinel = None
     self.write_mode = "w"
@@ -770,6 +772,8 @@
       kwds["offset"] = offset
     if self.severity is not None:
       kwds["severity"] = str(self.severity)
+    if self.vhost is not None:
+      kwds["vhost"] = str(self.vhost)
     response = self.server.Send("/api/request_logs", payload=None, **kwds)
     response = response.replace("\r", "\0")
     lines = response.splitlines()
@@ -1789,7 +1793,8 @@
                                    self.options.num_days,
                                    self.options.append,
                                    self.options.severity,
-                                   time.time())
+                                   time.time(),
+                                   self.options.vhost)
     logs_requester.DownloadLogs()
 
   def _RequestLogsOptions(self, parser):
@@ -1813,6 +1818,10 @@
                       help="Severity of app-level log messages to get. "
                       "The range is 0 (DEBUG) through 4 (CRITICAL). "
                       "If omitted, only request logs are returned.")
+    parser.add_option("--vhost", type="string", dest="vhost",
+                      action="store", default=None,
+                      help="The virtual host of log messages to get. "
+                      "If omitted, all log messages are returned.")
 
   def CronInfo(self, now=None, output=sys.stdout):
     """Displays information about cron definitions.
@@ -1834,8 +1843,8 @@
         if not description:
           description = "<no description>"
         print >>output, "\n%s:\nURL: %s\nSchedule: %s" % (description,
-                                                          entry.schedule,
-                                                          entry.url)
+                                                          entry.url,
+                                                          entry.schedule)
         schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
         matches = schedule.GetMatches(now, self.options.num_runs)
         for match in matches:
@@ -1853,8 +1862,8 @@
                       help="Number of runs of each cron job to display"
                       "Default is 5")
 
-  def _CheckRequiredUploadOptions(self):
-    """Checks that upload options are present."""
+  def _CheckRequiredLoadOptions(self):
+    """Checks that upload/download options are present."""
     for option in ["filename", "kind", "config_file"]:
       if getattr(self.options, option) is None:
         self.parser.error("Option '%s' is required." % option)
@@ -1863,7 +1872,7 @@
                         "assigned to an endpoint in app.yaml, or provide "
                         "the url of the handler via the 'url' option.")
 
-  def InferUploadUrl(self, appyaml):
+  def InferRemoteApiUrl(self, appyaml):
     """Uses app.yaml to determine the remote_api endpoint.
 
     Args:
@@ -1885,11 +1894,11 @@
             return "http://%s%s" % (server, handler.url)
     return None
 
-  def RunBulkloader(self, **kwargs):
+  def RunBulkloader(self, arg_dict):
     """Invokes the bulkloader with the given keyword arguments.
 
     Args:
-      kwargs: Keyword arguments to pass to bulkloader.Run().
+      arg_dict: Dictionary of arguments to pass to bulkloader.Run().
     """
     try:
       import sqlite3
@@ -1898,17 +1907,10 @@
                     "sqlite3 module (included in python since 2.5).")
       sys.exit(1)
 
-    sys.exit(bulkloader.Run(kwargs))
-
-  def PerformUpload(self, run_fn=None):
-    """Performs a datastore upload via the bulkloader.
+    sys.exit(bulkloader.Run(arg_dict))
 
-    Args:
-      run_fn: Function to invoke the bulkloader, used for testing.
-    """
-    if run_fn is None:
-      run_fn = self.RunBulkloader
-
+  def _SetupLoad(self):
+    """Performs common verification and set up for upload and download."""
     if len(self.args) != 1:
       self.parser.error("Expected <directory> argument.")
 
@@ -1918,11 +1920,11 @@
     self.options.app_id = appyaml.application
 
     if not self.options.url:
-      url = self.InferUploadUrl(appyaml)
+      url = self.InferRemoteApiUrl(appyaml)
       if url is not None:
         self.options.url = url
 
-    self._CheckRequiredUploadOptions()
+    self._CheckRequiredLoadOptions()
 
     if self.options.batch_size < 1:
       self.parser.error("batch_size must be 1 or larger.")
@@ -1934,34 +1936,68 @@
       logging.getLogger().setLevel(logging.DEBUG)
       self.options.debug = True
 
+  def _MakeLoaderArgs(self):
+    return dict([(arg_name, getattr(self.options, arg_name, None)) for
+                 arg_name in (
+        "app_id",
+        "url",
+        "filename",
+        "batch_size",
+        "kind",
+        "num_threads",
+        "bandwidth_limit",
+        "rps_limit",
+        "http_limit",
+        "db_filename",
+        "config_file",
+        "auth_domain",
+        "has_header",
+        "loader_opts",
+        "log_file",
+        "passin",
+        "email",
+        "debug",
+        "exporter_opts",
+        "result_db_filename",
+        )])
+
+  def PerformDownload(self, run_fn=None):
+    """Performs a datastore download via the bulkloader.
+
+    Args:
+      run_fn: Function to invoke the bulkloader, used for testing.
+    """
+    if run_fn is None:
+      run_fn = self.RunBulkloader
+    self._SetupLoad()
+
+    StatusUpdate("Downloading data records.")
+
+    args = self._MakeLoaderArgs()
+    args['download'] = True
+    args['has_header'] = False
+
+    run_fn(args)
+
+  def PerformUpload(self, run_fn=None):
+    """Performs a datastore upload via the bulkloader.
+
+    Args:
+      run_fn: Function to invoke the bulkloader, used for testing.
+    """
+    if run_fn is None:
+      run_fn = self.RunBulkloader
+    self._SetupLoad()
+
     StatusUpdate("Uploading data records.")
 
-    run_fn(app_id=self.options.app_id,
-           url=self.options.url,
-           filename=self.options.filename,
-           batch_size=self.options.batch_size,
-           kind=self.options.kind,
-           num_threads=self.options.num_threads,
-           bandwidth_limit=self.options.bandwidth_limit,
-           rps_limit=self.options.rps_limit,
-           http_limit=self.options.http_limit,
-           db_filename=self.options.db_filename,
-           config_file=self.options.config_file,
-           auth_domain=self.options.auth_domain,
-           has_header=self.options.has_header,
-           loader_opts=self.options.loader_opts,
-           log_file=self.options.log_file,
-           passin=self.options.passin,
-           email=self.options.email,
-           debug=self.options.debug,
+    args = self._MakeLoaderArgs()
+    args['download'] = False
 
-           exporter_opts=None,
-           download=False,
-           result_db_filename=None,
-           )
+    run_fn(args)
 
-  def _PerformUploadOptions(self, parser):
-    """Adds 'upload_data' specific options to the 'parser' passed in.
+  def _PerformLoadOptions(self, parser):
+    """Adds options common to 'upload_data' and 'download_data'.
 
     Args:
       parser: An instance of OptionsParser.
@@ -2000,16 +2036,39 @@
     parser.add_option("--auth_domain", type="string", dest="auth_domain",
                       action="store", default="gmail.com",
                       help="The name of the authorization domain to use.")
+    parser.add_option("--log_file", type="string", dest="log_file",
+                      help="File to write bulkloader logs.  If not supplied "
+                           "then a new log file will be created, named: "
+                           "bulkloader-log-TIMESTAMP.")
+
+  def _PerformUploadOptions(self, parser):
+    """Adds 'upload_data' specific options to the 'parser' passed in.
+
+    Args:
+      parser: An instance of OptionsParser.
+    """
+    self._PerformLoadOptions(parser)
     parser.add_option("--has_header", dest="has_header",
                       action="store_true", default=False,
                       help="Whether the first line of the input file should be"
                       " skipped")
     parser.add_option("--loader_opts", type="string", dest="loader_opts",
-                      help="A string to pass to the Loader.Initialize method.")
-    parser.add_option("--log_file", type="string", dest="log_file",
-                      help="File to write bulkloader logs.  If not supplied "
-                           "then a new log file will be created, named: "
-                           "bulkloader-log-TIMESTAMP.")
+                      help="A string to pass to the Loader.initialize method.")
+
+  def _PerformDownloadOptions(self, parser):
+    """Adds 'download_data' specific options to the 'parser' passed in.
+
+    Args:
+      parser: An instance of OptionsParser.
+    """
+    self._PerformLoadOptions(parser)
+    parser.add_option("--exporter_opts", type="string", dest="exporter_opts",
+                      help="A string to pass to the Exporter.initialize method."
+                      )
+    parser.add_option("--result_db_filename", type="string",
+                      dest="result_db_filename",
+                      action="store",
+                      help="Database to write entities to for download.")
 
   class Action(object):
     """Contains information about a command line action.
@@ -2121,11 +2180,20 @@
           function="PerformUpload",
           usage="%prog [options] upload_data <directory>",
           options=_PerformUploadOptions,
-          short_desc="Upload CSV records to datastore",
+          short_desc="Upload data records to datastore.",
           long_desc="""
-The 'upload_data' command translates CSV records into datastore entities and
+The 'upload_data' command translates input records into datastore entities and
 uploads them into your application's datastore."""),
 
+      "download_data": Action(
+          function="PerformDownload",
+          usage="%prog [options] download_data <directory>",
+          options=_PerformDownloadOptions,
+          short_desc="Download entities from datastore.",
+          long_desc="""
+The 'download_data' command downloads datastore entities and writes them to a
+file in CSV or a developer-defined format."""),
+
 
 
   }
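
Illustrative invocations of the upload and download actions (file names,
kind, and app directory are placeholders; --filename, --kind, and
--config_file are the options _CheckRequiredLoadOptions insists on):

    appcfg.py upload_data --config_file=album_loader.py \
        --filename=albums.csv --kind=Album myapp/
    appcfg.py download_data --config_file=album_loader.py \
        --filename=albums.csv --kind=Album myapp/
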
--- a/thirdparty/google_appengine/google/appengine/tools/bulkloader.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/bulkloader.py	Tue May 12 15:39:52 2009 +0200
@@ -1702,8 +1702,11 @@
       duration: The duration of the transfer in seconds.
     """
     if duration > self.threshhold2:
-      self.DecreaseWorkers()
+      logger.debug('Transfer took %s, decreasing workers.', duration)
+      self.DecreaseWorkers(backoff=False)
+      return
     elif duration > self.threshhold1:
+      logger.debug('Transfer took %s, not increasing workers.', duration)
       return
     elif self.enabled:
       if self.backoff_time > 0.0:
@@ -1722,13 +1725,17 @@
                      self.enabled_count)
         self.thread_semaphore.release()
 
-  def DecreaseWorkers(self):
+  def DecreaseWorkers(self, backoff=True):
     """Informs the thread_gate that an item failed to send.
 
     If thread throttling is enabled, this method will cause the
     throttler to allow one fewer thread in the critical section. If
     there is only one thread remaining, failures will result in
     exponential backoff until there is a success.
+
+    Args:
+      backoff: Whether to increase exponential backoff if there is only
+        one thread enabled.
     """
     if self.enabled:
       do_disable = False
@@ -1738,7 +1745,7 @@
           if self.enabled_count > 1:
             do_disable = True
             self.enabled_count -= 1
-          else:
+          elif backoff:
             if self.backoff_time == 0.0:
               self.backoff_time = INITIAL_BACKOFF
             else:
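
DecreaseWorkers now only backs off when a single worker remains and the
caller permits it; a self-contained sketch of that pattern (constant values
and class structure are simplified assumptions, not the real ThreadGate):

    import threading

    INITIAL_BACKOFF = 1.0  # assumed starting delay, in seconds
    BACKOFF_FACTOR = 2.0   # assumed growth factor

    class GateSketch(object):
      """Minimal model of the adaptive gate; not the bulkloader class."""

      def __init__(self, num_threads):
        self.enabled_count = num_threads
        self.backoff_time = 0.0
        self._lock = threading.Lock()

      def decrease_workers(self, backoff=True):
        self._lock.acquire()
        try:
          if self.enabled_count > 1:
            self.enabled_count -= 1  # shed one worker first
          elif backoff:
            if self.backoff_time == 0.0:
              self.backoff_time = INITIAL_BACKOFF
            else:
              self.backoff_time *= BACKOFF_FACTOR  # grow exponentially
        finally:
          self._lock.release()
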
@@ -2138,8 +2145,8 @@
               status = 200
               transferred = True
               transfer_time = self.get_time() - t
-              logger.debug('[%s] %s Transferred %d entities', self.getName(),
-                           item, item.count)
+              logger.debug('[%s] %s Transferred %d entities in %0.1f seconds',
+                           self.getName(), item, item.count, transfer_time)
               self.throttle.AddTransfer(RECORDS, item.count)
             except (db.InternalError, db.NotSavedError, db.Timeout,
                     apiproxy_errors.OverQuotaError,
@@ -2169,8 +2176,8 @@
         finally:
           if transferred:
             item.MarkAsTransferred()
+            self.work_queue.task_done()
             self.thread_gate.TransferSuccess(transfer_time)
-            self.work_queue.task_done()
           else:
             item.MarkAsError()
             try:
@@ -2314,6 +2321,7 @@
     if export_result:
       item.Process(export_result, self.num_threads, self.batch_size,
                    self.work_queue)
+    item.state = STATE_GOT
 
 
 class DataSourceThread(_ThreadBase):
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Tue May 12 13:02:10 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Tue May 12 15:39:52 2009 +0200
@@ -1178,6 +1178,7 @@
     'timing',
     'unicodedata',
     'zlib',
+    '_ast',
     '_bisect',
     '_codecs',
     '_codecs_cn',
@@ -3230,6 +3231,7 @@
     'capability_service',
     capability_stub.CapabilityServiceStub())
 
+
   try:
     from google.appengine.api.images import images_stub
     apiproxy_stub_map.apiproxy.RegisterStub(