Load ../../google_appengine_1.1.7/ into trunk/thirdparty/google_appengine.
author: Sverre Rabbelier <srabbelier@gmail.com>
Sat, 06 Dec 2008 16:52:21 +0000
changeset 686 df109be0567c
parent 685 a440ced9a75f
child 687 4755caf1d7a6
Load ../../google_appengine_1.1.7/ into trunk/thirdparty/google_appengine.
thirdparty/google_appengine/README
thirdparty/google_appengine/RELEASE_NOTES
thirdparty/google_appengine/VERSION
thirdparty/google_appengine/dev_appserver.py
thirdparty/google_appengine/google/appengine/api/apiproxy_stub.py
thirdparty/google_appengine/google/appengine/api/datastore.py
thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py
thirdparty/google_appengine/google/appengine/api/datastore_types.py
thirdparty/google_appengine/google/appengine/api/images/images_stub.py
thirdparty/google_appengine/google/appengine/api/mail_stub.py
thirdparty/google_appengine/google/appengine/api/memcache/memcache_stub.py
thirdparty/google_appengine/google/appengine/api/urlfetch.py
thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py
thirdparty/google_appengine/google/appengine/api/user_service_stub.py
thirdparty/google_appengine/google/appengine/datastore/datastore_index.py
thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py
thirdparty/google_appengine/google/appengine/ext/admin/__init__.py
thirdparty/google_appengine/google/appengine/ext/admin/templates/css/datastore.css
thirdparty/google_appengine/google/appengine/ext/admin/templates/datastore.html
thirdparty/google_appengine/google/appengine/ext/bulkload/__init__.py
thirdparty/google_appengine/google/appengine/ext/db/__init__.py
thirdparty/google_appengine/google/appengine/ext/webapp/template.py
thirdparty/google_appengine/google/appengine/ext/webapp/util.py
thirdparty/google_appengine/google/appengine/tools/appcfg.py
thirdparty/google_appengine/google/appengine/tools/dev_appserver.py
thirdparty/google_appengine/google/appengine/tools/dev_appserver_index.py
thirdparty/google_appengine/google/appengine/tools/dev_appserver_login.py
thirdparty/google_appengine/google/appengine/tools/os_compat.py
--- a/thirdparty/google_appengine/README	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/README	Sat Dec 06 16:52:21 2008 +0000
@@ -17,7 +17,7 @@
 =====================
 1) Download and install Python 2.5 from http://www.python.org/download/
 2) Download the SDK installer from http://code.google.com/appengine/downloads
-3) Install the SDK by double-clicking on the GoogleAppEngine.dmg file and 
+3) Install the SDK by double-clicking on the GoogleAppEngine.dmg file and
 running the installer.
 
 
@@ -25,7 +25,7 @@
 =====================
 1) Download and install Python 2.5 from http://www.python.org/download/
 2) Download the SDK installer from http://code.google.com/appengine/downloads
-2) Install the SDK by double-clicking on the GoogleAppEngine.msi file and 
+3) Install the SDK by double-clicking on the GoogleAppEngine.msi file and
 running the installer.
 
 
@@ -85,7 +85,7 @@
 
 USING THE SDK
 =======================
-For instructions on getting started with Google App Engine, please see the 
+For instructions on getting started with Google App Engine, please see the
 Google App Engine Getting Started Guide
 
 http://code.google.com/appengine/docs/gettingstarted
--- a/thirdparty/google_appengine/RELEASE_NOTES	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/RELEASE_NOTES	Sat Dec 06 16:52:21 2008 +0000
@@ -3,6 +3,61 @@
 
 App Engine SDK - Release Notes
 
+Version 1.1.7 - November 20, 2008
+=================================
+  - Fixed an issue with urlfetch response headers.
+      http://code.google.com/p/googleappengine/issues/detail?id=877
+
+Version 1.1.6 - November 17, 2008
+=================================
+
+  - Datastore now supports filtering and sorting on the __key__ special
+    property, which evaluates to each entity's key.
+  - Fixed a bug where it was possible to append None to ListProperty.
+  - Datastore appengine.ext.db models allow deletion by key without
+    instantiating a model instance.
+  - Datastore models allow access to key name before put() if key_name given.
+  - Datastore fetch max results and max query offset match production limits.
+  - Fixed an issue in production where query fails with NeedIndexError when
+    a model has two ancestor indexes.
+    http://code.google.com/p/googleappengine/issues/detail?id=423
+  - Allow trailing whitespace in PropertyValueFromString for datetime.
+  - Fixed to_xml on models with binary data in a BlobProperty: they now
+    are base64 encoded.
+    Note: This changes XML serialization.
+      http://code.google.com/p/googleappengine/issues/detail?id=430
+  - Fixed an issue with setting expando attributes.
+      http://code.google.com/p/googleappengine/issues/detail?id=431
+  - Fixed an issue where TypeError was raised instead of NeedIndexError for
+    "merge join" queries, i.e. queries with only equals filters and no ancestor
+    or sort orders, that still need an index.
+      http://code.google.com/p/googleappengine/issues/detail?id=749
+  - URLFetch in the SDK now has the same 5 second timeout to match production.
+  - URLFetch response headers are combined
+      http://code.google.com/p/googleappengine/issues/detail?id=412
+  - URLFetch now uses original method when following a redirect.
+      http://code.google.com/p/googleappengine/issues/detail?id=363
+  - URLFetch logs a warning when using a non standard port.
+      http://code.google.com/p/googleappengine/issues/detail?id=436
+  - URLFetch allows integers as values in request headers.
+  - Enforce response size and API request size limits to match production.
+      http://code.google.com/p/googleappengine/issues/detail?id=447
+  - SDK sanitizes response headers to match production
+      http://code.google.com/p/googleappengine/issues/detail?id=198
+  - Login URLs now require login in the SDK to match production.
+      http://code.google.com/p/googleappengine/issues/detail?id=53
+  - Fixed an issue with long URLs in HTTP 302 redirect responses.
+      http://code.google.com/p/googleappengine/issues/detail?id=407
+  - Fixed an issue with regular expressions in static_files in app.yaml
+      http://code.google.com/p/googleappengine/issues/detail?id=711
+  - SDK only allows "C" locale to match production.
+      http://code.google.com/p/googleappengine/issues/detail?id=356
+  - Support the bufsize positional arg in open()/file().
+  - lstat is aliased to stat.
+  - appcfg handles index building errors more gracefully.
+  - Fixed an issue with symlinks in the path to the Python core libraries.
+
+
 Version 1.1.5 - September 29, 2008
 ==================================
 
--- a/thirdparty/google_appengine/VERSION	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/VERSION	Sat Dec 06 16:52:21 2008 +0000
@@ -1,3 +1,3 @@
-release: "1.1.5"
-timestamp: 1222740096
+release: "1.1.7"
+timestamp: 1227225249
 api_versions: ['1']
--- a/thirdparty/google_appengine/dev_appserver.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/dev_appserver.py	Sat Dec 06 16:52:21 2008 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.5
+#!/usr/bin/env python
 #
 # Copyright 2007 Google Inc.
 #
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/apiproxy_stub.py	Sat Dec 06 16:52:21 2008 +0000
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Base class for implementing API proxy stubs."""
+
+
+
+
+
+
+from google.appengine.runtime import apiproxy_errors
+
+
+MAX_REQUEST_SIZE = 1 << 20
+
+
+class APIProxyStub(object):
+  """Base class for implementing API proxy stub classes.
+
+  To implement an API proxy stub:
+    - Extend this class.
+    - Override __init__ to pass in appropriate default service name.
+    - Implement service methods as _Dynamic_<method>(request, response).
+  """
+
+  def __init__(self, service_name, max_request_size=MAX_REQUEST_SIZE):
+    """Constructor.
+
+    Args:
+      service_name: Service name expected for all calls.
+      max_request_size: int, maximum allowable size of the incoming request.  A
+        apiproxy_errors.RequestTooLargeError will be raised if the inbound
+        request exceeds this size.  Default is 1 MB.
+    """
+    self.__service_name = service_name
+    self.__max_request_size = max_request_size
+
+  def MakeSyncCall(self, service, call, request, response):
+    """The main RPC entry point.
+
+    Args:
+      service: Must be name as provided to service_name of constructor.
+      call: A string representing the rpc to make.  Must be part of
+        the underlying services methods and impemented by _Dynamic_<call>.
+      request: A protocol buffer of the type corresponding to 'call'.
+      response: A protocol buffer of the type corresponding to 'call'.
+    """
+    assert service == self.__service_name, ('Expected "%s" service name, '
+                                            'was "%s"' % (self.__service_name,
+                                                          service))
+    if request.ByteSize() > self.__max_request_size:
+      raise apiproxy_errors.RequestTooLargeError(
+          'The request to API call %s.%s() was too large.' % (service, call))
+    messages = []
+    assert request.IsInitialized(messages), messages
+
+    method = getattr(self, '_Dynamic_' + call)
+    method(request, response)
--- a/thirdparty/google_appengine/google/appengine/api/datastore.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/datastore.py	Sat Dec 06 16:52:21 2008 +0000
@@ -243,7 +243,7 @@
     keys: Key or string or list of Keys or strings
 
   Raises:
-    TransactionFailedError, if the Put could not be committed.
+    TransactionFailedError, if the Delete could not be committed.
   """
   keys, multiple = NormalizeAndTypeCheckKeys(keys)
 
@@ -882,7 +882,7 @@
         _ToDatastoreError(err)
       except datastore_errors.NeedIndexError, exc:
         yaml = datastore_index.IndexYamlForQuery(
-          *datastore_index.CompositeIndexForQuery(pb)[:-1])
+          *datastore_index.CompositeIndexForQuery(pb)[1:-1])
         raise datastore_errors.NeedIndexError(
           str(exc) + '\nThis query needs this index:\n' + yaml)
 
@@ -976,7 +976,7 @@
     if isinstance(value, tuple):
       value = list(value)
 
-    datastore_types.ValidateProperty(' ', value)
+    datastore_types.ValidateProperty(' ', value, read_only=True)
     match = self._CheckFilter(filter, value)
     property = match.group(1)
     operator = match.group(3)
@@ -1065,6 +1065,8 @@
 
     property = match.group(1)
     operator = match.group(3)
+    if operator is None:
+      operator = '='
 
     if isinstance(values, tuple):
       values = list(values)
@@ -1087,6 +1089,13 @@
           'Inequality operators (%s) must be on the same property as the '
           'first sort order, if any sort orders are supplied' %
           ', '.join(self.INEQUALITY_OPERATORS))
+    elif property in datastore_types._SPECIAL_PROPERTIES:
+      if property == datastore_types._KEY_SPECIAL_PROPERTY:
+        for value in values:
+          if not isinstance(value, Key):
+            raise datastore_errors.BadFilterError(
+              '%s filter value must be a Key; received %s (a %s)' %
+              (datastore_types._KEY_SPECIAL_PROPERTY, value, typename(value)))
 
     return match
 
--- a/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Sat Dec 06 16:52:21 2008 +0000
@@ -38,14 +38,16 @@
 import datetime
 import logging
 import os
-import pickle
 import struct
 import sys
 import tempfile
 import threading
 import warnings
 
+import cPickle as pickle
+
 from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub
 from google.appengine.api import datastore
 from google.appengine.api import datastore_admin
 from google.appengine.api import datastore_errors
@@ -64,12 +66,18 @@
 datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
 
 
+_MAXIMUM_RESULTS = 1000
+
+
+_MAX_QUERY_OFFSET = 4000
+
 class _StoredEntity(object):
   """Simple wrapper around an entity stored by the stub.
 
   Public properties:
-    native: Native protobuf Python object, entity_pb.EntityProto.
-    encoded: Encoded binary representation of above protobuf.
+    protobuf: Native protobuf Python object, entity_pb.EntityProto.
+    encoded_protobuf: Encoded binary representation of above protobuf.
+    native: datastore.Entity instance.
   """
 
   def __init__(self, entity):
@@ -78,12 +86,14 @@
     Args:
       entity: entity_pb.EntityProto to store.
     """
-    self.native = entity
+    self.protobuf = entity
 
-    self.encoded = entity.Encode()
+    self.encoded_protobuf = entity.Encode()
+
+    self.native = datastore.Entity._FromPb(entity)
 
 
-class DatastoreFileStub(object):
+class DatastoreFileStub(apiproxy_stub.APIProxyStub):
   """ Persistent stub for the Python datastore API.
 
   Stores all entities in memory, and persists them to a file as pickled
@@ -114,8 +124,12 @@
     users.User: entity_pb.PropertyValue.kUserValueGroup,
     }
 
-  def __init__(self, app_id, datastore_file, history_file,
-               require_indexes=False):
+  def __init__(self,
+               app_id,
+               datastore_file,
+               history_file,
+               require_indexes=False,
+               service_name='datastore_v3'):
     """Constructor.
 
     Initializes and loads the datastore from the backing files, if they exist.
@@ -128,7 +142,10 @@
           datastore_file.
       require_indexes: bool, default False.  If True, composite indexes must
           exist in index.yaml for queries that need them.
+      service_name: Service name expected for all calls.
     """
+    super(DatastoreFileStub, self).__init__(service_name)
+
 
     assert isinstance(app_id, basestring) and app_id != ''
     self.__app_id = app_id
@@ -137,6 +154,8 @@
 
     self.__entities = {}
 
+    self.__schema_cache = {}
+
     self.__tx_snapshot = {}
 
     self.__queries = {}
@@ -170,6 +189,37 @@
     self.__queries = {}
     self.__transactions = {}
     self.__query_history = {}
+    self.__schema_cache = {}
+
+  def _AppKindForKey(self, key):
+    """ Get (app, kind) tuple from given key.
+
+    The (app, kind) tuple is used as an index into several internal
+    dictionaries, e.g. __entities.
+
+    Args:
+      key: entity_pb.Reference
+
+    Returns:
+      Tuple (app, kind), both are unicode strings.
+    """
+    last_path = key.path().element_list()[-1]
+    return key.app(), last_path.type()
+
+  def _StoreEntity(self, entity):
+    """ Store the given entity.
+
+    Args:
+      entity: entity_pb.EntityProto
+    """
+    key = entity.key()
+    app_kind = self._AppKindForKey(key)
+    if app_kind not in self.__entities:
+      self.__entities[app_kind] = {}
+    self.__entities[app_kind][key] = _StoredEntity(entity)
+
+    if app_kind in self.__schema_cache:
+      del self.__schema_cache[app_kind]
 
   def Read(self):
     """ Reads the datastore and history files into memory.
@@ -198,11 +248,9 @@
           raise datastore_errors.InternalError(error_msg %
                                                (self.__datastore_file, e))
 
+        self._StoreEntity(entity)
+
         last_path = entity.key().path().element_list()[-1]
-        app_kind = (entity.key().app(), last_path.type())
-        kind_dict = self.__entities.setdefault(app_kind, {})
-        kind_dict[entity.key()] = _StoredEntity(entity)
-
         if last_path.has_id() and last_path.id() >= self.__next_id:
           self.__next_id = last_path.id() + 1
 
@@ -234,7 +282,7 @@
       encoded = []
       for kind_dict in self.__entities.values():
         for entity in kind_dict.values():
-          encoded.append(entity.encoded)
+          encoded.append(entity.encoded_protobuf)
 
       self.__WritePickled(encoded, self.__datastore_file)
 
@@ -276,7 +324,11 @@
       return
 
     tmpfile = openfile(os.tempnam(os.path.dirname(filename)), 'wb')
-    pickle.dump(obj, tmpfile, 1)
+
+    pickler = pickle.Pickler(tmpfile, protocol=1)
+    pickler.fast = True
+    pickler.dump(obj)
+
     tmpfile.close()
 
     self.__file_lock.acquire()
@@ -296,14 +348,12 @@
     """ The main RPC entry point. service must be 'datastore_v3'. So far, the
     supported calls are 'Get', 'Put', 'RunQuery', 'Next', and 'Count'.
     """
-
-    assert service == 'datastore_v3'
+    super(DatastoreFileStub, self).MakeSyncCall(service,
+                                                call,
+                                                request,
+                                                response)
 
     explanation = []
-    assert request.IsInitialized(explanation), explanation
-
-    (getattr(self, "_Dynamic_" + call))(request, response)
-
     assert response.IsInitialized(explanation), explanation
 
   def QueryHistory(self):
@@ -322,7 +372,6 @@
       assert clone.has_key()
       assert clone.key().path().element_size() > 0
 
-      app = clone.key().app()
       last_path = clone.key().path().element_list()[-1]
       if last_path.id() == 0 and not last_path.has_name():
         self.__id_lock.acquire()
@@ -343,9 +392,7 @@
 
     try:
       for clone in clones:
-        last_path = clone.key().path().element_list()[-1]
-        kind_dict = self.__entities.setdefault((app, last_path.type()), {})
-        kind_dict[clone.key()] = _StoredEntity(clone)
+        self._StoreEntity(clone)
     finally:
       self.__entities_lock.release()
 
@@ -357,12 +404,11 @@
 
   def _Dynamic_Get(self, get_request, get_response):
     for key in get_request.key_list():
-        app = key.app()
-        last_path = key.path().element_list()[-1]
+        app_kind = self._AppKindForKey(key)
 
         group = get_response.add_entity()
         try:
-          entity = self.__entities[app, last_path.type()][key].native
+          entity = self.__entities[app_kind][key].protobuf
         except KeyError:
           entity = None
 
@@ -374,12 +420,13 @@
     self.__entities_lock.acquire()
     try:
       for key in delete_request.key_list():
+        app_kind = self._AppKindForKey(key)
         try:
-          app = key.app()
-          kind = key.path().element_list()[-1].type()
-          del self.__entities[app, kind][key]
-          if not self.__entities[app, kind]:
-            del self.__entities[app, kind]
+          del self.__entities[app_kind][key]
+          if not self.__entities[app_kind]:
+            del self.__entities[app_kind]
+
+          del self.__schema_cache[app_kind]
         except KeyError:
           pass
 
@@ -396,12 +443,15 @@
     else:
       self.__tx_lock.release()
 
+    if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
+       raise apiproxy_errors.ApplicationError(
+         datastore_pb.Error.BAD_REQUEST, "Too big query offset.")
+
     app = query.app()
 
     if self.__require_indexes:
-      required_index = datastore_index.CompositeIndexForQuery(query)
-      if required_index is not None:
-        kind, ancestor, props, num_eq_filters = required_index
+      required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
+      if required:
         required_key = kind, ancestor, props
         indexes = self.__indexes.get(app)
         if not indexes:
@@ -432,7 +482,7 @@
     try:
       query.set_app(app)
       results = self.__entities[app, query.kind()].values()
-      results = [datastore.Entity._FromPb(entity.native) for entity in results]
+      results = [entity.native for entity in results]
     except KeyError:
       results = []
 
@@ -456,24 +506,24 @@
       prop = filt.property(0).name().decode('utf-8')
       op = operators[filt.op()]
 
+      filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
+                         for filter_prop in filt.property_list()]
+
       def passes(entity):
         """ Returns True if the entity passes the filter, False otherwise. """
-        entity_vals = entity.get(prop, [])
+        if prop in datastore_types._SPECIAL_PROPERTIES:
+          entity_vals = self.__GetSpecialPropertyValue(entity, prop)
+        else:
+          entity_vals = entity.get(prop, [])
+
         if not isinstance(entity_vals, list):
           entity_vals = [entity_vals]
 
-        entity_property_list = [datastore_types.ToPropertyPb(prop, value)
-                                for value in entity_vals]
-
-        for entity_prop in entity_property_list:
-          fixed_entity_val = datastore_types.FromPropertyPb(entity_prop)
-
+        for fixed_entity_val in entity_vals:
           if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
             continue
 
-          for filter_prop in filt.property_list():
-            filter_val = datastore_types.FromPropertyPb(filter_prop)
-
+          for filter_val in filter_val_list:
             fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
               fixed_entity_val.__class__)
             filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
@@ -499,7 +549,11 @@
       results = filter(passes, results)
 
     def has_prop_indexed(entity, prop):
-      """Returns True if prop is in the entity and is not a raw property."""
+      """Returns True if prop is in the entity and is not a raw property, or
+      is a special property."""
+      if prop in datastore_types._SPECIAL_PROPERTIES:
+        return True
+
       values = entity.get(prop, [])
       if not isinstance(values, (tuple, list)):
         values = [values]
@@ -523,13 +577,17 @@
 
         reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
 
-        a_val = a[prop]
-        if isinstance(a_val, list):
-          a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
+        if prop in datastore_types._SPECIAL_PROPERTIES:
+          a_val = self.__GetSpecialPropertyValue(a, prop)
+          b_val = self.__GetSpecialPropertyValue(b, prop)
+        else:
+          a_val = a[prop]
+          if isinstance(a_val, list):
+            a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
 
-        b_val = b[prop]
-        if isinstance(b_val, list):
-          b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
+          b_val = b[prop]
+          if isinstance(b_val, list):
+            b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
 
         cmped = order_compare_properties(a_val, b_val)
 
@@ -573,6 +631,8 @@
       offset = query.offset()
     if query.has_limit():
       limit = query.limit()
+    if limit > _MAXIMUM_RESULTS:
+      limit = _MAXIMUM_RESULTS
     results = results[offset:limit + offset]
 
     clone = datastore_pb.Query()
@@ -584,7 +644,6 @@
       self.__query_history[clone] = 1
     self.__WriteHistory()
 
-    results = [e._ToPb() for e in results]
     self.__cursor_lock.acquire()
     cursor = self.__next_cursor
     self.__next_cursor += 1
@@ -594,7 +653,6 @@
     query_result.mutable_cursor().set_cursor(cursor)
     query_result.set_more_results(len(results) > 0)
 
-
   def _Dynamic_Next(self, next_request, query_result):
     cursor = next_request.cursor().cursor()
 
@@ -605,13 +663,12 @@
                                              'Cursor %d not found' % cursor)
 
     count = next_request.count()
-    for r in results[:count]:
-      query_result.add_result().CopyFrom(r)
+    results_pb = [r._ToPb() for r in results[:count]]
+    query_result.result_list().extend(results_pb)
     del results[:count]
 
     query_result.set_more_results(len(results) > 0)
 
-
   def _Dynamic_Count(self, query, integer64proto):
     query_result = datastore_pb.QueryResult()
     self._Dynamic_RunQuery(query, query_result)
@@ -620,7 +677,6 @@
     integer64proto.set_value(count)
     del self.__queries[cursor]
 
-
   def _Dynamic_BeginTransaction(self, request, transaction):
     self.__tx_handle_lock.acquire()
     handle = self.__next_tx_handle
@@ -670,16 +726,20 @@
 
     for app, kind in self.__entities:
       if app == app_str:
+        app_kind = (app, kind)
+        if app_kind in self.__schema_cache:
+          kinds.append(self.__schema_cache[app_kind])
+          continue
+
         kind_pb = entity_pb.EntityProto()
         kind_pb.mutable_key().set_app('')
         kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
         kind_pb.mutable_entity_group()
-        kinds.append(kind_pb)
 
         props = {}
 
-        for entity in self.__entities[(app, kind)].values():
-          for prop in entity.native.property_list():
+        for entity in self.__entities[app_kind].values():
+          for prop in entity.protobuf.property_list():
             if prop.name() not in props:
               props[prop.name()] = entity_pb.PropertyValue()
             props[prop.name()].MergeFrom(prop.value())
@@ -710,7 +770,11 @@
           prop_pb.set_name(name)
           prop_pb.mutable_value().CopyFrom(value_pb)
 
-    schema.kind_list().extend(kinds)
+        kinds.append(kind_pb)
+        self.__schema_cache[app_kind] = kind_pb
+
+    for kind_pb in kinds:
+      schema.add_kind().CopyFrom(kind_pb)
 
   def _Dynamic_CreateIndex(self, index, id_response):
     if index.id() != 0:
@@ -790,3 +854,23 @@
           return stored_index
 
     return None
+
+  @classmethod
+  def __GetSpecialPropertyValue(cls, entity, property):
+    """Returns an entity's value for a special property.
+
+    Right now, the only special property is __key__, whose value is the
+    entity's key.
+
+    Args:
+      entity: datastore.Entity
+
+    Returns:
+      property value. For __key__, a datastore_types.Key.
+
+    Raises:
+      AssertionError, if the given property is not special.
+    """
+    assert property in datastore_types._SPECIAL_PROPERTIES
+    if property == datastore_types._KEY_SPECIAL_PROPERTY:
+      return entity.key()
--- a/thirdparty/google_appengine/google/appengine/api/datastore_types.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_types.py	Sat Dec 06 16:52:21 2008 +0000
@@ -56,6 +56,9 @@
 
 RESERVED_PROPERTY_NAME = re.compile('^__.*__$')
 
+_KEY_SPECIAL_PROPERTY = '__key__'
+_SPECIAL_PROPERTIES = frozenset([_KEY_SPECIAL_PROPERTY])
+
 class UtcTzinfo(datetime.tzinfo):
   def utcoffset(self, dt): return datetime.timedelta(0)
   def dst(self, dt): return datetime.timedelta(0)
@@ -855,6 +858,15 @@
     raise TypeError('Blob() argument should be str instance, not %s' %
                     type(arg).__name__)
 
+  def ToXml(self):
+    """Output a blob as XML.
+
+    Returns:
+      Base64 encoded version of itself for safe insertion in to an XML document.
+    """
+    encoded = base64.urlsafe_b64encode(self)
+    return saxutils.escape(encoded)
+
 
 _PROPERTY_MEANINGS = {
 
@@ -1009,7 +1021,7 @@
 assert set(_VALIDATE_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
 
 
-def ValidateProperty(name, values):
+def ValidateProperty(name, values, read_only=False):
   """Helper function for validating property values.
 
   Args:
@@ -1023,7 +1035,8 @@
     type-specific criteria.
   """
   ValidateString(name, 'property name', datastore_errors.BadPropertyError)
-  if RESERVED_PROPERTY_NAME.match(name):
+
+  if not read_only and RESERVED_PROPERTY_NAME.match(name):
     raise datastore_errors.BadPropertyError(
         '%s is a reserved property name.' % name)
 
@@ -1421,6 +1434,7 @@
     ValueError if type_ is datetime and value_string has a timezone offset.
   """
   if type_ == datetime.datetime:
+    value_string = value_string.strip()
     if value_string[-6] in ('+', '-'):
       if value_string[-5:] == '00:00':
         value_string = value_string[:-6]
--- a/thirdparty/google_appengine/google/appengine/api/images/images_stub.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/images/images_stub.py	Sat Dec 06 16:52:21 2008 +0000
@@ -26,32 +26,23 @@
 from PIL import _imaging
 from PIL import Image
 
+from google.appengine.api import apiproxy_stub
 from google.appengine.api import images
 from google.appengine.api.images import images_service_pb
 from google.appengine.runtime import apiproxy_errors
 
 
-class ImagesServiceStub(object):
+class ImagesServiceStub(apiproxy_stub.APIProxyStub):
   """Stub version of images API to be used with the dev_appserver."""
 
-  def __init__(self):
-    """Preloads PIL to load all modules in the unhardened environment."""
-    Image.init()
-
-  def MakeSyncCall(self, service, call, request, response):
-    """Main entry point.
+  def __init__(self, service_name='images'):
+    """Preloads PIL to load all modules in the unhardened environment.
 
     Args:
-      service: str, must be 'images'.
-      call: str, name of the RPC to make, must be part of ImagesService.
-      request: pb object, corresponding args to the 'call' argument.
-      response: pb object, return value for the 'call' argument.
+      service_name: Service name expected for all calls.
     """
-    assert service == "images"
-    assert request.IsInitialized()
-
-    attr = getattr(self, "_Dynamic_" + call)
-    attr(request, response)
+    super(ImagesServiceStub, self).__init__(service_name)
+    Image.init()
 
   def _Dynamic_Transform(self, request, response):
     """Trivial implementation of ImagesService::Transform.
--- a/thirdparty/google_appengine/google/appengine/api/mail_stub.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/mail_stub.py	Sat Dec 06 16:52:21 2008 +0000
@@ -31,38 +31,10 @@
 import subprocess
 import smtplib
 
-
-class ServiceStub(object):
-  """Service stub base class used to forward requests to methods.
-
-  Use this base class to defined service stub classes.  Instead of overriding
-  MakeSyncCall, the default implementation forwards the call to appropriate
-  sub-class method.
-
-  If the sub class defines a static string 'SERVICE', it will also check
-  to make sure that calls to this service stub are always made to that named
-  service.
-  """
-
-  def MakeSyncCall(self, service, call, request, response):
-    """The main RPC entry point.
-
-    Args:
-      service: Must be name as defined by sub class variable SERVICE.
-      call: A string representing the rpc to make.  Must be part of
-        MailService.
-      request: A protocol buffer of the type corresponding to 'call'.
-      response: A protocol buffer of the type corresponding to 'call'.
-    """
-    assert not hasattr(self, 'SERVICE') or service == self.SERVICE
-    explanation = []
-    assert request.IsInitialized(explanation), explanation
-
-    attr = getattr(self, '_Dynamic_' + call)
-    attr(request, response)
+from google.appengine.api import apiproxy_stub
 
 
-class MailServiceStub(ServiceStub):
+class MailServiceStub(apiproxy_stub.APIProxyStub):
   """Python only mail service stub.
 
   This stub does not actually attempt to send email.  instead it merely logs
@@ -74,7 +46,6 @@
     user: User to log in to SMTP server as.
     password: Password for SMTP server user.
   """
-  SERVICE = 'mail'
 
   def __init__(self,
                host=None,
@@ -82,7 +53,20 @@
                user='',
                password='',
                enable_sendmail=False,
-               show_mail_body=False):
+               show_mail_body=False,
+               service_name='mail'):
+    """Constructor.
+
+    Args:
+      host: Host of SMTP mail server.
+      port: Port of SMTP mail server.
+      user: Sending user of SMTP mail.
+      password: SMTP password.
+      enable_sendmail: Whether sending mail via sendmail is enabled.
+      show_mail_body: Whether to show mail body in log.
+      service_name: Service name expected for all calls.
+    """
+    super(MailServiceStub, self).__init__(service_name)
     self._smtp_host = host
     self._smtp_port = port
     self._smtp_user = user
--- a/thirdparty/google_appengine/google/appengine/api/memcache/memcache_stub.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/memcache_stub.py	Sat Dec 06 16:52:21 2008 +0000
@@ -22,6 +22,7 @@
 import logging
 import time
 
+from google.appengine.api import apiproxy_stub
 from google.appengine.api import memcache
 from google.appengine.api.memcache import memcache_service_pb
 
@@ -91,19 +92,21 @@
     return self.locked and not self.CheckExpired()
 
 
-class MemcacheServiceStub(object):
+class MemcacheServiceStub(apiproxy_stub.APIProxyStub):
   """Python only memcache service stub.
 
   This stub keeps all data in the local process' memory, not in any
   external servers.
   """
 
-  def __init__(self, gettime=time.time):
+  def __init__(self, gettime=time.time, service_name='memcache'):
     """Initializer.
 
     Args:
       gettime: time.time()-like function used for testing.
+      service_name: Service name expected for all calls.
     """
+    super(MemcacheServiceStub, self).__init__(service_name)
     self._gettime = gettime
     self._ResetStats()
 
@@ -116,22 +119,6 @@
     self._byte_hits = 0
     self._cache_creation_time = self._gettime()
 
-  def MakeSyncCall(self, service, call, request, response):
-    """The main RPC entry point.
-
-    Args:
-      service: Must be name as defined by sub class variable SERVICE.
-      call: A string representing the rpc to make.  Must be part of
-        MemcacheService.
-      request: A protocol buffer of the type corresponding to 'call'.
-      response: A protocol buffer of the type corresponding to 'call'.
-    """
-    assert service == 'memcache'
-    assert request.IsInitialized()
-
-    attr = getattr(self, '_Dynamic_' + call)
-    attr(request, response)
-
   def _GetKey(self, key):
     """Retrieves a CacheEntry from the cache if it hasn't expired.
 
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch.py	Sat Dec 06 16:52:21 2008 +0000
@@ -249,7 +249,7 @@
   for key, value in headers.iteritems():
     header_proto = request.add_header()
     header_proto.set_key(key)
-    header_proto.set_value(value)
+    header_proto.set_value(str(value))
 
   request.set_followredirects(follow_redirects)
 
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Sat Dec 06 16:52:21 2008 +0000
@@ -22,8 +22,10 @@
 import httplib
 import logging
 import socket
+import urllib
 import urlparse
 
+from google.appengine.api import apiproxy_stub
 from google.appengine.api import urlfetch
 from google.appengine.api import urlfetch_errors
 from google.appengine.api import urlfetch_service_pb
@@ -41,25 +43,33 @@
   httplib.TEMPORARY_REDIRECT,
 ])
 
+PORTS_ALLOWED_IN_PRODUCTION = (
+    None, '80', '443', '4443', '8080', '8081', '8082', '8083', '8084', '8085',
+    '8086', '8087', '8088', '8089', '8188', '8444', '8990')
 
-class URLFetchServiceStub(object):
+_API_CALL_DEADLINE = 5.0
+
+
+_UNTRUSTED_REQUEST_HEADERS = frozenset([
+  'content-length',
+  'host',
+  'referer',
+  'user-agent',
+  'vary',
+  'via',
+  'x-forwarded-for',
+])
+
+class URLFetchServiceStub(apiproxy_stub.APIProxyStub):
   """Stub version of the urlfetch API to be used with apiproxy_stub_map."""
 
-  def MakeSyncCall(self, service, call, request, response):
-    """The main RPC entry point.
+  def __init__(self, service_name='urlfetch'):
+    """Initializer.
 
-    Arg:
-      service: Must be 'urlfetch'.
-      call: A string representing the rpc to make.  Must be part of
-        URLFetchService.
-      request: A protocol buffer of the type corresponding to 'call'.
-      response: A protocol buffer of the type corresponding to 'call'.
+    Args:
+      service_name: Service name expected for all calls.
     """
-    assert service == 'urlfetch'
-    assert request.IsInitialized()
-
-    attr = getattr(self, '_Dynamic_' + call)
-    attr(request, response)
+    super(URLFetchServiceStub, self).__init__(service_name)
 
   def _Dynamic_Fetch(self, request, response):
     """Trivial implementation of URLFetchService::Fetch().
@@ -93,6 +103,11 @@
       raise apiproxy_errors.ApplicationError(
         urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
 
+    sanitized_headers = self._SanitizeHttpHeaders(_UNTRUSTED_REQUEST_HEADERS,
+                                                  request.header_list())
+    request.clear_header()
+    request.header_list().extend(sanitized_headers)
+
     self._RetrieveURL(request.url(), payload, method,
                       request.header_list(), response,
                       follow_redirects=request.followredirects())
@@ -120,7 +135,15 @@
     last_host = ''
 
     for redirect_number in xrange(MAX_REDIRECTS + 1):
-      (protocol, host, path, parameters, query, fragment) = urlparse.urlparse(url)
+      parsed = urlparse.urlparse(url)
+      protocol, host, path, parameters, query, fragment = parsed
+
+      port = urllib.splitport(urllib.splituser(host)[1])[1]
+
+      if port not in PORTS_ALLOWED_IN_PRODUCTION:
+        logging.warning(
+          'urlfetch received %s ; port %s is not allowed in production!' %
+          (url, port))
 
       if host == '' and protocol == '':
         host = last_host
@@ -159,11 +182,14 @@
         else:
           full_path = path
 
+        orig_timeout = socket.getdefaulttimeout()
         try:
+          socket.setdefaulttimeout(_API_CALL_DEADLINE)
           connection.request(method, full_path, payload, adjusted_headers)
           http_response = connection.getresponse()
           http_response_data = http_response.read()
         finally:
+          socket.setdefaulttimeout(orig_timeout)
           connection.close()
       except (httplib.error, socket.error, IOError), e:
         raise apiproxy_errors.ApplicationError(
@@ -176,8 +202,6 @@
           logging.error(error_msg)
           raise apiproxy_errors.ApplicationError(
               urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
-        else:
-          method = 'GET'
       else:
         response.set_statuscode(http_response.status)
         response.set_content(http_response_data[:MAX_RESPONSE_SIZE])
@@ -195,3 +219,12 @@
       logging.error(error_msg)
       raise apiproxy_errors.ApplicationError(
           urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
+
+  def _SanitizeHttpHeaders(self, untrusted_headers, headers):
+    """Cleans "unsafe" headers from the HTTP request/response.
+
+    Args:
+      untrusted_headers: set of lowercased untrusted header names
+      headers: list of header protocol buffers with key() and value() accessors
+    """
+    return (h for h in headers if h.key().lower() not in untrusted_headers)
--- a/thirdparty/google_appengine/google/appengine/api/user_service_stub.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/user_service_stub.py	Sat Dec 06 16:52:21 2008 +0000
@@ -21,6 +21,7 @@
 import os
 import urllib
 import urlparse
+from google.appengine.api import apiproxy_stub
 from google.appengine.api import user_service_pb
 
 
@@ -28,22 +29,25 @@
 _DEFAULT_LOGOUT_URL = 'https://www.google.com/accounts/Logout?continue=%s'
 
 
-class UserServiceStub(object):
+class UserServiceStub(apiproxy_stub.APIProxyStub):
   """Trivial implementation of the UserService."""
 
   def __init__(self,
                login_url=_DEFAULT_LOGIN_URL,
-               logout_url=_DEFAULT_LOGOUT_URL):
+               logout_url=_DEFAULT_LOGOUT_URL,
+               service_name='user'):
     """Initializer.
 
     Args:
       login_url: String containing the URL to use for logging in.
       logout_url: String containing the URL to use for logging out.
+      service_name: Service name expected for all calls.
 
     Note: Both the login_url and logout_url arguments must contain one format
     parameter, which will be replaced with the continuation URL where the user
     should be redirected after log-in or log-out has been completed.
     """
+    super(UserServiceStub, self).__init__(service_name)
     self.__num_requests = 0
     self._login_url = login_url
     self._logout_url = logout_url
@@ -53,22 +57,6 @@
   def num_requests(self):
     return self.__num_requests
 
-  def MakeSyncCall(self, service, call, request, response):
-    """The apiproxy entry point.
-
-    Args:
-      service: must be 'user'
-      call: string representing which function to call
-      request: the URL to redirect to, a base.StringProto
-      response: the URL, a base.StringProto
-
-    Currently, CreateLoginURL and CreateLogoutURL are supported.
-    """
-    assert service == 'user'
-
-    method = getattr(self, "_Dynamic_" + call)
-    method(request, response)
-
   def _Dynamic_CreateLoginURL(self, request, response):
     """Trivial implementation of UserService.CreateLoginURL().
 
--- a/thirdparty/google_appengine/google/appengine/datastore/datastore_index.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_index.py	Sat Dec 06 16:52:21 2008 +0000
@@ -49,6 +49,7 @@
 
 
 
+from google.appengine.api import datastore_types
 from google.appengine.api import validation
 from google.appengine.api import yaml_errors
 from google.appengine.api import yaml_object
@@ -249,8 +250,8 @@
     query: A datastore_pb.Query instance.
 
   Returns:
-    None if no composite index is needed for this query.  Otherwise,
-    a tuple of the form (kind, ancestor, (prop1, prop2, ...), neq) where:
+    A tuple of the form (required, kind, ancestor, (prop1, prop2, ...), neq):
+      required: boolean, whether the index is required
       kind: the kind or None;
       ancestor: True if this is an ancestor query;
       prop1, prop2, ...: tuples of the form (name, direction) where:
@@ -258,6 +259,8 @@
         direction: datastore_pb.Query_Order.ASCENDING or ...DESCENDING;
       neq: the number of prop tuples corresponding to equality filters.
   """
+  required = True
+
   kind = query.kind()
   ancestor = query.has_ancestor()
   filters = query.filter_list()
@@ -269,7 +272,7 @@
     assert nprops == 1, 'Filter has %s properties, expected 1' % nprops
 
   if ancestor and not kind and not filters and not orders:
-    return None
+    required = False
 
   eq_filters = [f for f in filters if f.op() in EQUALITY_OPERATORS]
   ineq_filters = [f for f in filters if f.op() in INEQUALITY_OPERATORS]
@@ -279,7 +282,9 @@
 
   if (kind and eq_filters and not ineq_filters and not exists_filters and
       not orders):
-    return None
+    names = set(f.property(0).name() for f in eq_filters)
+    if not names.intersection(datastore_types._SPECIAL_PROPERTIES):
+      required = False
 
   ineq_property = None
   if ineq_filters:
@@ -325,13 +330,13 @@
 
   if (kind and not ancestor and
       (not props or (len(props) == 1 and props[0][1] == ASCENDING))):
-    return None
+    required = False
 
   unique_names = set(name for name, dir in props)
   if len(props) > 1 and len(unique_names) == 1:
-    return None
+    required = False
 
-  return (kind, ancestor, tuple(props), len(eq_filters))
+  return (required, kind, ancestor, tuple(props), len(eq_filters))
 
 
 def IndexYamlForQuery(kind, ancestor, props):
--- a/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Sat Dec 06 16:52:21 2008 +0000
@@ -369,14 +369,14 @@
   ANCESTOR_FIRST =    2
   FILTER_FIRST =    3
 
-  _Plan_NAMES = {
+  _Hint_NAMES = {
     1: "ORDER_FIRST",
     2: "ANCESTOR_FIRST",
     3: "FILTER_FIRST",
   }
 
-  def Plan_Name(cls, x): return cls._Plan_NAMES.get(x, "")
-  Plan_Name = classmethod(Plan_Name)
+  def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
+  Hint_Name = classmethod(Hint_Name)
 
   has_app_ = 0
   app_ = ""
--- a/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Sat Dec 06 16:52:21 2008 +0000
@@ -141,6 +141,14 @@
         queries.append(arg + '=' + urllib.quote_plus(self.request.get(arg)))
     return self.request.path + '?' + '&'.join(queries)
 
+  def in_production(self):
+    """Detects if app is running in production.
+
+    Returns a boolean.
+    """
+    server_software = os.environ['SERVER_SOFTWARE']
+    return not server_software.startswith('Development')
+
 
 class DefaultPageHandler(BaseRequestHandler):
   """Redirects to the Datastore application by default."""
@@ -309,6 +317,9 @@
 
     if values['show_stats']:
       memcache_stats = memcache.get_stats()
+      if not memcache_stats:
+        memcache_stats = {'hits': 0, 'misses': 0, 'byte_hits': 0, 'items': 0,
+                          'bytes': 0, 'oldest_item_age': 0}
       values['stats'] = memcache_stats
       try:
         hitratio = memcache_stats['hits'] * 100 / (memcache_stats['hits']
@@ -467,29 +478,17 @@
 
   PATH = '/datastore'
 
-  SCHEMA_CACHE_TIMEOUT = 60
+  def get_kinds(self):
+    """Get sorted list of kind names the datastore knows about.
 
-  def get_kinds(self, cache={}):
-    """Return sorted list of kind names the datastore knows about.
-
-    The list of kinds is cached for a short time.
+    This should only be called in the development environment as GetSchema is
+    expensive and no caching is done.
     """
-    server_software = os.environ['SERVER_SOFTWARE']
-    in_production = not server_software.startswith('Development')
-
-    if in_production and ('kinds' in cache):
-      if cache['kinds_timestamp'] + self.SCHEMA_CACHE_TIMEOUT > time.time():
-        return cache['kinds']
-      else:
-        del cache['kinds']
     schema = datastore_admin.GetSchema()
     kinds = []
     for entity_proto in schema:
       kinds.append(entity_proto.key().path().element_list()[-1].type())
     kinds.sort()
-    if in_production:
-      cache['kinds'] = kinds
-      cache['kinds_timestamp'] = time.time()
     return kinds
 
   def get(self):
@@ -553,9 +552,16 @@
       })
     current_page += 1
 
+    in_production = self.in_production()
+    if in_production:
+      kinds = None
+    else:
+      kinds = self.get_kinds()
+
     values = {
       'request': self.request,
-      'kinds': self.get_kinds(),
+      'in_production': in_production,
+      'kinds': kinds,
       'kind': self.request.get('kind'),
       'order': self.request.get('order'),
       'headers': headers,
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/datastore.css	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/datastore.css	Sat Dec 06 16:52:21 2008 +0000
@@ -2,6 +2,13 @@
   margin-bottom: 1em;
 }
 
+#hint {
+  background-color: #F6F9FF;
+  border: 1px solid #E5ECF9;
+  margin-bottom: 1em;
+  padding: 0.5em 1em;
+}
+
 #message {
   color: red;
   position: relative;
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/datastore.html	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/datastore.html	Sat Dec 06 16:52:21 2008 +0000
@@ -8,6 +8,20 @@
   <script type="text/javascript">
   //<![CDATA[
 
+  {% if in_production %}
+  function manageCreateButton() {
+    var input = document.getElementById("kind_input");
+    var button = document.getElementById("create_button");
+    if (input && button) {
+      if (input.value.length == 0) {
+        button.disabled = true;
+      } else {
+        button.disabled = false;
+      }
+    }
+  }
+  {% endif %}
+
   {% if entities %}
   function checkAllEntities() {
     var allCheckBox = document.getElementById("allkeys");
@@ -47,6 +61,13 @@
 {% block body %}
   <h3>Datastore Viewer</h3>
 
+  {% if in_production %}
+  <div id="hint">
+    The <a href="http://appengine.google.com/datastore/explorer?&app_id={{ application_name }}">Admin Console Data Viewer</a>
+    allows you to run GQL queries and much more!
+  </div>
+  {% endif %}
+
   {% if message %}
   <div id="message">
     {{ message }}
@@ -58,18 +79,22 @@
     Results <span class="count">{{ start|add:1 }}</span> - <span class="count">{{ entities|length|add:start }}</span> of <span class="count">{{ total }}</span>
   </div>
   {% endif %}
-  
-  {% if kinds %}
+
+  {% if kinds or in_production %}
   <form action="{{ request.path }}" method="get">
     <div id="datastore_search">
       <span class="field">
         <span class="name">Entity Kind:</span>
         <span class="value">
+          {% if in_production %}
+          <input id="kind_input" name="kind" type="text" size="8" value="{{ kind|escape }}" onkeyup="manageCreateButton()" onkeydown="manageCreateButton()"/>
+          {% else %}
           <select name="kind" id="kind_input">
             {% for a_kind in kinds %}
             <option value="{{ a_kind|escape }}"{% ifequal a_kind kind %} selected="selected"{% endifequal %}>{{ a_kind|escape }}</option>
             {% endfor %}
           </select>
+          {% endif %}
         </span>
       </span>
       <span class="buttons">
@@ -133,7 +158,6 @@
             </div>
           </div>
         </div>
-    
     </form>
   {% else %}
     {% if kind %}
@@ -146,6 +170,9 @@
   <script type="text/javascript">
   //<![CDATA[
 
+  {% if in_production %}
+  manageCreateButton();
+  {% endif %}
   updateDeleteButtonAndCheckbox();
   document.getElementById("kind_input").focus();
 
--- a/thirdparty/google_appengine/google/appengine/ext/bulkload/__init__.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/bulkload/__init__.py	Sat Dec 06 16:52:21 2008 +0000
@@ -382,8 +382,7 @@
           output.append('error:\n%s' % stacktrace)
           return (httplib.BAD_REQUEST, ''.join(output))
 
-    for entity in entities:
-      datastore.Put(entity)
+    datastore.Put(entities)
 
     return (httplib.OK, ''.join(output))
 
--- a/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Sat Dec 06 16:52:21 2008 +0000
@@ -77,7 +77,6 @@
 
 
 
-
 import datetime
 import logging
 import time
@@ -119,8 +118,10 @@
 Text = datastore_types.Text
 Blob = datastore_types.Blob
 
+
 _kind_map = {}
 
+
 _SELF_REFERENCE = object()
 
 
@@ -146,13 +147,17 @@
 
 
 class ConfigurationError(Error):
-  """Raised when a property is improperly configured."""
+  """Raised when a property or model is improperly configured."""
 
 
 class ReservedWordError(Error):
   """Raised when a property is defined for a reserved word."""
 
 
+class DerivedPropertyError(Error):
+  """Raised when attempting to assign a value to a derived property."""
+
+
 _ALLOWED_PROPERTY_TYPES = set([
     basestring,
     str,
@@ -226,6 +231,36 @@
         "definition." % locals())
 
 
+def _initialize_properties(model_class, name, bases, dct):
+  """Initialize Property attributes for Model-class.
+
+  Args:
+    model_class: Model class to initialize properties for.
+  """
+  model_class._properties = {}
+  defined = set()
+  for base in bases:
+    if hasattr(base, '_properties'):
+      property_keys = base._properties.keys()
+      duplicate_properties = defined.intersection(property_keys)
+      if duplicate_properties:
+        raise DuplicatePropertyError(
+            'Duplicate properties in base class %s already defined: %s' %
+            (base.__name__, list(duplicate_properties)))
+      defined.update(property_keys)
+      model_class._properties.update(base._properties)
+
+  for attr_name in dct.keys():
+    attr = dct[attr_name]
+    if isinstance(attr, Property):
+      check_reserved_word(attr_name)
+      if attr_name in defined:
+        raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
+      defined.add(attr_name)
+      model_class._properties[attr_name] = attr
+      attr.__property_config__(model_class, attr_name)
+
+
 class PropertiedClass(type):
   """Meta-class for initializing Model classes properties.
 
@@ -239,7 +274,7 @@
   Duplicate properties are not permitted.
   """
 
-  def __init__(cls, name, bases, dct):
+  def __init__(cls, name, bases, dct, map_kind=True):
     """Initializes a class that might have property definitions.
 
     This method is called when a class is created with the PropertiedClass
@@ -272,30 +307,10 @@
     """
     super(PropertiedClass, cls).__init__(name, bases, dct)
 
-    cls._properties = {}
-    defined = set()
-    for base in bases:
-      if hasattr(base, '_properties'):
-        property_keys = base._properties.keys()
-        duplicate_properties = defined.intersection(property_keys)
-        if duplicate_properties:
-          raise DuplicatePropertyError(
-              'Duplicate properties in base class %s already defined: %s' %
-              (base.__name__, list(duplicate_properties)))
-        defined.update(property_keys)
-        cls._properties.update(base._properties)
-
-    for attr_name in dct.keys():
-      attr = dct[attr_name]
-      if isinstance(attr, Property):
-        check_reserved_word(attr_name)
-        if attr_name in defined:
-          raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
-        defined.add(attr_name)
-        cls._properties[attr_name] = attr
-        attr.__property_config__(cls, attr_name)
-
-    _kind_map[cls.kind()] = cls
+    _initialize_properties(cls, name, bases, dct)
+
+    if map_kind:
+      _kind_map[cls.kind()] = cls
 
 
 class Property(object):
@@ -466,7 +481,10 @@
     return value
 
   def _attr_name(self):
-    """Attribute name we use for this property in model instances."""
+    """Attribute name we use for this property in model instances.
+
+    DO NOT USE THIS METHOD.
+    """
     return '_' + self.name
 
   data_type = str
@@ -500,7 +518,12 @@
 
   __metaclass__ = PropertiedClass
 
-  def __init__(self, parent=None, key_name=None, _app=None, **kwds):
+  def __init__(self,
+               parent=None,
+               key_name=None,
+               _app=None,
+               _from_entity=False,
+               **kwds):
     """Creates a new instance of this model.
 
     To create a new entity, you instantiate a model and then call save(),
@@ -524,6 +547,7 @@
         level instance.
       key_name: Name for new model instance.
       _app: Intentionally undocumented.
+      _from_entity: Intentionally undocumented.
       args: Keyword arguments mapping to properties of model.
     """
     if key_name == '':
@@ -533,15 +557,22 @@
                         key_name.__class__.__name__)
 
     if parent is not None:
-      if not isinstance(parent, Model):
+      if not isinstance(parent, (Model, Key)):
         raise TypeError('Expected Model type; received %s (is %s)' %
                         (parent, parent.__class__.__name__))
-      if not parent.is_saved():
+      if isinstance(parent, Model) and not parent.has_key():
         raise BadValueError(
-            "%s instance must be saved before it can be used as a "
+            "%s instance must have a complete key before it can be used as a "
             "parent." % parent.kind())
-
-    self._parent = parent
+      if isinstance(parent, Key):
+        self._parent_key = parent
+        self._parent = None
+      else:
+        self._parent_key = parent.key()
+        self._parent = parent
+    else:
+      self._parent_key = None
+      self._parent = None
     self._entity = None
     self._key_name = key_name
     self._app = _app
@@ -552,7 +583,11 @@
         value = kwds[prop.name]
       else:
         value = prop.default_value()
-      prop.__set__(self, value)
+      try:
+        prop.__set__(self, value)
+      except DerivedPropertyError, e:
+        if prop.name in kwds and not _from_entity:
+          raise
 
   def key(self):
     """Unique key for this entity.
@@ -569,6 +604,9 @@
     """
     if self.is_saved():
       return self._entity.key()
+    elif self._key_name:
+      parent = self._parent and self._parent.key()
+      return Key.from_path(self.kind(), self._key_name, parent=parent)
     else:
       raise NotSavedError()
 
@@ -634,7 +672,12 @@
     if self.is_saved():
       entity = self._entity
     else:
-      if self._parent is not None:
+      if self._parent_key is not None:
+        entity = _entity_class(self.kind(),
+                               parent=self._parent_key,
+                               name=self._key_name,
+                               _app=self._app)
+      elif self._parent is not None:
         entity = _entity_class(self.kind(),
                                parent=self._parent._entity,
                                name=self._key_name,
@@ -668,6 +711,18 @@
     """
     return self._entity is not None
 
+  def has_key(self):
+    """Determine if this model instance has a complete key.
+
+    Ids are not assigned until the data is saved to the Datastore, but
+    instances with a key name always have a full key.
+
+    Returns:
+      A true value if the object has been persisted to the datastore or has a
+      key_name, otherwise a false value.
+    """
+    return self.is_saved() or self._key_name
+
   def dynamic_properties(self):
     """Returns a list of all dynamic properties defined for instance."""
     return []
@@ -683,10 +738,10 @@
       Parent of contained entity or parent provided in constructor, None if
       instance has no parent.
     """
-    if (self._parent is None and
-        self._entity is not None and
-        self._entity.parent() is not None):
-      self._parent = get(self._entity.parent())
+    if self._parent is None:
+      parent_key = self.parent_key()
+      if parent_key is not None:
+        self._parent = get(parent_key)
     return self._parent
 
   def parent_key(self):
@@ -698,7 +753,9 @@
     Returns:
       Parent key of entity, None if there is no parent.
     """
-    if self._parent is not None:
+    if self._parent_key is not None:
+      return self._parent_key
+    elif self._parent is not None:
       return self._parent.key()
     elif self._entity is not None:
       return self._entity.parent()
@@ -924,7 +981,7 @@
                       (repr(cls), entity.kind()))
 
     entity_values = cls._load_entity_values(entity)
-    instance = cls(None, **entity_values)
+    instance = cls(None, _from_entity=True, **entity_values)
     instance._entity = entity
     del instance._key_name
     return instance
@@ -1016,14 +1073,23 @@
   """Delete one or more Model instances.
 
   Args:
-    models: Model instance or list of Model instances.
+    models: Model instance, key, key string, or list of any of these.
 
   Raises:
     TransactionFailedError if the data could not be committed.
   """
-  models, multiple = datastore.NormalizeAndTypeCheck(models, Model)
-  entities = [model.key() for model in models]
-  keys = datastore.Delete(entities)
+  models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
+      models, (Model, Key, basestring))
+  keys = []
+  for model_or_key in models_or_keys:
+    if isinstance(model_or_key, Model):
+      key = model_or_key = model_or_key.key()
+    elif isinstance(model_or_key, basestring):
+      key = model_or_key = Key(model_or_key)
+    else:
+      key = model_or_key
+    keys.append(key)
+  datastore.Delete(keys)
 
 
 class Expando(Model):
@@ -1133,7 +1199,7 @@
         self._dynamic_properties = {}
       self._dynamic_properties[key] = value
     else:
-      Model.__setattr__(self, key, value)
+      super(Expando, self).__setattr__(key, value)
 
   def __getattr__(self, key):
     """If no explicit attribute defined, retrieve value from entity.
@@ -1211,7 +1277,7 @@
     Args:
       entity: Entity which contain values to search dyanmic properties for.
     """
-    entity_values = Model._load_entity_values(entity)
+    entity_values = super(Expando, cls)._load_entity_values(entity)
     for key, value in entity.iteritems():
       if key not in entity_values:
         entity_values[str(key)] = value
@@ -1537,7 +1603,7 @@
       else:
         raise NotSavedError()
     elif isinstance(ancestor, Model):
-      if ancestor.is_saved():
+      if ancestor.has_key():
         self.__ancestor = ancestor.key()
       else:
         raise NotSavedError()
@@ -2170,20 +2236,33 @@
       if not isinstance(value, list):
         raise BadValueError('Property %s must be a list' % self.name)
 
-      if self.item_type in (int, long):
-        item_type = (int, long)
-      else:
-        item_type = self.item_type
-
-      for item in value:
-        if not isinstance(item, item_type):
-          if item_type == (int, long):
-            raise BadValueError('Items in the %s list must all be integers.' %
-                                self.name)
-          else:
-            raise BadValueError(
-                'Items in the %s list must all be %s instances' %
-                (self.name, self.item_type.__name__))
+      value = self.validate_list_contents(value)
+    return value
+
+  def validate_list_contents(self, value):
+    """Validates that all items in the list are of the correct type.
+
+    Returns:
+      The validated list.
+
+    Raises:
+      BadValueError if the list contains items that are not instances of the
+      item_type given to the constructor.
+    """
+    if self.item_type in (int, long):
+      item_type = (int, long)
+    else:
+      item_type = self.item_type
+
+    for item in value:
+      if not isinstance(item, item_type):
+        if item_type == (int, long):
+          raise BadValueError('Items in the %s list must all be integers.' %
+                              self.name)
+        else:
+          raise BadValueError(
+              'Items in the %s list must all be %s instances' %
+              (self.name, self.item_type.__name__))
     return value
 
   def empty(self, value):
@@ -2210,6 +2289,15 @@
     """
     return list(super(ListProperty, self).default_value())
 
+  def get_value_for_datastore(self, model_instance):
+    """Get value from property to send to datastore.
+
+    Returns:
+      validated list appropriate to save in the datastore.
+    """
+    return self.validate_list_contents(
+        super(ListProperty, self).get_value_for_datastore(model_instance))
+
 
 class StringListProperty(ListProperty):
   """A property that stores a list of strings.
@@ -2368,9 +2456,9 @@
     if isinstance(value, datastore.Key):
       return value
 
-    if value is not None and not value.is_saved():
+    if value is not None and not value.has_key():
       raise BadValueError(
-          '%s instance must be saved before it can be stored as a '
+          '%s instance must have a complete key before it can be stored as a '
           'reference' % self.reference_class.kind())
 
     value = super(ReferenceProperty, self).validate(value)
--- a/thirdparty/google_appengine/google/appengine/ext/webapp/template.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/template.py	Sat Dec 06 16:52:21 2008 +0000
@@ -176,12 +176,12 @@
       return value.replace(arg, '')
     register.filter(cut)
 
-  Then, register the custom template module with the register_template_module
+  Then, register the custom template module with the register_template_library
   function below in your application module:
 
     myapp.py
     ========
-    webapp.template.register_template_module('templatefilters')
+    webapp.template.register_template_library('templatefilters')
   """
   return django.template.Library()
 
--- a/thirdparty/google_appengine/google/appengine/ext/webapp/util.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/util.py	Sat Dec 06 16:52:21 2008 +0000
@@ -34,7 +34,7 @@
 def login_required(handler_method):
   """A decorator to require that a user be logged in to access a handler.
 
-  To use it, decorate your get() or post() method like this:
+  To use it, decorate your get() method like this:
 
     @login_required
     def get(self):
@@ -42,8 +42,8 @@
       self.response.out.write('Hello, ' + user.nickname())
 
   We will redirect to a login page if the user is not logged in. We always
-  redirect to the request URI, and Google Accounts only redirects back as a GET request,
-  so this should not be used for POSTs.
+  redirect to the request URI, and Google Accounts only redirects back as a GET
+  request, so this should not be used for POSTs.
   """
   def check_login(self, *args):
     if self.request.method != 'GET':
--- a/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Sat Dec 06 16:52:21 2008 +0000
@@ -196,8 +196,9 @@
     """
     continue_location = "http://localhost/"
     args = {"continue": continue_location, "auth": auth_token}
-    req = self._CreateRequest("http://%s/_ah/login?%s" %
-                              (self.host, urllib.urlencode(args)))
+    login_path = os.environ.get("APPCFG_LOGIN_PATH", "/_ah")
+    req = self._CreateRequest("http://%s%s/login?%s" %
+                              (self.host, login_path, urllib.urlencode(args)))
     try:
       response = self.opener.open(req)
     except urllib2.HTTPError, e:
@@ -927,6 +928,7 @@
       for i in xrange(self.num_days):
         then = time.gmtime(now - 24*3600 * i)
         patterns.append(re.escape(time.strftime("%d/%m/%Y", then)))
+        patterns.append(re.escape(time.strftime("%d/%b/%Y", then)))
       self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):")
 
   def DownloadLogs(self):
@@ -1453,6 +1455,7 @@
     args: The positional command line args left over after parsing the options.
     raw_input_fn: Function used for getting raw user input, like email.
     password_input_fn: Function used for getting user password.
+    error_fh: Unexpected HTTPErrors are printed to this file handle.
 
   Attributes for testing:
     parser_class: The class to use for parsing the command line.  Because
@@ -1463,7 +1466,8 @@
   def __init__(self, argv, parser_class=optparse.OptionParser,
                rpc_server_class=HttpRpcServer,
                raw_input_fn=raw_input,
-               password_input_fn=getpass.getpass):
+               password_input_fn=getpass.getpass,
+               error_fh=sys.stderr):
     """Initializer.  Parses the cmdline and selects the Action to use.
 
     Initializes all of the attributes described in the class docstring.
@@ -1475,12 +1479,14 @@
       rpc_server_class: RPC server class to use for this application.
       raw_input_fn: Function used for getting user email.
       password_input_fn: Function used for getting user password.
+      error_fh: Unexpected HTTPErrors are printed to this file handle.
     """
     self.parser_class = parser_class
     self.argv = argv
     self.rpc_server_class = rpc_server_class
     self.raw_input_fn = raw_input_fn
     self.password_input_fn = password_input_fn
+    self.error_fh = error_fh
 
     self.parser = self._GetOptionParser()
     for action in self.actions.itervalues():
@@ -1509,23 +1515,20 @@
     global verbosity
     verbosity = self.options.verbose
 
-  def Run(self, error_fh=sys.stderr):
+  def Run(self):
     """Executes the requested action.
 
     Catches any HTTPErrors raised by the action and prints them to stderr.
-
-    Args:
-      error_fh: Print any HTTPErrors to this file handle.
     """
     try:
       self.action.function(self)
     except urllib2.HTTPError, e:
       body = e.read()
-      print >>error_fh, ("Error %d: --- begin server output ---\n"
-                         "%s\n--- end server output ---" %
-                         (e.code, body.rstrip("\n")))
+      print >>self.error_fh, ("Error %d: --- begin server output ---\n"
+                              "%s\n--- end server output ---" %
+                              (e.code, body.rstrip("\n")))
     except yaml_errors.EventListenerError, e:
-      print >>error_fh, ("Error parsing yaml file:\n%s" % e)
+      print >>self.error_fh, ("Error parsing yaml file:\n%s" % e)
 
   def _GetActionDescriptions(self):
     """Returns a formatted string containing the short_descs for all actions."""
@@ -1735,7 +1738,15 @@
     index_defs = self._ParseIndexYaml(basepath)
     if index_defs:
       index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
-      index_upload.DoUpload()
+      try:
+        index_upload.DoUpload()
+      except urllib2.HTTPError, e:
+        StatusUpdate("Error %d: --- begin server output ---\n"
+                     "%s\n--- end server output ---" %
+                     (e.code, e.read().rstrip("\n")))
+        print >> self.error_fh, (
+          "Your app was updated, but there was an error updating your indexes. "
+          "Please retry later with appcfg.py update_indexes.")
 
   def _UpdateOptions(self, parser):
     """Adds update-specific options to 'parser'.
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Sat Dec 06 16:52:21 2008 +0000
@@ -46,6 +46,7 @@
 import imp
 import inspect
 import itertools
+import locale
 import logging
 import mimetools
 import mimetypes
@@ -111,6 +112,10 @@
                        ('.wbmp', 'image/vnd.wap.wbmp')):
   mimetypes.add_type(mime_type, ext)
 
+MAX_RUNTIME_RESPONSE_SIZE = 1 << 20
+
+MAX_REQUEST_SIZE = 10 * 1024 * 1024
+
 
 class Error(Exception):
   """Base-class for exceptions in this module."""
@@ -570,6 +575,13 @@
   return ('Linux', '', '', '', '')
 
 
+def FakeSetLocale(category, value=None, original_setlocale=locale.setlocale):
+  """Fake version of locale.setlocale that only supports the default."""
+  if value not in (None, '', 'C', 'POSIX'):
+    raise locale.Error, 'locale emulation only supports "C" locale'
+  return original_setlocale(category, 'C')
+
+
 def IsPathInSubdirectories(filename,
                            subdirectories,
                            normcase=os.path.normcase):
@@ -681,7 +693,8 @@
                       if os.path.isfile(filename))
 
   ALLOWED_DIRS = set([
-    os.path.normcase(os.path.realpath(os.path.dirname(os.__file__)))
+    os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
+    os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
   ])
 
   NOT_ALLOWED_DIRS = set([
@@ -713,8 +726,10 @@
     Args:
       root_path: Path to the root of the application.
     """
-    FakeFile._application_paths = set(os.path.abspath(path)
-                                      for path in application_paths)
+    FakeFile._application_paths = (set(os.path.realpath(path)
+                                       for path in application_paths) |
+                                   set(os.path.abspath(path)
+                                       for path in application_paths))
 
   @staticmethod
   def IsFileAccessible(filename, normcase=os.path.normcase):
@@ -756,7 +771,7 @@
 
     return False
 
-  def __init__(self, filename, mode='r', **kwargs):
+  def __init__(self, filename, mode='r', bufsize=-1, **kwargs):
     """Initializer. See file built-in documentation."""
     if mode not in FakeFile.ALLOWED_MODES:
       raise IOError('invalid mode: %s' % mode)
@@ -764,7 +779,7 @@
     if not FakeFile.IsFileAccessible(filename):
       raise IOError(errno.EACCES, 'file not accessible')
 
-    super(FakeFile, self).__init__(filename, mode, **kwargs)
+    super(FakeFile, self).__init__(filename, mode, bufsize, **kwargs)
 
 
 class RestrictedPathFunction(object):
@@ -1024,9 +1039,14 @@
   ]
 
   _MODULE_OVERRIDES = {
+    'locale': {
+      'setlocale': FakeSetLocale,
+    },
+
     'os': {
       'listdir': RestrictedPathFunction(os.listdir),
-      'lstat': RestrictedPathFunction(os.lstat),
+
+      'lstat': RestrictedPathFunction(os.stat),
       'stat': RestrictedPathFunction(os.stat),
       'uname': FakeUname,
       'urandom': FakeURandom,
@@ -1535,7 +1555,8 @@
     depth_count += 1
 
   for index in xrange(depth_count):
-    current_init_file = os.path.join(module_base, '__init__.py')
+    current_init_file = os.path.abspath(
+        os.path.join(module_base, '__init__.py'))
 
     if not isfile(current_init_file):
       missing_init_files.append(current_init_file)
@@ -2403,6 +2424,15 @@
 
         infile = cStringIO.StringIO(self.rfile.read(
             int(self.headers.get('content-length', 0))))
+
+        request_size = len(infile.getvalue())
+        if request_size > MAX_REQUEST_SIZE:
+          msg = ('HTTP request was too large: %d.  The limit is: %d.'
+                 % (request_size, MAX_REQUEST_SIZE))
+          logging.error(msg)
+          self.send_response(httplib.REQUEST_ENTITY_TOO_LARGE, msg)
+          return
+
         outfile = cStringIO.StringIO()
         try:
           dispatcher.Dispatch(self.path,
@@ -2416,8 +2446,21 @@
 
         outfile.flush()
         outfile.seek(0)
+
         status_code, status_message, header_data, body = RewriteResponse(outfile)
 
+        runtime_response_size = len(outfile.getvalue())
+        if runtime_response_size > MAX_RUNTIME_RESPONSE_SIZE:
+          status_code = 403
+          status_message = 'Forbidden'
+          new_headers = []
+          for header in header_data.split('\n'):
+            if not header.lower().startswith('content-length'):
+              new_headers.append(header)
+          header_data = '\n'.join(new_headers)
+          body = ('HTTP response was too large: %d.  The limit is: %d.'
+                  % (runtime_response_size, MAX_RUNTIME_RESPONSE_SIZE))
+
       except yaml_errors.EventListenerError, e:
         title = 'Fatal error when loading application configuration'
         msg = '%s:\n%s' % (title, str(e))
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver_index.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver_index.py	Sat Dec 06 16:52:21 2008 +0000
@@ -64,9 +64,9 @@
   indexes = dict((key, 0) for key in all_keys - manual_keys)
 
   for query, count in query_history.iteritems():
-    key = datastore_index.CompositeIndexForQuery(query)
-    if key is not None:
-      key = key[:3]
+    required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
+    if required:
+      key = (kind, ancestor, props)
       if key not in manual_keys:
         if key in indexes:
           indexes[key] += count
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver_login.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver_login.py	Sat Dec 06 16:52:21 2008 +0000
@@ -247,8 +247,6 @@
       output_headers.append(SetUserInfoCookie(set_email, set_admin))
 
     redirect_url = continue_url or login_url
-  elif email and continue_url:
-    redirect_url = continue_url
 
   if redirect_url:
     outfile.write('Status: 302 Redirecting to continue URL\r\n')
--- a/thirdparty/google_appengine/google/appengine/tools/os_compat.py	Sat Dec 06 14:50:45 2008 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/os_compat.py	Sat Dec 06 16:52:21 2008 +0000
@@ -40,3 +40,5 @@
   class WindowsError(Exception):
     """A fake Windows Error exception which should never be thrown."""
 
+
+ERROR_PATH_NOT_FOUND = 3