Update Google App Engine from 1.2.3 to 1.2.5 in thirdparty folder.
author    Pawel Solyga <Pawel.Solyga@gmail.com>
date      Sun, 06 Sep 2009 23:31:53 +0200
changeset 2864:2e0b0af889be
parent    2862:27971a13089f
child     2866:a04b1e4126c4
thirdparty/google_appengine/RELEASE_NOTES
thirdparty/google_appengine/VERSION
thirdparty/google_appengine/google/appengine/api/api_base_pb.py
thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py
thirdparty/google_appengine/google/appengine/api/apiproxy_stub_map.py
thirdparty/google_appengine/google/appengine/api/app_logging.py
thirdparty/google_appengine/google/appengine/api/appinfo.py
thirdparty/google_appengine/google/appengine/api/capabilities/capability_service_pb.py
thirdparty/google_appengine/google/appengine/api/datastore.py
thirdparty/google_appengine/google/appengine/api/datastore_admin.py
thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py
thirdparty/google_appengine/google/appengine/api/datastore_types.py
thirdparty/google_appengine/google/appengine/api/images/images_service_pb.py
thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py
thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py
thirdparty/google_appengine/google/appengine/api/mail.py
thirdparty/google_appengine/google/appengine/api/mail_errors.py
thirdparty/google_appengine/google/appengine/api/mail_service_pb.py
thirdparty/google_appengine/google/appengine/api/memcache/__init__.py
thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py
thirdparty/google_appengine/google/appengine/api/memcache/memcache_stub.py
thirdparty/google_appengine/google/appengine/api/namespace_manager/__init__.py
thirdparty/google_appengine/google/appengine/api/queueinfo.py
thirdparty/google_appengine/google/appengine/api/quota.py
thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py
thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py
thirdparty/google_appengine/google/appengine/api/user_service_pb.py
thirdparty/google_appengine/google/appengine/api/user_service_stub.py
thirdparty/google_appengine/google/appengine/api/users.py
thirdparty/google_appengine/google/appengine/api/validation.py
thirdparty/google_appengine/google/appengine/api/xmpp/__init__.py
thirdparty/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py
thirdparty/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py
thirdparty/google_appengine/google/appengine/base/capabilities_pb.py
thirdparty/google_appengine/google/appengine/cron/GrocLexer.py
thirdparty/google_appengine/google/appengine/cron/GrocParser.py
thirdparty/google_appengine/google/appengine/cron/groctimespecification.py
thirdparty/google_appengine/google/appengine/datastore/action_pb.py
thirdparty/google_appengine/google/appengine/datastore/datastore_index.py
thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py
thirdparty/google_appengine/google/appengine/datastore/datastore_v3_pb.py
thirdparty/google_appengine/google/appengine/datastore/entity_pb.py
thirdparty/google_appengine/google/appengine/dist/_library.py
thirdparty/google_appengine/google/appengine/dist/httplib.py
thirdparty/google_appengine/google/appengine/dist/py_zipimport.py
thirdparty/google_appengine/google/appengine/ext/admin/__init__.py
thirdparty/google_appengine/google/appengine/ext/admin/templates/base.html
thirdparty/google_appengine/google/appengine/ext/admin/templates/css/ae.css
thirdparty/google_appengine/google/appengine/ext/admin/templates/css/inboundmail.css
thirdparty/google_appengine/google/appengine/ext/admin/templates/css/xmpp.css
thirdparty/google_appengine/google/appengine/ext/admin/templates/inboundmail.html
thirdparty/google_appengine/google/appengine/ext/admin/templates/js/multipart_form_data.js
thirdparty/google_appengine/google/appengine/ext/admin/templates/js/rfc822_date.js
thirdparty/google_appengine/google/appengine/ext/admin/templates/js/webhook.js
thirdparty/google_appengine/google/appengine/ext/admin/templates/xmpp.html
thirdparty/google_appengine/google/appengine/ext/db/__init__.py
thirdparty/google_appengine/google/appengine/ext/db/polymodel.py
thirdparty/google_appengine/google/appengine/ext/deferred/__init__.py
thirdparty/google_appengine/google/appengine/ext/deferred/deferred.py
thirdparty/google_appengine/google/appengine/ext/ereporter/__init__.py
thirdparty/google_appengine/google/appengine/ext/ereporter/ereporter.py
thirdparty/google_appengine/google/appengine/ext/ereporter/report_generator.py
thirdparty/google_appengine/google/appengine/ext/ereporter/templates/report.html
thirdparty/google_appengine/google/appengine/ext/gql/__init__.py
thirdparty/google_appengine/google/appengine/ext/key_range/__init__.py
thirdparty/google_appengine/google/appengine/ext/remote_api/handler.py
thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_pb.py
thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py
thirdparty/google_appengine/google/appengine/ext/remote_api/throttle.py
thirdparty/google_appengine/google/appengine/ext/search/__init__.py
thirdparty/google_appengine/google/appengine/ext/webapp/__init__.py
thirdparty/google_appengine/google/appengine/ext/webapp/mail_handlers.py
thirdparty/google_appengine/google/appengine/ext/webapp/xmpp_handlers.py
thirdparty/google_appengine/google/appengine/runtime/apiproxy.py
thirdparty/google_appengine/google/appengine/tools/adaptive_thread_pool.py
thirdparty/google_appengine/google/appengine/tools/appcfg.py
thirdparty/google_appengine/google/appengine/tools/bulkloader.py
thirdparty/google_appengine/google/appengine/tools/dev_appserver.py
thirdparty/google_appengine/google/appengine/tools/dev_appserver_main.py
thirdparty/google_appengine/google/appengine/tools/remote_api_shell.py
thirdparty/google_appengine/google/appengine/tools/requeue.py
thirdparty/google_appengine/google/net/proto/ProtocolBuffer.py
thirdparty/google_appengine/google/net/proto/message_set.py
thirdparty/google_appengine/lib/django/PKG-INFO
thirdparty/google_appengine/lib/django/django/__init__.py
thirdparty/google_appengine/lib/django/django/conf/global_settings.py
thirdparty/google_appengine/lib/django/django/contrib/admin/templates/admin/login.html
thirdparty/google_appengine/lib/django/django/contrib/admin/views/decorators.py
thirdparty/google_appengine/lib/django/django/core/management.py
thirdparty/google_appengine/lib/django/django/core/servers/basehttp.py
thirdparty/google_appengine/lib/django/setup.py
thirdparty/google_appengine/remote_api_shell.py
--- a/thirdparty/google_appengine/RELEASE_NOTES	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/RELEASE_NOTES	Sun Sep 06 23:31:53 2009 +0200
@@ -3,6 +3,48 @@
 
 App Engine Python SDK - Release Notes
 
+Version 1.2.5 - August 13, 2009
+===============================
+  - The Windows Python SDK now includes a GUI launcher, similar to the Mac SDK.
+  - Added XMPP support.
+    http://code.google.com/appengine/docs/python/xmpp
+    http://code.google.com/p/googleappengine/issues/detail?id=231
+  - Datastore now supports multiple writes to the same entity within a 
+    transaction.
+  - Datastore entity key names can now start with a digit.
+      http://code.google.com/p/googleappengine/issues/detail?id=1352
+  - Datastore now supports ancestor + kind queries without a composite index.
+      http://code.google.com/p/googleappengine/issues/detail?id=1003
+  - Bulkloader now supports configurationless dump and restore with new
+    --dump and --restore options.
+  - Bulkloader now supports a --dry_run flag for testing data prior to uploading.
+  - Appcfg.py now allows specifying any end date for request_logs.
+  - Urlfetch now allows setting the Referer header.
+      http://code.google.com/p/googleappengine/issues/detail?id=445
+  - Urlfetch stub now correctly handles HEAD requests.
+      http://code.google.com/p/googleappengine/issues/detail?id=866
+  - New remote_api_shell tool for interactive remote_api operations.
+  - New google.appengine.ext.ereporter module to collect and email exception
+    reports.
+  - New google.appengine.ext.deferred module to execute ad-hoc tasks on the
+    Task Queue.
+
+Version 1.2.4 - July 16, 2009
+=============================
+  - Added support for kindless queries, i.e. transaction descendant queries.
+      http://code.google.com/p/googleappengine/issues/detail?id=913
+  - Composite indexes no longer required for certain types of key queries.
+  - Improved exception reporting in the bulkloader.
+  - Datastore transaction RPC sent at beginning of transaction rather than
+    upon first Datastore request.
+  - PolyModel supports keys_only query.
+      http://code.google.com/p/googleappengine/issues/detail?id=1630
+  - Remote API supports more APIs (Images, Memcache and URLFetch).
+      http://code.google.com/p/googleappengine/issues/detail?id=1596
+  - Remote API shell.
+  - Support for multiple inheritance for Model and PolyModel.
+  - Enhancement to SearchableModel allowing multiple properties to be
+    indexed.
+  - Various code quality improvements.
+
 Version 1.2.3 - June 1, 2009
 ============================
 
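The 1.2.5 notes above introduce, among other things, the deferred library. A minimal usage sketch of deferred.defer, assuming a module-level function and the default task queue (the task function and its arguments are placeholders, not part of this changeset):

    from google.appengine.ext import deferred

    def expensive_task(key_name, factor):
      # Must live at module level so the task queue worker can import,
      # unpickle and call it later.
      pass

    # Enqueue a call to run on the Task Queue roughly 60 seconds from now.
    deferred.defer(expensive_task, 'some-key', 2, _countdown=60)
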
--- a/thirdparty/google_appengine/VERSION	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/VERSION	Sun Sep 06 23:31:53 2009 +0200
@@ -1,3 +1,3 @@
-release: "1.2.3"
-timestamp: 1243913623
+release: "1.2.5"
+timestamp: 1250206498
 api_versions: ['1']
--- a/thirdparty/google_appengine/google/appengine/api/api_base_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/api_base_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -88,18 +88,21 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -169,18 +172,21 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt32(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -250,18 +256,21 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt64(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -330,18 +339,21 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatBool(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -410,18 +422,105 @@
     if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormat(self.value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kvalue = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.DOUBLE,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class BytesProto(ProtocolBuffer.ProtocolMessage):
+  has_value_ = 0
+  value_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def value(self): return self.value_
+
+  def set_value(self, x):
+    self.has_value_ = 1
+    self.value_ = x
+
+  def clear_value(self):
+    if self.has_value_:
+      self.has_value_ = 0
+      self.value_ = ""
+
+  def has_value(self): return self.has_value_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_value()): self.set_value(x.value())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_value_ != x.has_value_: return 0
+    if self.has_value_ and self.value_ != x.value_: return 0
+    return 1
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.DOUBLE,
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_value_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: value not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.value_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_value()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.value_)
 
-  )
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_value(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kvalue = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "value",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -465,15 +564,19 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','VoidProto']
+__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','BytesProto','VoidProto']
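The api_base_pb.py diff above replaces the flat _TEXT/_TYPES tuples with tables built by _BuildTagLookupTable. A standalone illustration of what the helper produces, reusing its definition verbatim from the diff:

    def _BuildTagLookupTable(sparse, maxtag, default=None):
      return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

    # A sparse {tag: name} mapping becomes a dense tuple indexed by tag
    # number; gaps in the numbering yield the default instead of silently
    # shifting every later entry, as a plain tuple literal would.
    print _BuildTagLookupTable({0: 'ErrorCode', 3: 'value'}, 3)
    # -> ('ErrorCode', None, None, 'value')
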
--- a/thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/apiproxy_rpc.py	Sun Sep 06 23:31:53 2009 +0200
@@ -64,6 +64,7 @@
     self.callback = callback
     self.deadline = deadline
     self.stub = stub
+    self.cpu_usage_mcycles = 0
 
   def MakeCall(self, package=None, call=None, request=None, response=None,
                callback=None, deadline=None):
--- a/thirdparty/google_appengine/google/appengine/api/apiproxy_stub_map.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/apiproxy_stub_map.py	Sun Sep 06 23:31:53 2009 +0200
@@ -111,7 +111,10 @@
     unique_key = (key, inspect.getmodule(function))
     if unique_key in self.__unique_keys:
       return False
-    self.__content.insert(index, (key, function, service))
+    num_args = len(inspect.getargspec(function)[0])
+    if (inspect.ismethod(function)):
+      num_args -= 1
+    self.__content.insert(index, (key, function, service, num_args))
     self.__unique_keys.add(unique_key)
     return True
 
@@ -150,7 +153,7 @@
     self.__content = []
     self.__unique_keys = set()
 
-  def Call(self, service, call, request, response):
+  def Call(self, service, call, request, response, rpc=None):
     """Invokes all hooks in this collection.
 
     Args:
@@ -158,10 +161,14 @@
       call: string representing which function to call
       request: protocol buffer for the request
       response: protocol buffer for the response
+      rpc: optional RPC used to make this call
     """
-    for key, function, srv in self.__content:
+    for key, function, srv, num_args in self.__content:
       if srv is None or srv == service:
-        function(service, call, request, response)
+        if num_args == 5:
+          function(service, call, request, response, rpc)
+        else:
+          function(service, call, request, response)
 
 
 class APIProxyStubMap(object):
@@ -240,9 +247,17 @@
     """
     stub = self.GetStub(service)
     assert stub, 'No api proxy found for service "%s"' % service
-    self.__precall_hooks.Call(service, call, request, response)
-    stub.MakeSyncCall(service, call, request, response)
-    self.__postcall_hooks.Call(service, call, request, response)
+    if hasattr(stub, 'CreateRPC'):
+      rpc = stub.CreateRPC()
+      self.__precall_hooks.Call(service, call, request, response, rpc)
+      rpc.MakeCall(service, call, request, response)
+      rpc.Wait()
+      rpc.CheckSuccess()
+      self.__postcall_hooks.Call(service, call, request, response, rpc)
+    else:
+      self.__precall_hooks.Call(service, call, request, response)
+      stub.MakeSyncCall(service, call, request, response)
+      self.__postcall_hooks.Call(service, call, request, response)
 
 
 class UserRPC(object):
@@ -385,7 +400,8 @@
     self.__method = method
     self.__get_result_hook = get_result_hook
     self.__user_data = user_data
-    apiproxy.GetPreCallHooks().Call(self.__service, method, request, response)
+    apiproxy.GetPreCallHooks().Call(
+        self.__service, method, request, response, self.__rpc)
     self.__rpc.MakeCall(self.__service, method, request, response)
 
   def wait(self):
@@ -424,7 +440,7 @@
     if not self.__postcall_hooks_called:
       self.__postcall_hooks_called = True
       apiproxy.GetPostCallHooks().Call(self.__service, self.__method,
-                                       self.request, self.response)
+                                       self.request, self.response, self.__rpc)
 
   def get_result(self):
     """Get the result of the RPC, or possibly raise an exception.
--- a/thirdparty/google_appengine/google/appengine/api/app_logging.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/app_logging.py	Sun Sep 06 23:31:53 2009 +0200
@@ -68,7 +68,9 @@
     StreamHandler.emit()."""
     try:
       message = self._AppLogsMessage(record)
-      self.stream.write(message.encode("UTF-8"))
+      if isinstance(message, unicode):
+        message = message.encode("UTF-8")
+      self.stream.write(message)
       self.flush()
     except (KeyboardInterrupt, SystemExit):
       raise
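The app_logging.py change guards against double encoding: in Python 2, calling encode('UTF-8') on a byte string first decodes it as ASCII, which raises UnicodeDecodeError for non-ASCII bytes. A short demonstration of the pattern outside the SDK:

    message = u'caf\xe9'                  # unicode from a log record
    if isinstance(message, unicode):
      message = message.encode('UTF-8')   # now a plain byte string
    # A str passed in is left untouched; re-encoding the bytes
    # 'caf\xc3\xa9' would have raised UnicodeDecodeError instead.
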
--- a/thirdparty/google_appengine/google/appengine/api/appinfo.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/appinfo.py	Sun Sep 06 23:31:53 2009 +0200
@@ -15,7 +15,7 @@
 # limitations under the License.
 #
 
-"""AppInfo tools
+"""AppInfo tools.
 
 Library for working with AppInfo records in memory, store and load from
 configuration files.
@@ -29,8 +29,8 @@
 
 from google.appengine.api import appinfo_errors
 from google.appengine.api import validation
+from google.appengine.api import yaml_builder
 from google.appengine.api import yaml_listener
-from google.appengine.api import yaml_builder
 from google.appengine.api import yaml_object
 
 
@@ -40,11 +40,13 @@
 _DELTA_REGEX = r'([1-9][0-9]*)([DdHhMm]|[sS]?)'
 _EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)
 
+_SERVICE_RE_STRING = r'(mail|xmpp_message)'
+
 _EXPIRATION_CONVERSIONS = {
-  'd': 60 * 60 * 24,
-  'h': 60 * 60,
-  'm': 60,
-  's': 1,
+    'd': 60 * 60 * 24,
+    'h': 60 * 60,
+    'm': 60,
+    's': 1,
 }
 
 APP_ID_MAX_LEN = 100
@@ -72,17 +74,17 @@
 
 REQUIRE_MATCHING_FILE = 'require_matching_file'
 
-DEFAULT_SKIP_FILES = (r"^(.*/)?("
-                      r"(app\.yaml)|"
-                      r"(app\.yml)|"
-                      r"(index\.yaml)|"
-                      r"(index\.yml)|"
-                      r"(#.*#)|"
-                      r"(.*~)|"
-                      r"(.*\.py[co])|"
-                      r"(.*/RCS/.*)|"
-                      r"(\..*)|"
-                      r")$")
+DEFAULT_SKIP_FILES = (r'^(.*/)?('
+                      r'(app\.yaml)|'
+                      r'(app\.yml)|'
+                      r'(index\.yaml)|'
+                      r'(index\.yml)|'
+                      r'(#.*#)|'
+                      r'(.*~)|'
+                      r'(.*\.py[co])|'
+                      r'(.*/RCS/.*)|'
+                      r'(\..*)|'
+                      r')$')
 
 LOGIN = 'login'
 SECURE = 'secure'
@@ -101,6 +103,7 @@
 HANDLERS = 'handlers'
 DEFAULT_EXPIRATION = 'default_expiration'
 SKIP_FILES = 'skip_files'
+SERVICES = 'inbound_services'
 
 
 class URLMap(validation.Validated):
@@ -176,42 +179,42 @@
 
   ATTRIBUTES = {
 
-    URL: validation.Optional(_URL_REGEX),
-    LOGIN: validation.Options(LOGIN_OPTIONAL,
-                              LOGIN_REQUIRED,
-                              LOGIN_ADMIN,
-                              default=LOGIN_OPTIONAL),
+      URL: validation.Optional(_URL_REGEX),
+      LOGIN: validation.Options(LOGIN_OPTIONAL,
+                                LOGIN_REQUIRED,
+                                LOGIN_ADMIN,
+                                default=LOGIN_OPTIONAL),
 
-    SECURE: validation.Options(SECURE_HTTP,
-                               SECURE_HTTPS,
-                               SECURE_HTTP_OR_HTTPS,
-                               default=SECURE_HTTP),
+      SECURE: validation.Options(SECURE_HTTP,
+                                 SECURE_HTTPS,
+                                 SECURE_HTTP_OR_HTTPS,
+                                 default=SECURE_HTTP),
 
 
 
-    HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
-    UPLOAD: validation.Optional(_FILES_REGEX),
+      HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
+      UPLOAD: validation.Optional(_FILES_REGEX),
 
 
-    HANDLER_STATIC_DIR: validation.Optional(_FILES_REGEX),
+      HANDLER_STATIC_DIR: validation.Optional(_FILES_REGEX),
 
 
-    MIME_TYPE: validation.Optional(str),
-    EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
+      MIME_TYPE: validation.Optional(str),
+      EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
 
 
-    HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
+      HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
 
-    REQUIRE_MATCHING_FILE: validation.Optional(bool),
+      REQUIRE_MATCHING_FILE: validation.Optional(bool),
   }
 
   COMMON_FIELDS = set([URL, LOGIN, SECURE])
 
   ALLOWED_FIELDS = {
-    HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION,
-                           REQUIRE_MATCHING_FILE),
-    HANDLER_STATIC_DIR: (MIME_TYPE, EXPIRATION, REQUIRE_MATCHING_FILE),
-    HANDLER_SCRIPT: (),
+      HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION,
+                             REQUIRE_MATCHING_FILE),
+      HANDLER_STATIC_DIR: (MIME_TYPE, EXPIRATION, REQUIRE_MATCHING_FILE),
+      HANDLER_SCRIPT: (),
   }
 
   def GetHandler(self):
@@ -253,9 +256,9 @@
           not (attribute in allowed_fields or
                attribute in URLMap.COMMON_FIELDS or
                attribute == mapping_type)):
-            raise appinfo_errors.UnexpectedHandlerAttribute(
-                'Unexpected attribute "%s" for mapping type %s.' %
-                (attribute, mapping_type))
+        raise appinfo_errors.UnexpectedHandlerAttribute(
+            'Unexpected attribute "%s" for mapping type %s.' %
+            (attribute, mapping_type))
 
     if mapping_type == HANDLER_STATIC_FILES and not self.upload:
       raise appinfo_errors.MissingHandlerAttribute(
@@ -309,15 +312,18 @@
   ATTRIBUTES = {
 
 
-    APPLICATION: APPLICATION_RE_STRING,
-    VERSION: VERSION_RE_STRING,
-    RUNTIME: RUNTIME_RE_STRING,
+      APPLICATION: APPLICATION_RE_STRING,
+      VERSION: VERSION_RE_STRING,
+      RUNTIME: RUNTIME_RE_STRING,
 
 
-    API_VERSION: API_VERSION_RE_STRING,
-    HANDLERS: validation.Optional(validation.Repeated(URLMap)),
-    DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
-    SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES)
+      API_VERSION: API_VERSION_RE_STRING,
+      HANDLERS: validation.Optional(validation.Repeated(URLMap)),
+
+      SERVICES: validation.Optional(validation.Repeated(
+          validation.Regex(_SERVICE_RE_STRING))),
+      DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
+      SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES)
   }
 
   def CheckInitialized(self):
@@ -349,8 +355,9 @@
     An instance of AppInfoExternal as loaded from a YAML file.
 
   Raises:
-    EmptyConfigurationFile when there are no documents in YAML file.
-    MultipleConfigurationFile when there is more than one document in YAML
+    ValueError: if a specified service is not valid.
+    EmptyConfigurationFile: when there are no documents in YAML file.
+    MultipleConfigurationFile: when there is more than one document in YAML
     file.
   """
   builder = yaml_object.ObjectBuilder(AppInfoExternal)
@@ -386,7 +393,7 @@
 
 _file_path_positive_re = re.compile(r'^[ 0-9a-zA-Z\._\+/\$-]{1,256}$')
 
-_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-')
+_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-|^_ah/')
 
 _file_path_negative_2_re = re.compile(r'//|/$')
 
@@ -413,7 +420,8 @@
   if _file_path_positive_re.match(filename) is None:
     return 'Invalid character in filename: %s' % filename
   if _file_path_negative_1_re.search(filename) is not None:
-    return ('Filename cannot contain "." or ".." or start with "-": %s' %
+    return ('Filename cannot contain "." or ".." '
+            'or start with "-" or "_ah/": %s' %
             filename)
   if _file_path_negative_2_re.search(filename) is not None:
     return 'Filename cannot have trailing / or contain //: %s' % filename
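appinfo.py now accepts an inbound_services list, validated against _SERVICE_RE_STRING ('mail' or 'xmpp_message'). A hedged sketch of an app.yaml using the new key, parsed with this module's loader; the application name, handler URL and script are placeholders:

    from StringIO import StringIO
    from google.appengine.api import appinfo

    APP_YAML = """\
    application: myapp
    version: 1
    runtime: python
    api_version: 1

    inbound_services:
    - mail
    - xmpp_message

    handlers:
    - url: /_ah/xmpp/message/chat/
      script: main.py
    """

    config = appinfo.LoadSingleAppInfo(StringIO(APP_YAML))
    print config.inbound_services   # ['mail', 'xmpp_message']
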
--- a/thirdparty/google_appengine/google/appengine/api/capabilities/capability_service_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/capabilities/capability_service_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -159,26 +159,27 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kpackage = 1
   kcapability = 2
   kcall = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "package",
-   "capability",
-   "call",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "package",
+    2: "capability",
+    3: "call",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -337,26 +338,27 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   ksummary_status = 1
   ktime_until_scheduled = 2
   kconfig = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "summary_status",
-   "time_until_scheduled",
-   "config",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "summary_status",
+    2: "time_until_scheduled",
+    3: "config",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/datastore.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore.py	Sun Sep 06 23:31:53 2009 +0200
@@ -49,12 +49,19 @@
 from google.appengine.runtime import apiproxy_errors
 from google.appengine.datastore import entity_pb
 
+try:
+  from google.appengine.api.labs.taskqueue import taskqueue_service_pb
+except ImportError:
+  from google.appengine.api.taskqueue import taskqueue_service_pb
+
 MAX_ALLOWABLE_QUERIES = 30
 
 DEFAULT_TRANSACTION_RETRIES = 3
 
 _MAX_INDEXED_PROPERTIES = 5000
 
+_MAX_ID_BATCH_SIZE = 1000 * 1000 * 1000
+
 Key = datastore_types.Key
 typename = datastore_types.typename
 
@@ -147,7 +154,7 @@
     return []
 
   for entity in entities:
-    if not entity.kind() or not entity.app():
+    if not entity.kind() or not entity.app_id_namespace():
       raise datastore_errors.BadRequestError(
           'App and kind must not be empty, in entity: %s' % entity)
 
@@ -156,8 +163,6 @@
 
   keys = [e.key() for e in entities]
   tx = _MaybeSetupTransaction(req, keys)
-  if tx:
-    tx.RecordModifiedKeys([k for k in keys if k.has_id_or_name()])
 
   resp = datastore_pb.PutResponse()
   try:
@@ -177,7 +182,6 @@
     entity._Entity__key._Key__reference.CopyFrom(key)
 
   if tx:
-    tx.RecordModifiedKeys([e.key() for e in entities], error_on_repeat=False)
     tx.entity_group = entities[0].entity_group()
 
   if multiple:
@@ -259,8 +263,6 @@
   req.key_list().extend([key._Key__reference for key in keys])
 
   tx = _MaybeSetupTransaction(req, keys)
-  if tx:
-    tx.RecordModifiedKeys(keys)
 
   resp = datastore_pb.DeleteResponse()
   try:
@@ -275,8 +277,8 @@
   Includes read-only accessors for app id, kind, and primary key. Also
   provides dictionary-style access to properties.
   """
-  def __init__(self, kind, parent=None, _app=None, name=None,
-               unindexed_properties=[]):
+  def __init__(self, kind, parent=None, _app=None, name=None, id=None,
+               unindexed_properties=[], _namespace=None):
     """Constructor. Takes the kind and transaction root, which cannot be
     changed after the entity is constructed, and an optional parent. Raises
     BadArgumentError or BadKeyError if kind is invalid or parent is not an
@@ -289,33 +291,41 @@
       parent: Entity or Key
       # if provided, this entity's name.
       name: string
+      # if provided, this entity's id.
+      id: integer
       # if provided, a sequence of property names that should not be indexed
       # by the built-in single property indices.
       unindexed_properties: list or tuple of strings
     """
     ref = entity_pb.Reference()
-    _app = datastore_types.ResolveAppId(_app)
-    ref.set_app(_app)
+    _app_namespace = datastore_types.ResolveAppIdNamespace(_app, _namespace)
+    ref.set_app(_app_namespace.to_encoded())
 
     datastore_types.ValidateString(kind, 'kind',
                                    datastore_errors.BadArgumentError)
-
     if parent is not None:
       parent = _GetCompleteKeyOrError(parent)
-      if _app != parent.app():
+      if _app_namespace != parent.app_id_namespace():
         raise datastore_errors.BadArgumentError(
-            "_app %s doesn't match parent's app %s" % (_app, parent.app()))
+            " %s doesn't match parent's app_namespace %s" %
+            (_app_namespace, parent.app_id_namespace()))
       ref.CopyFrom(parent._Key__reference)
 
     last_path = ref.mutable_path().add_element()
     last_path.set_type(kind.encode('utf-8'))
 
+    if name is not None and id is not None:
+      raise datastore_errors.BadArgumentError(
+          "Cannot set both name and id on an Entity")
+
     if name is not None:
       datastore_types.ValidateString(name, 'name')
-      if name[0] in string.digits:
-        raise datastore_errors.BadValueError('name cannot begin with a digit')
       last_path.set_name(name.encode('utf-8'))
 
+    if id is not None:
+      datastore_types.ValidateInteger(id, 'id')
+      last_path.set_id(id)
+
     unindexed_properties, multiple = NormalizeAndTypeCheck(unindexed_properties, basestring)
     if not multiple:
       raise datastore_errors.BadArgumentError(
@@ -329,15 +339,32 @@
 
   def app(self):
     """Returns the name of the application that created this entity, a
-    string.
+    string or None if not set.
     """
     return self.__key.app()
 
+  def namespace(self):
+    """Returns the namespace of this entity, a string or None.
+    """
+    return self.__key.namespace()
+
+  def app_id_namespace(self):
+    """Returns the AppIdNamespace of this entity or None if not set.
+    """
+    return self.__key.app_id_namespace()
+
   def kind(self):
     """Returns this entity's kind, a string.
     """
     return self.__key.kind()
 
+  def is_saved(self):
+    """Returns if this entity has been saved to the datastore
+    """
+    last_path = self.__key._Key__reference.path().element_list()[-1]
+    return ((last_path.has_name() ^ last_path.has_id()) and
+            self.__key.has_id_or_name())
+
   def key(self):
     """Returns this entity's primary key, a Key instance.
     """
@@ -483,7 +510,15 @@
 
     return xml
 
-  def _ToPb(self):
+  def ToPb(self):
+    """Converts this Entity to its protocol buffer representation.
+
+    Returns:
+      entity_pb.Entity
+    """
+    return self._ToPb(False)
+
+  def _ToPb(self, mark_key_as_saved=True):
     """Converts this Entity to its protocol buffer representation. Not
     intended to be used by application developers.
 
@@ -493,6 +528,9 @@
 
     pb = entity_pb.EntityProto()
     pb.mutable_key().CopyFrom(self.key()._ToPb())
+    last_path = pb.key().path().element_list()[-1]
+    if mark_key_as_saved and last_path.has_name() and last_path.has_id():
+      last_path.clear_id()
 
     group = pb.mutable_entity_group()
     if self.__key.has_id_or_name():
@@ -523,7 +561,25 @@
     return pb
 
   @staticmethod
-  def _FromPb(pb):
+  def FromPb(pb):
+    """Static factory method. Returns the Entity representation of the
+    given protocol buffer (datastore_pb.Entity).
+
+    Args:
+      pb: datastore_pb.Entity or str encoding of a datastore_pb.Entity
+
+    Returns:
+      Entity: the Entity representation of pb
+    """
+    if isinstance(pb, str):
+      real_pb = entity_pb.EntityProto()
+      real_pb.ParseFromString(pb)
+      pb = real_pb
+
+    return Entity._FromPb(pb, require_valid_key=False)
+
+  @staticmethod
+  def _FromPb(pb, require_valid_key=True):
     """Static factory method. Returns the Entity representation of the
     given protocol buffer (datastore_pb.Entity). Not intended to be used by
     application developers.
@@ -542,12 +598,13 @@
     assert pb.key().path().element_size() > 0
 
     last_path = pb.key().path().element_list()[-1]
-    assert last_path.has_id() ^ last_path.has_name()
-    if last_path.has_id():
-      assert last_path.id() != 0
-    else:
-      assert last_path.has_name()
-      assert last_path.name()
+    if require_valid_key:
+      assert last_path.has_id() ^ last_path.has_name()
+      if last_path.has_id():
+        assert last_path.id() != 0
+      else:
+        assert last_path.has_name()
+        assert last_path.name()
 
     unindexed_properties = [p.name() for p in pb.raw_property_list()]
 
@@ -701,7 +758,8 @@
   __inequality_prop = None
   __inequality_count = 0
 
-  def __init__(self, kind, filters={}, _app=None, keys_only=False):
+  def __init__(self, kind=None, filters={}, _app=None, keys_only=False,
+               _namespace=None):
     """Constructor.
 
     Raises BadArgumentError if kind is not a string. Raises BadValueError or
@@ -714,15 +772,17 @@
       filters: dict
       keys_only: boolean
     """
-    datastore_types.ValidateString(kind, 'kind',
-                                   datastore_errors.BadArgumentError)
+    if kind is not None:
+      datastore_types.ValidateString(kind, 'kind',
+                                     datastore_errors.BadArgumentError)
 
     self.__kind = kind
     self.__orderings = []
     self.__filter_order = {}
     self.update(filters)
 
-    self.__app = datastore_types.ResolveAppId(_app)
+    self.__app = datastore_types.ResolveAppIdNamespace(_app,
+                                                       _namespace).to_encoded()
     self.__keys_only = keys_only
 
   def Order(self, *orderings):
@@ -794,6 +854,13 @@
             str(direction))
         direction = Query.ASCENDING
 
+      if (self.__kind is None and
+          (property != datastore_types._KEY_SPECIAL_PROPERTY or
+          direction != Query.ASCENDING)):
+        raise datastore_errors.BadArgumentError(
+            'Only %s ascending orders are supported on kindless queries' %
+            datastore_types._KEY_SPECIAL_PROPERTY)
+
       orderings[i] = (property, direction)
 
     if (orderings and self.__inequality_prop and
@@ -884,16 +951,17 @@
     """
     return self._Run()
 
-  def _Run(self, limit=None, offset=None):
+  def _Run(self, limit=None, offset=None,
+           prefetch_count=None, next_count=None):
     """Runs this query, with an optional result limit and an optional offset.
 
-    Identical to Run, with the extra optional limit and offset parameters.
-    limit and offset must both be integers >= 0.
+    Identical to Run, with the extra optional limit, offset, prefetch_count,
+    next_count parameters. These parameters must be integers >= 0.
 
     This is not intended to be used by application developers. Use Get()
     instead!
     """
-    pb = self._ToPb(limit, offset)
+    pb = self._ToPb(limit, offset, prefetch_count)
     result = datastore_pb.QueryResult()
 
     try:
@@ -907,7 +975,7 @@
         raise datastore_errors.NeedIndexError(
           str(exc) + '\nThis query needs this index:\n' + yaml)
 
-    return Iterator(result)
+    return Iterator(result, batch_size=next_count)
 
   def Get(self, limit, offset=0):
     """Fetches and returns a maximum number of results from the query.
@@ -956,7 +1024,8 @@
           'Argument to Get named \'offset\' must be an int greater than or '
           'equal to 0; received %s (a %s)' % (offset, typename(offset)))
 
-    return self._Run(limit, offset)._Get(limit)
+    return self._Run(limit=limit, offset=offset,
+                     prefetch_count=limit)._Get(limit)
 
   def Count(self, limit=None):
     """Returns the number of entities that this query matches. The returned
@@ -1108,6 +1177,12 @@
           'first sort order, if any sort orders are supplied' %
           ', '.join(self.INEQUALITY_OPERATORS))
 
+    if (self.__kind is None and
+        property != datastore_types._KEY_SPECIAL_PROPERTY):
+      raise datastore_errors.BadFilterError(
+          'Only %s filters are allowed on kindless queries.' %
+          datastore_types._KEY_SPECIAL_PROPERTY)
+
     if property in datastore_types._SPECIAL_PROPERTIES:
       if property == datastore_types._KEY_SPECIAL_PROPERTY:
         for value in values:
@@ -1118,7 +1193,7 @@
 
     return match
 
-  def _ToPb(self, limit=None, offset=None):
+  def _ToPb(self, limit=None, offset=None, count=None):
     """Converts this Query to its protocol buffer representation. Not
     intended to be used by application developers. Enforced by hiding the
     datastore_pb classes.
@@ -1129,6 +1204,8 @@
       # number of results that match the query to skip.  limit is applied
       # after the offset is fulfilled
       offset: int
+      # the requested initial batch size
+      count: int
 
     Returns:
       # the PB representation of this Query
@@ -1138,6 +1215,7 @@
       BadRequestError if called inside a transaction and the query does not
       include an ancestor.
     """
+
     if not self.__ancestor and _CurrentTransactionKey():
       raise datastore_errors.BadRequestError(
         'Only ancestor queries are allowed inside transactions.')
@@ -1145,7 +1223,8 @@
     pb = datastore_pb.Query()
     _MaybeSetupTransaction(pb, [self.__ancestor])
 
-    pb.set_kind(self.__kind.encode('utf-8'))
+    if self.__kind is not None:
+      pb.set_kind(self.__kind.encode('utf-8'))
     pb.set_keys_only(bool(self.__keys_only))
     if self.__app:
       pb.set_app(self.__app.encode('utf-8'))
@@ -1153,6 +1232,8 @@
       pb.set_limit(limit)
     if offset is not None:
       pb.set_offset(offset)
+    if count is not None:
+      pb.set_count(count)
     if self.__ancestor:
       pb.mutable_ancestor().CopyFrom(self.__ancestor._Key__reference)
 
@@ -1193,6 +1274,44 @@
     return pb
 
 
+def AllocateIds(model_key, size):
+  """Allocates a range of IDs of size for the key defined by model_key
+
+  Allocates a range of IDs in the datastore such that those IDs will not
+  be automatically assigned to new entities. You can only allocate IDs
+  for model keys from your app. If there is an error, raises a subclass of
+  datastore_errors.Error.
+
+  Args:
+    model_key: Key or string to serve as a model specifying the ID sequence
+               in which to allocate IDs
+
+  Returns:
+    (start, end) of the allocated range, inclusive.
+  """
+  keys, multiple = NormalizeAndTypeCheckKeys(model_key)
+
+  if len(keys) > 1:
+    raise datastore_errors.BadArgumentError(
+        'Cannot allocate IDs for more than one model key at a time')
+
+  if size > _MAX_ID_BATCH_SIZE:
+    raise datastore_errors.BadArgumentError(
+        'Cannot allocate more than %s ids at a time' % _MAX_ID_BATCH_SIZE)
+
+  req = datastore_pb.AllocateIdsRequest()
+  req.mutable_model_key().CopyFrom(keys[0]._Key__reference)
+  req.set_size(size)
+
+  resp = datastore_pb.AllocateIdsResponse()
+  try:
+    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AllocateIds', req, resp)
+  except apiproxy_errors.ApplicationError, err:
+    raise _ToDatastoreError(err)
+
+  return resp.start(), resp.end()
+
+
 class MultiQuery(Query):
   """Class representing a query which requires multiple datastore queries.
 
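The new AllocateIds call above reserves a contiguous range of IDs so they will never be handed out automatically. A usage sketch pairing it with the Entity id= parameter also added in this changeset ('Person' is a placeholder kind):

    from google.appengine.api import datastore

    model_key = datastore.Key.from_path('Person', 1)
    start, end = datastore.AllocateIds(model_key, 10)

    # IDs in [start, end] are now reserved and safe to assign by hand.
    entity = datastore.Entity('Person', id=start)
    entity['name'] = 'example'
    datastore.Put(entity)
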
@@ -1517,9 +1636,10 @@
   > for person in it:
   >   print 'Hi, %s!' % person['name']
   """
-  def __init__(self, query_result_pb):
+  def __init__(self, query_result_pb, batch_size=None):
     self.__cursor = query_result_pb.cursor()
     self.__keys_only = query_result_pb.keys_only()
+    self.__batch_size = batch_size
     self.__buffer = self._ProcessQueryResult(query_result_pb)
 
   def _Get(self, count):
@@ -1547,16 +1667,16 @@
       # a list of entities or keys
       [Entity or Key, ...]
     """
-    entityList = self._Next(count)
-    while len(entityList) < count and self.__more_results:
-      next_results = self._Next(count - len(entityList))
+    entity_list = self._Next(count)
+    while len(entity_list) < count and self.__more_results:
+      next_results = self._Next(count - len(entity_list), self.__batch_size)
       if not next_results:
         break
-      entityList += next_results
-    return entityList;
-
-  def _Next(self, count):
-    """Returns the next result(s) of the query.
+      entity_list += next_results
+    return entity_list;
+
+  def _Next(self, count=None):
+    """Returns the next batch of results.
 
     Not intended to be used by application developers. Use the python
     iterator protocol instead.
@@ -1565,11 +1685,14 @@
     results. If the query specified a sort order, results are returned in that
     order. Otherwise, the order is undefined.
 
-    The argument, count, specifies the number of results to return. However, the
-    length of the returned list may be smaller than count. This is the case if
-    count is greater than the number of remaining results or the size of the
-    remaining results exciteds the RPC buffer limit. Use _Get to insure all
-    possible entities are retrieved.
+    The optional argument, count, specifies the number of results to return.
+    However, the length of the returned list may be smaller than count. This is
+    the case if count is greater than the number of remaining results or the
+    size of the remaining results exceeds the RPC buffer limit. Use _Get to
+    insure all possible entities are retrieved.
+
+    If the count is omitted, the datastore backend decides how many entities to
+    send.
 
     There is an internal buffer for use with the next() method. If this buffer
     is not empty, up to 'count' values are removed from this buffer and
@@ -1580,19 +1703,23 @@
 
     Args:
       # the number of results to return; must be >= 1
-      count: int or long
+      count: int or long or None
 
     Returns:
       # a list of entities or keys
       [Entity or Key, ...]
     """
-    if not isinstance(count, (int, long)) or count <= 0:
+    if count is not None and (not isinstance(count, (int, long)) or count <= 0):
       raise datastore_errors.BadArgumentError(
         'Argument to _Next must be an int greater than 0; received %s (a %s)' %
         (count, typename(count)))
 
     if self.__buffer:
-      if count <= len(self.__buffer):
+      if count is None:
+        entity_list = self.__buffer
+        self.__buffer = []
+        return entity_list
+      elif count <= len(self.__buffer):
         entity_list = self.__buffer[:count]
         del self.__buffer[:count]
         return entity_list
@@ -1601,13 +1728,15 @@
         self.__buffer = []
         count -= len(entity_list)
     else:
-        entity_list=[]
+        entity_list = []
+
 
     if not self.__more_results:
       return entity_list
 
     req = datastore_pb.NextRequest()
-    req.set_count(count)
+    if count is not None:
+      req.set_count(count)
     req.mutable_cursor().CopyFrom(self.__cursor)
     result = datastore_pb.QueryResult()
     try:
@@ -1642,11 +1771,9 @@
     else:
       return [Entity._FromPb(e) for e in result.result_list()]
 
-  _BUFFER_SIZE = 20
-
   def next(self):
     if not self.__buffer:
-      self.__buffer = self._Next(self._BUFFER_SIZE)
+      self.__buffer = self._Next(self.__batch_size)
     try:
       return self.__buffer.pop(0)
     except IndexError:
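Iterator now threads a caller-supplied batch size through next() in place of the old fixed _BUFFER_SIZE of 20, and a batch size of None lets the backend choose how much to send. A stripped-down sketch of that buffering scheme (illustrative only, not SDK code):

    class BufferedIterator(object):
      def __init__(self, fetch, batch_size=None):
        self._fetch = fetch            # callable(count or None) -> list
        self._batch_size = batch_size
        self._buffer = []

      def __iter__(self):
        return self

      def next(self):
        if not self._buffer:
          # None mirrors the NextRequest without set_count() above:
          # the data source decides the batch size.
          self._buffer = self._fetch(self._batch_size)
        if not self._buffer:
          raise StopIteration
        return self._buffer.pop(0)
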
@@ -1657,44 +1784,28 @@
 class _Transaction(object):
   """Encapsulates a transaction currently in progress.
 
-  If we've sent a BeginTransaction call, then handle will be a
-  datastore_pb.Transaction that holds the transaction handle.
-
   If we know the entity group for this transaction, it's stored in the
-  entity_group attribute, which is set by RecordModifiedKeys().
+  entity_group attribute, which is set by RunInTransaction().
 
   modified_keys is a set containing the Keys of all entities modified (ie put
   or deleted) in this transaction. If an entity is modified more than once, a
   BadRequestError is raised.
   """
-  def __init__(self):
-    """Initializes modified_keys to the empty set."""
-    self.handle = None
+  def __init__(self, handle):
+    """Initializes the transaction.
+
+    Args:
+      handle: a datastore_pb.Transaction returned by a BeginTransaction call
+    """
+    assert isinstance(handle, datastore_pb.Transaction)
+    explanation = []
+    assert handle.IsInitialized(explanation), explanation
+
+    self.handle = handle
     self.entity_group = None
     self.modified_keys = None
     self.modified_keys = set()
 
-  def RecordModifiedKeys(self, keys, error_on_repeat=True):
-    """Updates the modified keys seen so far.
-
-    If error_on_repeat is True and any of the given keys have already been
-    modified, raises BadRequestError.
-
-    Args:
-      keys: sequence of Keys
-    """
-    keys, _ = NormalizeAndTypeCheckKeys(keys)
-    keys = set(keys)
-
-    if error_on_repeat:
-      already_modified = self.modified_keys.intersection(keys)
-      if already_modified:
-        raise datastore_errors.BadRequestError(
-          "Can't update entity more than once in a transaction: %r" %
-          already_modified.pop())
-
-    self.modified_keys.update(keys)
-
 
 def RunInTransaction(function, *args, **kwargs):
   """Runs a function inside a datastore transaction.
@@ -1799,26 +1910,31 @@
 
   try:
     tx_key = _NewTransactionKey()
-    tx = _Transaction()
-    _txes[tx_key] = tx
 
     for i in range(0, retries + 1):
-      tx.modified_keys.clear()
+      handle = datastore_pb.Transaction()
+      try:
+        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
+                                       api_base_pb.VoidProto(), handle)
+      except apiproxy_errors.ApplicationError, err:
+        raise _ToDatastoreError(err)
+
+      tx = _Transaction(handle)
+      _txes[tx_key] = tx
 
       try:
         result = function(*args, **kwargs)
       except:
         original_exception = sys.exc_info()
 
-        if tx.handle:
-          try:
-            resp = api_base_pb.VoidProto()
-            apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
-                                           tx.handle, resp)
-          except:
-            exc_info = sys.exc_info()
-            logging.info('Exception sending Rollback:\n' +
-                         ''.join(traceback.format_exception(*exc_info)))
+        try:
+          resp = api_base_pb.VoidProto()
+          apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
+                                         tx.handle, resp)
+        except:
+          exc_info = sys.exc_info()
+          logging.info('Exception sending Rollback:\n' +
+                       ''.join(traceback.format_exception(*exc_info)))
 
         type, value, trace = original_exception
         if type is datastore_errors.Rollback:
@@ -1826,21 +1942,20 @@
         else:
           raise type, value, trace
 
-      if tx.handle:
-        try:
-          resp = datastore_pb.CommitResponse()
-          apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
-                                         tx.handle, resp)
-        except apiproxy_errors.ApplicationError, err:
-          if (err.application_error ==
-              datastore_pb.Error.CONCURRENT_TRANSACTION):
-            logging.warning('Transaction collision for entity group with '
-                            'key %r. Retrying...', tx.entity_group)
-            tx.handle = None
-            tx.entity_group = None
-            continue
-          else:
-            raise _ToDatastoreError(err)
+      try:
+        resp = datastore_pb.CommitResponse()
+        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
+                                       tx.handle, resp)
+      except apiproxy_errors.ApplicationError, err:
+        if (err.application_error ==
+            datastore_pb.Error.CONCURRENT_TRANSACTION):
+          logging.warning('Transaction collision for entity group with '
+                          'key %r. Retrying...', tx.entity_group)
+          tx.handle = None
+          tx.entity_group = None
+          continue
+        else:
+          raise _ToDatastoreError(err)
 
       return result
 
@@ -1854,12 +1969,11 @@
 
 
 def _MaybeSetupTransaction(request, keys):
-  """Begins a transaction, if necessary, and populates it in the request.
+  """If we're in a transaction, validates and populates it in the request.
 
   If we're currently inside a transaction, this records the entity group,
-  checks that the keys are all in that entity group, creates the transaction
-  PB, and sends the BeginTransaction. It then populates the transaction handle
-  in the request.
+  checks that the keys are all in that entity group, and populates the
+  transaction handle in the request.
 
   Raises BadRequestError if the entity has a different entity group than the
   current transaction.
@@ -1872,7 +1986,9 @@
     _Transaction if we're inside a transaction, otherwise None
   """
   assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,
-                              datastore_pb.DeleteRequest, datastore_pb.Query))
+                              datastore_pb.DeleteRequest, datastore_pb.Query,
+                              taskqueue_service_pb.TaskQueueAddRequest,
+                              )), request.__class__
   tx_key = None
 
   try:
@@ -1883,8 +1999,10 @@
       groups = [k.entity_group() for k in keys]
       if tx.entity_group:
         expected_group = tx.entity_group
+      elif groups:
+        expected_group = groups[0]
       else:
-        expected_group = groups[0]
+        expected_group = None
 
       for group in groups:
         if (group != expected_group or
@@ -1901,12 +2019,7 @@
         if not tx.entity_group and group.has_id_or_name():
           tx.entity_group = group
 
-      if not tx.handle:
-        tx.handle = datastore_pb.Transaction()
-        req = api_base_pb.VoidProto()
-        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction', req,
-                                       tx.handle)
-
+      assert tx.handle.IsInitialized()
       request.mutable_transaction().CopyFrom(tx.handle)
 
       return tx
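With this changeset, RunInTransaction sends BeginTransaction up front on every attempt instead of lazily on the first datastore call, and a CONCURRENT_TRANSACTION failure at commit retries the whole function. A usage sketch with a placeholder counter entity:

    from google.appengine.api import datastore

    counter_key = datastore.Key.from_path('Counter', 'hits')

    def increment(key):
      counter = datastore.Get(key)
      counter['count'] = counter.get('count', 0) + 1
      datastore.Put(counter)
      return counter['count']

    # The function may run more than once under contention, so it should
    # have no side effects beyond its datastore writes.
    new_count = datastore.RunInTransaction(increment, counter_key)
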
--- a/thirdparty/google_appengine/google/appengine/api/datastore_admin.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_admin.py	Sun Sep 06 23:31:53 2009 +0200
@@ -39,7 +39,7 @@
     }
 
 
-def GetSchema(_app=None):
+def GetSchema(_app=None, properties=True, start_kind=None, end_kind=None):
   """Infers an app's schema from the entities in the datastore.
 
   Note that the PropertyValue PBs in the returned EntityProtos are empty
@@ -48,11 +48,21 @@
   throw UserNotFoundError because their email and auth domain fields will be
   empty.
 
+  Args:
+    properties: boolean, whether to include property names and types
+    start_kind, end_kind: optional range endpoints for the kinds to return,
+      compared lexicographically
+
   Returns:
     list of entity_pb.EntityProto, with kind and property names and types
   """
-  req = api_base_pb.StringProto()
-  req.set_value(datastore_types.ResolveAppId(_app))
+  req = datastore_pb.GetSchemaRequest()
+  req.set_app(datastore_types.ResolveAppId(_app))
+  req.set_properties(properties)
+  if start_kind is not None:
+    req.set_start_kind(start_kind)
+  if end_kind is not None:
+    req.set_end_kind(end_kind)
   resp = datastore_pb.Schema()
 
   _Call('GetSchema', req, resp)
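GetSchema gains a properties toggle and an optional, lexicographically compared kind range. A brief sketch of the extended call; the range endpoints are arbitrary examples:

    from google.appengine.api import datastore_admin

    # Kind names only, skipping property names and types:
    kinds = datastore_admin.GetSchema(properties=False)

    # Only kinds sorting between 'A' and 'M':
    subset = datastore_admin.GetSchema(start_kind='A', end_kind='M')
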
--- a/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_file_stub.py	Sun Sep 06 23:31:53 2009 +0200
@@ -75,6 +75,7 @@
 
 _MAX_QUERY_COMPONENTS = 100
 
+_BATCH_SIZE = 20
 
 class _StoredEntity(object):
   """Simple wrapper around an entity stored by the stub.
@@ -105,7 +106,14 @@
     cursor: the integer cursor
     count: the original total number of results
     keys_only: whether the query is keys_only
+
+  Class attributes:
+    _next_cursor: the next cursor to allocate
+    _next_cursor_lock: protects _next_cursor
   """
+  _next_cursor = 1
+  _next_cursor_lock = threading.Lock()
+
   def __init__(self, results, keys_only):
     """Constructor.
 
@@ -117,7 +125,13 @@
     self.__results = results
     self.count = len(results)
     self.keys_only = keys_only
-    self.cursor = id(self)
+
+    self._next_cursor_lock.acquire()
+    try:
+      self.cursor = _Cursor._next_cursor
+      _Cursor._next_cursor += 1
+    finally:
+      self._next_cursor_lock.release()
 
   def PopulateQueryResult(self, result, count):
     """Populates a QueryResult with this cursor and the given number of results.
@@ -272,8 +286,25 @@
       raise datastore_errors.BadRequestError(
           'app %s cannot access app %s\'s data' % (self.__app_id, app_id))
 
+  def __ValidateKey(self, key):
+    """Validate this key.
 
-  def _AppKindForKey(self, key):
+    Args:
+      key: entity_pb.Reference
+
+    Raises:
+      datastore_errors.BadRequestError: if the key is invalid
+    """
+    assert isinstance(key, entity_pb.Reference)
+
+    self.__ValidateAppId(key.app())
+
+    for elem in key.path().element_list():
+      if elem.has_id() == elem.has_name():
+        raise datastore_errors.BadRequestError(
+          'each key path element should have id or name but not both: %r' % key)
+
+  def _AppIdNamespaceKindForKey(self, key):
     """ Get (app, kind) tuple from given key.
 
     The (app, kind) tuple is used as an index into several internal
@@ -295,7 +326,7 @@
       entity: entity_pb.EntityProto
     """
     key = entity.key()
-    app_kind = self._AppKindForKey(key)
+    app_kind = self._AppIdNamespaceKindForKey(key)
     if app_kind not in self.__entities:
       self.__entities[app_kind] = {}
     self.__entities[app_kind][key] = _StoredEntity(entity)
@@ -440,16 +471,20 @@
       self.__file_lock.release()
 
   def MakeSyncCall(self, service, call, request, response):
-    """ The main RPC entry point. service must be 'datastore_v3'. So far, the
-    supported calls are 'Get', 'Put', 'RunQuery', 'Next', and 'Count'.
+    """ The main RPC entry point. service must be 'datastore_v3'.
     """
+    self.assertPbIsInitialized(request)
     super(DatastoreFileStub, self).MakeSyncCall(service,
                                                 call,
                                                 request,
                                                 response)
+    self.assertPbIsInitialized(response)
 
+  def assertPbIsInitialized(self, pb):
+    """Raises an exception if the given PB is not initialized and valid."""
     explanation = []
-    assert response.IsInitialized(explanation), explanation
+    assert pb.IsInitialized(explanation), explanation
+    pb.Encode()
 
   def QueryHistory(self):
     """Returns a dict that maps Query PBs to times they've been run.
@@ -460,7 +495,7 @@
   def _Dynamic_Put(self, put_request, put_response):
     clones = []
     for entity in put_request.entity_list():
-      self.__ValidateAppId(entity.key().app())
+      self.__ValidateKey(entity.key())
 
       clone = entity_pb.EntityProto()
       clone.CopyFrom(entity)
@@ -515,7 +550,7 @@
 
     for key in get_request.key_list():
       self.__ValidateAppId(key.app())
-      app_kind = self._AppKindForKey(key)
+      app_kind = self._AppIdNamespaceKindForKey(key)
 
       group = get_response.add_entity()
       try:
@@ -532,7 +567,7 @@
     try:
       for key in delete_request.key_list():
         self.__ValidateAppId(key.app())
-        app_kind = self._AppKindForKey(key)
+        app_kind = self._AppIdNamespaceKindForKey(key)
         try:
           del self.__entities[app_kind][key]
           if not self.__entities[app_kind]:
@@ -559,8 +594,9 @@
       entities = self.__entities
       self.__tx_lock.release()
 
-    app = query.app()
-    self.__ValidateAppId(app)
+    app_id_namespace = datastore_types.parse_app_id_namespace(query.app())
+    app_id = app_id_namespace.app_id()
+    self.__ValidateAppId(app_id)
 
     if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
       raise apiproxy_errors.ApplicationError(
@@ -575,11 +611,14 @@
           ('query is too large. may not have more than %s filters'
            ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))
 
+    (filters, orders) = datastore_index.Normalize(query.filter_list(),
+                                                  query.order_list())
+
     if self.__require_indexes:
       required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
       if required:
         required_key = kind, ancestor, props
-        indexes = self.__indexes.get(app)
+        indexes = self.__indexes.get(app_id)
         if not indexes:
           raise apiproxy_errors.ApplicationError(
               datastore_pb.Error.NEED_INDEX,
@@ -606,9 +645,15 @@
               "You must update the index.yaml file in your application root.")
 
     try:
-      query.set_app(app)
-      results = entities[app, query.kind()].values()
-      results = [entity.native for entity in results]
+      query.set_app(app_id_namespace.to_encoded())
+      if query.has_kind():
+        results = entities[app_id_namespace.to_encoded(), query.kind()].values()
+        results = [entity.native for entity in results]
+      else:
+        results = []
+        for key in entities:
+          if key[0] == app_id_namespace.to_encoded():
+            results += [entity.native for entity in entities[key].values()]
     except KeyError:
       results = []
 
@@ -642,7 +687,7 @@
           return True
       return False
 
-    for filt in query.filter_list():
+    for filt in filters:
       assert filt.op() != datastore_pb.Query_Filter.IN
 
       prop = filt.property(0).name().decode('utf-8')
@@ -694,7 +739,7 @@
 
       results = filter(passes_filter, results)
 
-    for order in query.order_list():
+    for order in orders:
       prop = order.property().decode('utf-8')
       results = [entity for entity in results if has_prop_indexed(entity, prop)]
 
@@ -703,7 +748,7 @@
       entity a is considered smaller than, equal to, or larger than b,
       according to the query's orderings. """
       cmped = 0
-      for o in query.order_list():
+      for o in orders:
         prop = o.property().decode('utf-8')
 
         reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
@@ -773,7 +818,15 @@
 
     cursor = _Cursor(results, query.keys_only())
     self.__queries[cursor.cursor] = cursor
-    cursor.PopulateQueryResult(query_result, 0)
+
+    if query.has_count():
+      count = query.count()
+    elif query.has_limit():
+      count = query.limit()
+    else:
+      count = _BATCH_SIZE
+
+    cursor.PopulateQueryResult(query_result, count)
 
   def _Dynamic_Next(self, next_request, query_result):
     cursor_handle = next_request.cursor().cursor()
@@ -784,7 +837,10 @@
       raise apiproxy_errors.ApplicationError(
           datastore_pb.Error.BAD_REQUEST, 'Cursor %d not found' % cursor_handle)
 
-    cursor.PopulateQueryResult(query_result, next_request.count())
+    count = _BATCH_SIZE
+    if next_request.has_count():
+      count = next_request.count()
+    cursor.PopulateQueryResult(query_result, count)
 
   def _Dynamic_Count(self, query, integer64proto):
     self.__ValidateAppId(query.app())
@@ -830,70 +886,94 @@
     self.__tx_snapshot = {}
     self.__tx_lock.release()
 
-  def _Dynamic_GetSchema(self, app_str, schema):
-    minint = -sys.maxint - 1
-    try:
-      minfloat = float('-inf')
-    except ValueError:
-      minfloat = -1e300000
-
-    app_str = app_str.value()
+  def _Dynamic_GetSchema(self, req, schema):
+    app_str = req.app()
     self.__ValidateAppId(app_str)
 
     kinds = []
 
     for app, kind in self.__entities:
-      if app == app_str:
-        app_kind = (app, kind)
-        if app_kind in self.__schema_cache:
-          kinds.append(self.__schema_cache[app_kind])
-          continue
+      if (app != app_str or
+          (req.has_start_kind() and kind < req.start_kind()) or
+          (req.has_end_kind() and kind > req.end_kind())):
+        continue
+
+      app_kind = (app, kind)
+      if app_kind in self.__schema_cache:
+        kinds.append(self.__schema_cache[app_kind])
+        continue
 
-        kind_pb = entity_pb.EntityProto()
-        kind_pb.mutable_key().set_app('')
-        kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
-        kind_pb.mutable_entity_group()
+      kind_pb = entity_pb.EntityProto()
+      kind_pb.mutable_key().set_app('')
+      kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
+      kind_pb.mutable_entity_group()
 
-        props = {}
+      props = {}
 
-        for entity in self.__entities[app_kind].values():
-          for prop in entity.protobuf.property_list():
-            if prop.name() not in props:
-              props[prop.name()] = entity_pb.PropertyValue()
-            props[prop.name()].MergeFrom(prop.value())
+      for entity in self.__entities[app_kind].values():
+        for prop in entity.protobuf.property_list():
+          if prop.name() not in props:
+            props[prop.name()] = entity_pb.PropertyValue()
+          props[prop.name()].MergeFrom(prop.value())
 
-        for value_pb in props.values():
-          if value_pb.has_int64value():
-            value_pb.set_int64value(minint)
-          if value_pb.has_booleanvalue():
-            value_pb.set_booleanvalue(False)
-          if value_pb.has_stringvalue():
-            value_pb.set_stringvalue('')
-          if value_pb.has_doublevalue():
-            value_pb.set_doublevalue(minfloat)
-          if value_pb.has_pointvalue():
-            value_pb.mutable_pointvalue().set_x(minfloat)
-            value_pb.mutable_pointvalue().set_y(minfloat)
-          if value_pb.has_uservalue():
-            value_pb.mutable_uservalue().set_gaiaid(minint)
-            value_pb.mutable_uservalue().set_email('')
-            value_pb.mutable_uservalue().set_auth_domain('')
-            value_pb.mutable_uservalue().clear_nickname()
-          elif value_pb.has_referencevalue():
-            value_pb.clear_referencevalue()
-            value_pb.mutable_referencevalue().set_app('')
+      for value_pb in props.values():
+        if value_pb.has_int64value():
+          value_pb.set_int64value(0)
+        if value_pb.has_booleanvalue():
+          value_pb.set_booleanvalue(False)
+        if value_pb.has_stringvalue():
+          value_pb.set_stringvalue('none')
+        if value_pb.has_doublevalue():
+          value_pb.set_doublevalue(0.0)
+        if value_pb.has_pointvalue():
+          value_pb.mutable_pointvalue().set_x(0.0)
+          value_pb.mutable_pointvalue().set_y(0.0)
+        if value_pb.has_uservalue():
+          value_pb.mutable_uservalue().set_gaiaid(0)
+          value_pb.mutable_uservalue().set_email('none')
+          value_pb.mutable_uservalue().set_auth_domain('none')
+          value_pb.mutable_uservalue().clear_nickname()
+          value_pb.mutable_uservalue().clear_obfuscated_gaiaid()
+        if value_pb.has_referencevalue():
+          value_pb.clear_referencevalue()
+          value_pb.mutable_referencevalue().set_app('none')
+          pathelem = value_pb.mutable_referencevalue().add_pathelement()
+          pathelem.set_type('none')
+          pathelem.set_name('none')
 
-        for name, value_pb in props.items():
-          prop_pb = kind_pb.add_property()
-          prop_pb.set_name(name)
-          prop_pb.set_multiple(False)
-          prop_pb.mutable_value().CopyFrom(value_pb)
+      for name, value_pb in props.items():
+        prop_pb = kind_pb.add_property()
+        prop_pb.set_name(name)
+        prop_pb.set_multiple(False)
+        prop_pb.mutable_value().CopyFrom(value_pb)
 
-        kinds.append(kind_pb)
-        self.__schema_cache[app_kind] = kind_pb
+      kinds.append(kind_pb)
+      self.__schema_cache[app_kind] = kind_pb
 
     for kind_pb in kinds:
-      schema.add_kind().CopyFrom(kind_pb)
+      kind = schema.add_kind()
+      kind.CopyFrom(kind_pb)
+      if not req.properties():
+        kind.clear_property()
+
+    schema.set_more_results(False)
+
+  def _Dynamic_AllocateIds(self, allocate_ids_request, allocate_ids_response):
+    model_key = allocate_ids_request.model_key()
+    size = allocate_ids_request.size()
+
+    self.__ValidateAppId(model_key.app())
+
+    try:
+      self.__id_lock.acquire()
+      start = self.__next_id
+      self.__next_id += size
+      end = self.__next_id - 1
+    finally:
+      self.__id_lock.release()
+
+    allocate_ids_response.set_start(start)
+    allocate_ids_response.set_end(end)
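
Example (a sketch of exercising this handler through the API proxy; it assumes
the AllocateIdsRequest/AllocateIdsResponse messages in datastore_pb that the
stub is registered against, Key._ToPb() from datastore_types, and that
APPLICATION_ID is set in the environment):

    from google.appengine.api import apiproxy_stub_map, datastore_types
    from google.appengine.datastore import datastore_pb

    req = datastore_pb.AllocateIdsRequest()
    req.mutable_model_key().CopyFrom(
        datastore_types.Key.from_path('Counter', 1)._ToPb())
    req.set_size(10)
    resp = datastore_pb.AllocateIdsResponse()
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AllocateIds', req, resp)
    # resp.start() .. resp.end() is the inclusive range of reserved ids.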
 
   def _Dynamic_CreateIndex(self, index, id_response):
     self.__ValidateAppId(index.app_id())
--- a/thirdparty/google_appengine/google/appengine/api/datastore_types.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/datastore_types.py	Sun Sep 06 23:31:53 2009 +0200
@@ -47,6 +47,7 @@
 from google.appengine.datastore import datastore_pb
 from google.appengine.api import datastore_errors
 from google.appengine.api import users
+from google.appengine.api import namespace_manager
 from google.net.proto import ProtocolBuffer
 from google.appengine.datastore import entity_pb
 
@@ -59,6 +60,8 @@
 _KEY_SPECIAL_PROPERTY = '__key__'
 _SPECIAL_PROPERTIES = frozenset([_KEY_SPECIAL_PROPERTY])
 
+_NAMESPACE_SEPARATOR='!'
+
 class UtcTzinfo(datetime.tzinfo):
   def utcoffset(self, dt): return datetime.timedelta(0)
   def dst(self, dt): return datetime.timedelta(0)
@@ -80,7 +83,8 @@
 def ValidateString(value,
                    name='unused',
                    exception=datastore_errors.BadValueError,
-                   max_len=_MAX_STRING_LENGTH):
+                   max_len=_MAX_STRING_LENGTH,
+                   empty_ok=False):
   """Raises an exception if value is not a valid string or a subclass thereof.
 
   A string is valid if it's not empty, no more than _MAX_STRING_LENGTH bytes,
@@ -91,17 +95,49 @@
     value: the value to validate.
     name: the name of this value; used in the exception message.
     exception: the type of exception to raise.
-    max_len: the maximum allowed length, in bytes
+    max_len: the maximum allowed length, in bytes.
+    empty_ok: allow empty value.
   """
+  if value is None and empty_ok:
+    return
   if not isinstance(value, basestring) or isinstance(value, Blob):
     raise exception('%s should be a string; received %s (a %s):' %
                     (name, value, typename(value)))
-  if not value:
+  if not value and not empty_ok:
     raise exception('%s must not be empty.' % name)
 
   if len(value.encode('utf-8')) > max_len:
     raise exception('%s must be under %d bytes.' % (name, max_len))
 
+def ValidateInteger(value,
+                    name='unused',
+                    exception=datastore_errors.BadValueError,
+                    empty_ok=False,
+                    zero_ok=False,
+                    negative_ok=False):
+  """Raises an exception if value is not a valid integer.
+
+  An integer is valid if it is an int and, unless the corresponding
+  *_ok argument is True, is not None, not zero, and not negative.
+  The exception type can be specified with the exception argument;
+  it defaults to BadValueError.
+
+  Args:
+    value: the value to validate.
+    name: the name of this value; used in the exception message.
+    exception: the type of exception to raise.
+    empty_ok: allow None value.
+    zero_ok: allow zero value.
+    negative_ok: allow negative value.
+  """
+  if value is None and empty_ok:
+    return
+  if not isinstance(value, int):
+    raise exception('%s should be an integer; received %s (a %s).' %
+                    (name, value, typename(value)))
+  if not value and not zero_ok:
+    raise exception('%s must not be 0 (zero)' % name)
+  if value < 0 and not negative_ok:
+    raise exception('%s must not be negative.' % name)
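
Example (a quick sketch of the new validator's defaults; module paths as in
this file):

    from google.appengine.api import datastore_types, datastore_errors

    datastore_types.ValidateInteger(42, 'limit')                # passes
    datastore_types.ValidateInteger(0, 'limit', zero_ok=True)   # passes
    try:
      datastore_types.ValidateInteger(-1, 'limit')
    except datastore_errors.BadValueError, e:
      print e  # limit must not be negative.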
 
 def ResolveAppId(app, name='_app'):
   """Validate app id, providing a default.
@@ -124,6 +160,152 @@
   return app
 
 
+class AppIdNamespace(object):
+  """Combined AppId and Namespace
+
+  An identifier that combines the application identifier and the
+  namespace.
+  """
+  __app_id = None
+  __namespace = None
+
+  def __init__(self, app_id, namespace):
+    """Constructor. Creates a AppIdNamespace from two strings.
+
+    Args:
+      app_id: application identifier string
+      namespace: namespace identifier string
+    Raises:
+      BadArgumentError if the values contain
+      the _NAMESPACE_SEPARATOR character (!) or
+      the app_id is empty.
+    """
+    self.__app_id = app_id
+    if namespace:
+      self.__namespace = namespace
+    else:
+      self.__namespace = None
+    ValidateString(self.__app_id, 'app_id', datastore_errors.BadArgumentError)
+    ValidateString(self.__namespace,
+                   'namespace', datastore_errors.BadArgumentError,
+                   empty_ok=True)
+    if _NAMESPACE_SEPARATOR in self.__app_id:
+      raise datastore_errors.BadArgumentError(
+        'app_id must not contain a "%s"' % _NAMESPACE_SEPARATOR)
+    if self.__namespace and _NAMESPACE_SEPARATOR in self.__namespace:
+      raise datastore_errors.BadArgumentError(
+        'namespace must not contain a "%s"' % _NAMESPACE_SEPARATOR)
+
+  def __cmp__(self, other):
+    """Returns negative, zero, or positive when comparing two AppIdNamespace.
+
+    Args:
+      other: AppIdNamespace to compare to.
+
+    Returns:
+      Negative if self is less than "other"
+      Zero if "other" is equal to self
+      Positive if self is greater than "other"
+    """
+    if not isinstance(other, AppIdNamespace):
+      return cmp(id(self), id(other))
+    return cmp((self.__app_id, self.__namespace),
+               (other.__app_id, other.__namespace))
+
+  def to_encoded(self):
+    """Returns this AppIdNamespace's string equivalent
+
+    i.e. "app!namespace"
+    """
+    if not self.__namespace:
+      return self.__app_id
+    else:
+      return self.__app_id + _NAMESPACE_SEPARATOR + self.__namespace
+
+  def app_id(self):
+    """Returns this AppId portion of this AppIdNamespace.
+    """
+    return self.__app_id;
+
+  def namespace(self):
+    """Returns this namespace portion of this AppIdNamespace.
+    """
+    return self.__namespace;
+
+
+def PartitionString(value, separator):
+  """Equivalent to python2.5 str.partition()
+     TODO(gmariani) use str.partition() when python 2.5 is adopted.
+
+  Args:
+    value: String to be partitioned
+    separator: Separator string
+  """
+  index = value.find(separator);
+  if index == -1:
+    return (value, '', value[0:0]);
+  else:
+    return (value[0:index], separator, value[index+len(separator):len(value)])
+
+
+def parse_app_id_namespace(app_id_namespace):
+  """
+  An app_id_namespace string is valid if it's not empty, and contains
+  at most one namespace separator ('!').  Also, an app_id_namespace
+  with an empty namespace must not contain a namespace separator.
+
+  Args:
+    app_id_namespace: an encoded app_id_namespace.
+  Raises exception if format of app_id_namespace is invalid.
+  """
+  if not app_id_namespace:
+    raise datastore_errors.BadArgumentError(
+        'app_id_namespace must be non empty')
+  parts = PartitionString(app_id_namespace, _NAMESPACE_SEPARATOR)
+  if parts[1] == _NAMESPACE_SEPARATOR:
+    if not parts[2]:
+      raise datastore_errors.BadArgumentError(
+        'app_id_namespace must not contain a "%s" if the namespace is empty' %
+        _NAMESPACE_SEPARATOR)
+  if parts[2]:
+    return AppIdNamespace(parts[0], parts[2])
+  return AppIdNamespace(parts[0], None)
+
+def ResolveAppIdNamespace(
+    app_id=None, namespace=None, app_id_namespace=None):
+  """Validate an app id/namespace and substitute default values.
+
+  If app_id or namespace is None, $APPLICATION_ID and the current request
+  namespace are substituted, respectively.
+
+  Args:
+    app_id: The app id argument value to be validated.
+    namespace: The namespace argument value to be validated.
+    app_id_namespace: An AppId/Namespace pair
+
+  Returns:
+    An AppIdNamespace object initialized with AppId and Namespace.
+
+  Raises:
+    BadArgumentError if the value is empty or not a string.
+  """
+  if app_id_namespace is None:
+    if app_id is None:
+      app_id = os.environ.get('APPLICATION_ID', '')
+    if namespace is None:
+      namespace = namespace_manager.get_request_namespace()
+  else:
+    if app_id is not None:
+      raise datastore_errors.BadArgumentError(
+          'app_id is overspecified.  Cannot define app_id_namespace and app_id')
+    if namespace is not None:
+      raise datastore_errors.BadArgumentError(
+          'namespace is overspecified.  ' +
+          'Cannot define app_id_namespace and namespace')
+    return parse_app_id_namespace(app_id_namespace)
+
+  return AppIdNamespace(app_id, namespace)
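
Example (a sketch of the encoded form these helpers round-trip; the app and
namespace names are hypothetical):

    from google.appengine.api import datastore_types

    pair = datastore_types.parse_app_id_namespace('myapp!accounting')
    print pair.app_id(), pair.namespace()  # myapp accounting
    print pair.to_encoded()                # myapp!accounting
    # Without a separator the namespace is None and to_encoded() is just
    # the app id.
    print datastore_types.parse_app_id_namespace('myapp').to_encoded()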
+
+
 class Key(object):
   """The primary key for a datastore entity.
 
@@ -172,6 +354,26 @@
     else:
       self.__reference = entity_pb.Reference()
 
+  def to_path(self):
+    """Construct the "path" of this key as a list.
+
+    Returns:
+      A list [kind_1, id_or_name_1, ..., kind_n, id_or_name_n] of the key path.
+
+    Raises:
+      datastore_errors.BadKeyError if this key does not have a valid path.
+    """
+    path = []
+    for path_element in self.__reference.path().element_list():
+      path.append(path_element.type().decode('utf-8'))
+      if path_element.has_name():
+        path.append(path_element.name().decode('utf-8'))
+      elif path_element.has_id():
+        path.append(path_element.id())
+      else:
+        raise datastore_errors.BadKeyError('Incomplete key found in to_path')
+    return path
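
Example (a sketch of the round trip with from_path; assumes APPLICATION_ID is
set in the environment, and the kind/name values are hypothetical):

    from google.appengine.api import datastore_types

    key = datastore_types.Key.from_path('Parent', 'alpha', 'Child', 42)
    print key.to_path()  # [u'Parent', u'alpha', u'Child', 42]
    # The list alternates kind, id-or-name, so it feeds straight back in:
    clone = datastore_types.Key.from_path(*key.to_path())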
+
   @staticmethod
   def from_path(*args, **kwds):
     """Static method to construct a Key out of a "path" (kind, id or name, ...).
@@ -202,7 +404,10 @@
       BadKeyError if the parent key is incomplete.
     """
     parent = kwds.pop('parent', None)
-    _app = ResolveAppId(kwds.pop('_app', None))
+    _app_id_namespace_obj = ResolveAppIdNamespace(
+        kwds.pop('_app', None),
+        kwds.pop('_namespace', None),
+        kwds.pop('_app_id_namespace', None))
 
     if kwds:
       raise datastore_errors.BadArgumentError(
@@ -221,17 +426,18 @@
       if not parent.has_id_or_name():
         raise datastore_errors.BadKeyError(
             'The parent Key is incomplete.')
-      if _app != parent.app():
+      if _app_id_namespace_obj != parent.app_id_namespace():
         raise datastore_errors.BadArgumentError(
-            'The _app argument (%r) should match parent.app() (%s)' %
-            (_app, parent.app()))
+            'The app_id/namespace arguments (%r) should match '
+            'parent.app_id_namespace().to_encoded() (%s)' %
+            (_app_id_namespace_obj, parent.app_id_namespace()))
 
     key = Key()
     ref = key.__reference
     if parent is not None:
       ref.CopyFrom(parent.__reference)
     else:
-      ref.set_app(_app)
+      ref.set_app(_app_id_namespace_obj.to_encoded())
 
     path = ref.mutable_path()
     for i in xrange(0, len(args), 2):
@@ -248,9 +454,6 @@
         elem.set_id(id_or_name)
       elif isinstance(id_or_name, basestring):
         ValidateString(id_or_name, 'name')
-        if id_or_name and id_or_name[0] in string.digits:
-          raise datastore_errors.BadArgumentError(
-            'Names may not begin with a digit; received %s.' % id_or_name)
         elem.set_name(id_or_name.encode('utf-8'))
       else:
         raise datastore_errors.BadArgumentError(
@@ -263,7 +466,21 @@
   def app(self):
     """Returns this entity's app id, a string."""
     if self.__reference.app():
-      return self.__reference.app().decode('utf-8')
+      return self.app_id_namespace().app_id().decode('utf-8')
+    else:
+      return None
+
+  def namespace(self):
+    """Returns this entity's app id, a string."""
+    if self.__reference.app():
+      return self.app_id_namespace().namespace().decode('utf-8')
+    else:
+      return None
+
+  def app_id_namespace(self):
+    """Returns this entity's app id/namespace, an appIdNamespace object."""
+    if self.__reference.app():
+      return parse_app_id_namespace(self.__reference.app())
     else:
       return None
 
@@ -339,11 +556,13 @@
       raise datastore_errors.BadKeyError(
         'ToTagUri() called for an entity with an incomplete key.')
 
-    return u'tag:%s.%s,%s:%s[%s]' % (saxutils.escape(self.app()),
-                                     os.environ['AUTH_DOMAIN'],
-                                     datetime.date.today().isoformat(),
-                                     saxutils.escape(self.kind()),
-                                     saxutils.escape(str(self)))
+    return u'tag:%s.%s,%s:%s[%s]' % (
+        saxutils.escape(self.app_id_namespace().to_encoded()),
+        os.environ['AUTH_DOMAIN'],
+        datetime.date.today().isoformat(),
+        saxutils.escape(self.kind()),
+        saxutils.escape(str(self)))
+
   ToXml = ToTagUri
 
   def entity_group(self):
@@ -436,7 +655,7 @@
       else:
         args.append(repr(elem.id()))
 
-    args.append('_app=%r' % self.__reference.app().decode('utf-8'))
+    args.append('_app_id_namespace=%r' % self.__reference.app().decode('utf-8'))
     return u'datastore_types.Key.from_path(%s)' % ', '.join(args)
 
   def __cmp__(self, other):
@@ -459,25 +678,29 @@
     self_args = []
     other_args = []
 
-    self_args.append(self.__reference.app().decode('utf-8'))
-    other_args.append(other.__reference.app().decode('utf-8'))
+    self_args.append(self.__reference.app())
+    other_args.append(other.__reference.app())
 
     for elem in self.__reference.path().element_list():
-      self_args.append(repr(elem.type()))
+      self_args.append(elem.type())
       if elem.has_name():
-        self_args.append(repr(elem.name().decode('utf-8')))
+        self_args.append(elem.name())
       else:
         self_args.append(elem.id())
 
     for elem in other.__reference.path().element_list():
-      other_args.append(repr(elem.type()))
+      other_args.append(elem.type())
       if elem.has_name():
-        other_args.append(repr(elem.name().decode('utf-8')))
+        other_args.append(elem.name())
       else:
         other_args.append(elem.id())
 
-    result = cmp(self_args, other_args)
-    return result
+    for self_component, other_component in zip(self_args, other_args):
+      comparison = cmp(self_component, other_component)
+      if comparison != 0:
+        return comparison
+
+    return cmp(len(self_args), len(other_args))
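
Example (a sketch of the ordering this gives: with component-wise comparison
and the length fallback, an ancestor key, being a strict prefix, sorts before
its children; assumes APPLICATION_ID is set and the kind/name values are
hypothetical):

    parent = datastore_types.Key.from_path('Kind', 'a')
    child = datastore_types.Key.from_path('Kind', 'a', 'Kind', 'b')
    assert cmp(parent, child) < 0  # equal components, shorter path first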
 
   def __hash__(self):
     """Returns a 32-bit integer hash of this key.
@@ -698,6 +921,7 @@
       except datastore_errors.BadValueError:
         return NotImplemented
 
+
     return cmp((self.address, self.protocol),
                (other.address, other.protocol))
 
@@ -900,6 +1124,63 @@
     return saxutils.escape(encoded)
 
 
+class BlobKey(object):
+  """Key used to identify a blob in Blobstore.
+
+  This object wraps a string that gets used internally by the Blobstore API
+  to identify application blobs.  The BlobKey corresponds to the entity name
+  of the underlying BlobReference entity.  The structure of the key is:
+
+    _<blob-key>
+
+  This class is exposed in the API in both google.appengine.ext.db and
+  google.appengine.ext.blobstore.
+  """
+
+  def __init__(self, blob_key):
+    """Constructor.
+
+    Used to convert a string to a BlobKey.  Normally used internally by
+    Blobstore API.
+
+    Args:
+      blob_key:  Key name of BlobReference that this key belongs to.
+    """
+    self.__blob_key = blob_key
+
+  def __str__(self):
+    """Convert to string."""
+    return self.__blob_key
+
+  def __repr__(self):
+    """Returns an eval()able string representation of this key.
+
+    Returns a Python string of the form 'datastore_types.BlobKey(...)'
+    that can be used to recreate this key.
+
+    Returns:
+      string
+    """
+    return '%s.%s(%r)' % (type(self).__module__,
+                          type(self).__name__,
+                          self.__blob_key)
+
+  def __cmp__(self, other):
+    if type(other) is type(self):
+      return cmp(str(self), str(other))
+    elif isinstance(other, basestring):
+      return cmp(self.__blob_key, other)
+    else:
+      return NotImplemented
+
+  def __hash__(self):
+    return hash(self.__blob_key)
+
+  def ToXml(self):
+    return str(self)
+
+
 _PROPERTY_MEANINGS = {
 
 
@@ -916,6 +1197,7 @@
   PhoneNumber:       entity_pb.Property.GD_PHONENUMBER,
   PostalAddress:     entity_pb.Property.GD_POSTALADDRESS,
   Rating:            entity_pb.Property.GD_RATING,
+  BlobKey:           entity_pb.Property.BLOBKEY,
 }
 
 _PROPERTY_TYPES = frozenset([
@@ -940,6 +1222,7 @@
   type(None),
   unicode,
   users.User,
+  BlobKey,
 ])
 
 _RAW_PROPERTY_TYPES = (Blob, Text)
@@ -1043,6 +1326,7 @@
   type(None): ValidatePropertyNothing,
   unicode: ValidatePropertyString,
   users.User: ValidatePropertyNothing,
+  BlobKey: ValidatePropertyString,
 }
 
 assert set(_VALIDATE_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
@@ -1222,6 +1506,7 @@
   """
   pbvalue.set_doublevalue(value)
 
+
 _PACK_PROPERTY_VALUES = {
   Blob: PackBlob,
   ByteString: PackBlob,
@@ -1244,6 +1529,7 @@
   type(None): lambda name, value, pbvalue: None,
   unicode: PackString,
   users.User: PackUser,
+  BlobKey: PackString,
 }
 
 assert set(_PACK_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
@@ -1331,7 +1617,6 @@
   entity_pb.Property.ATOM_CATEGORY:     Category,
   entity_pb.Property.ATOM_LINK:         Link,
   entity_pb.Property.GD_EMAIL:          Email,
-  entity_pb.Property.GEORSS_POINT:      lambda coords: GeoPt(*coords),
   entity_pb.Property.GD_IM:             IM,
   entity_pb.Property.GD_PHONENUMBER:    PhoneNumber,
   entity_pb.Property.GD_POSTALADDRESS:  PostalAddress,
@@ -1339,6 +1624,7 @@
   entity_pb.Property.BLOB:              Blob,
   entity_pb.Property.BYTESTRING:        ByteString,
   entity_pb.Property.TEXT:              Text,
+  entity_pb.Property.BLOBKEY:           BlobKey,
 }
 
 
@@ -1368,7 +1654,7 @@
   elif pbval.has_referencevalue():
     value = FromReferenceProperty(pbval)
   elif pbval.has_pointvalue():
-    value = (pbval.pointvalue().x(), pbval.pointvalue().y())
+    value = GeoPt(pbval.pointvalue().x(), pbval.pointvalue().y())
   elif pbval.has_uservalue():
     email = unicode(pbval.uservalue().email().decode('utf-8'))
     auth_domain = unicode(pbval.uservalue().auth_domain().decode('utf-8'))
@@ -1381,7 +1667,7 @@
     value = None
 
   try:
-    if pb.has_meaning():
+    if pb.has_meaning() and pb.meaning() in _PROPERTY_CONVERSIONS:
       conversion = _PROPERTY_CONVERSIONS[meaning]
       value = conversion(value)
   except (KeyError, ValueError, IndexError, TypeError, AttributeError), msg:
@@ -1437,6 +1723,7 @@
     'gd:phonenumber':   PhoneNumber,
     'gd:postaladdress': PostalAddress,
     'gd:rating':        Rating,
+    'blobkey':          BlobKey,
     }
 
 
--- a/thirdparty/google_appengine/google/appengine/api/images/images_service_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/images/images_service_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -80,13 +80,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -150,13 +154,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -466,6 +474,10 @@
     if self.has_autolevels_: res+=prefix+("autolevels: %s\n" % self.DebugFormatBool(self.autolevels_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kwidth = 1
   kheight = 2
   krotate = 3
@@ -477,43 +489,33 @@
   kcrop_bottom_y = 9
   kautolevels = 10
 
-  _TEXT = (
-   "ErrorCode",
-   "width",
-   "height",
-   "rotate",
-   "horizontal_flip",
-   "vertical_flip",
-   "crop_left_x",
-   "crop_top_y",
-   "crop_right_x",
-   "crop_bottom_y",
-   "autolevels",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "width",
+    2: "height",
+    3: "rotate",
+    4: "horizontal_flip",
+    5: "vertical_flip",
+    6: "crop_left_x",
+    7: "crop_top_y",
+    8: "crop_right_x",
+    9: "crop_bottom_y",
+    10: "autolevels",
+  }, 10)
 
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.FLOAT,
+    7: ProtocolBuffer.Encoder.FLOAT,
+    8: ProtocolBuffer.Encoder.FLOAT,
+    9: ProtocolBuffer.Encoder.FLOAT,
+    10: ProtocolBuffer.Encoder.NUMERIC,
+  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -583,18 +585,21 @@
     if self.has_content_: res+=prefix+("content: %s\n" % self.DebugFormatString(self.content_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcontent = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "content",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "content",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -673,18 +678,21 @@
     if self.has_mime_type_: res+=prefix+("mime_type: %s\n" % self.DebugFormatInt32(self.mime_type_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kmime_type = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "mime_type",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "mime_type",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -834,26 +842,27 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
   ktransform = 2
   koutput = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-   "transform",
-   "output",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+    2: "transform",
+    3: "output",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -926,18 +935,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1152,34 +1164,33 @@
     if self.has_anchor_: res+=prefix+("anchor: %s\n" % self.DebugFormatInt32(self.anchor_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   ksource_index = 1
   kx_offset = 2
   ky_offset = 3
   kopacity = 4
   kanchor = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "source_index",
-   "x_offset",
-   "y_offset",
-   "opacity",
-   "anchor",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "source_index",
+    2: "x_offset",
+    3: "y_offset",
+    4: "opacity",
+    5: "anchor",
+  }, 5)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.FLOAT,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1339,30 +1350,30 @@
     if self.has_color_: res+=prefix+("color: %s\n" % self.DebugFormatInt32(self.color_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kwidth = 1
   kheight = 2
   koutput = 3
   kcolor = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "width",
-   "height",
-   "output",
-   "color",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "width",
+    2: "height",
+    3: "output",
+    4: "color",
+  }, 4)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1523,26 +1534,27 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
   koptions = 2
   kcanvas = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-   "options",
-   "canvas",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+    2: "options",
+    3: "canvas",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1615,18 +1627,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1699,18 +1714,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kimage = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "image",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "image",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1855,26 +1873,27 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kred = 1
   kgreen = 2
   kblue = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "red",
-   "green",
-   "blue",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "red",
+    2: "green",
+    3: "blue",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1947,18 +1966,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   khistogram = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "histogram",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "histogram",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
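
Example (the helper that replaces the hand-laid tuples above is small enough
to run standalone; a sketch of the table it builds, with hypothetical tag
names):

    def _BuildTagLookupTable(sparse, maxtag, default=None):
      return tuple([sparse.get(i, default) for i in xrange(0, 1 + maxtag)])

    # Missing tags fall back to the default, so decoders can index by tag
    # number without bounds checks.
    print _BuildTagLookupTable({0: "ErrorCode", 2: "height"}, 3)
    # ('ErrorCode', None, 'height', None)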
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py	Sun Sep 06 23:31:53 2009 +0200
@@ -480,19 +480,9 @@
     """
     return self.__enqueued
 
-  def add(self, queue_name=_DEFAULT_QUEUE):
-    """Adds this Task to a queue.
-
-    Args:
-      queue_name: Name of the queue to add this Task to. (optional)
-
-    Returns:
-      This Task itself.
-
-    Raises:
-      BadTaskStateError if this task has already been enqueued.
-    """
-    return Queue(queue_name).add(self)
+  def add(self, queue_name=_DEFAULT_QUEUE, transactional=True):
+    """Adds this Task to a queue. See Queue.add."""
+    return Queue(queue_name).add(self, transactional=transactional)
 
 
 class Queue(object):
@@ -514,11 +504,13 @@
     self.__name = name
     self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
 
-  def add(self, task):
+  def add(self, task, transactional=True):
     """Adds a Task to this Queue.
 
     Args:
       task: The Task to add.
+      transactional: If False, adds the task to the queue regardless of
+        whether the enclosing transaction succeeds or fails. (optional)
 
     Returns:
       The Task that was supplied to this method.
@@ -555,6 +547,10 @@
       header.set_key(key)
       header.set_value(value)
 
+    if transactional:
+      from google.appengine.api import datastore
+      datastore._MaybeSetupTransaction(request, [])
+
     call_tuple = ('taskqueue', 'Add', request, response)
     apiproxy_stub_map.apiproxy.GetPreCallHooks().Call(*call_tuple)
     try:
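
Example (a sketch of the new flag: inside a datastore transaction the default
ties the task to the transaction's outcome, while transactional=False enqueues
immediately; the queue name and URL are hypothetical):

    from google.appengine.api.labs.taskqueue import taskqueue

    taskqueue.Queue('default').add(taskqueue.Task(url='/work'))
    taskqueue.Queue('default').add(taskqueue.Task(url='/work'),
                                   transactional=False)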
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -22,6 +22,7 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
+from google.appengine.datastore.datastore_v3_pb import *
 class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
@@ -37,6 +38,7 @@
   TASK_ALREADY_EXISTS =   10
   TOMBSTONED_TASK =   11
   INVALID_ETA  =   12
+  INVALID_REQUEST =   13
 
   _ErrorCode_NAMES = {
     0: "OK",
@@ -52,6 +54,7 @@
     10: "TASK_ALREADY_EXISTS",
     11: "TOMBSTONED_TASK",
     12: "INVALID_ETA",
+    13: "INVALID_REQUEST",
   }
 
   def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -96,13 +99,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -234,9 +241,12 @@
   url_ = ""
   has_body_ = 0
   body_ = ""
+  has_transaction_ = 0
+  transaction_ = None
 
   def __init__(self, contents=None):
     self.header_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
     if contents is not None: self.MergeFromString(contents)
 
   def queue_name(self): return self.queue_name_
@@ -333,6 +343,24 @@
 
   def has_body(self): return self.has_body_
 
+  def transaction(self):
+    if self.transaction_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.transaction_ is None: self.transaction_ = Transaction()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.transaction_
+
+  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+  def clear_transaction(self):
+    if self.has_transaction_:
+      self.has_transaction_ = 0;
+      if self.transaction_ is not None: self.transaction_.Clear()
+
+  def has_transaction(self): return self.has_transaction_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -343,6 +371,7 @@
     if (x.has_url()): self.set_url(x.url())
     for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
     if (x.has_body()): self.set_body(x.body())
+    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
 
   def Equals(self, x):
     if x is self: return 1
@@ -361,6 +390,8 @@
       if e1 != e2: return 0
     if self.has_body_ != x.has_body_: return 0
     if self.has_body_ and self.body_ != x.body_: return 0
+    if self.has_transaction_ != x.has_transaction_: return 0
+    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -383,6 +414,7 @@
         debug_strs.append('Required field: url not set.')
     for p in self.header_:
       if not p.IsInitialized(debug_strs): initialized=0
+    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
@@ -395,6 +427,7 @@
     n += 2 * len(self.header_)
     for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
     if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
+    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
     return n + 4
 
   def Clear(self):
@@ -405,6 +438,7 @@
     self.clear_url()
     self.clear_header()
     self.clear_body()
+    self.clear_transaction()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -425,6 +459,10 @@
     if (self.has_body_):
       out.putVarInt32(74)
       out.putPrefixedString(self.body_)
+    if (self.has_transaction_):
+      out.putVarInt32(82)
+      out.putVarInt32(self.transaction_.ByteSize())
+      self.transaction_.OutputUnchecked(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -450,6 +488,12 @@
       if tt == 74:
         self.set_body(d.getPrefixedString())
         continue
+      if tt == 82:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_transaction().TryMerge(tmp)
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -470,8 +514,16 @@
       res+=prefix+"}\n"
       cnt+=1
     if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
+    if self.has_transaction_:
+      res+=prefix+"transaction <\n"
+      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kqueue_name = 1
   ktask_name = 2
   keta_usec = 3
@@ -481,41 +533,35 @@
   kHeaderkey = 7
   kHeadervalue = 8
   kbody = 9
-
-  _TEXT = (
-   "ErrorCode",
-   "queue_name",
-   "task_name",
-   "eta_usec",
-   "url",
-   "method",
-   "Header",
-   "key",
-   "value",
-   "body",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
+  ktransaction = 10
 
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "queue_name",
+    2: "task_name",
+    3: "eta_usec",
+    4: "url",
+    5: "method",
+    6: "Header",
+    7: "key",
+    8: "value",
+    9: "body",
+    10: "transaction",
+  }, 10)
 
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.STARTGROUP,
+    7: ProtocolBuffer.Encoder.STRING,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STRING,
+    10: ProtocolBuffer.Encoder.STRING,
+  }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -582,18 +628,21 @@
     if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kchosen_task_name = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "chosen_task_name",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "chosen_task_name",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -779,34 +828,33 @@
     if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp_id = 1
   kqueue_name = 2
   kbucket_refill_per_second = 3
   kbucket_capacity = 4
   kuser_specified_rate = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "app_id",
-   "queue_name",
-   "bucket_refill_per_second",
-   "bucket_capacity",
-   "user_specified_rate",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app_id",
+    2: "queue_name",
+    3: "bucket_refill_per_second",
+    4: "bucket_capacity",
+    5: "user_specified_rate",
+  }, 5)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.DOUBLE,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -850,13 +898,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -956,22 +1008,24 @@
     if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp_id = 1
   kmax_rows = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "app_id",
-   "max_rows",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app_id",
+    2: "max_rows",
+  }, 2)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1204,34 +1258,33 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kQueueGroup = 1
   kQueuequeue_name = 2
   kQueuebucket_refill_per_second = 3
   kQueuebucket_capacity = 4
   kQueueuser_specified_rate = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "Queue",
-   "queue_name",
-   "bucket_refill_per_second",
-   "bucket_capacity",
-   "user_specified_rate",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Queue",
+    2: "queue_name",
+    3: "bucket_refill_per_second",
+    4: "bucket_capacity",
+    5: "user_specified_rate",
+  }, 5)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.DOUBLE,
+    4: ProtocolBuffer.Encoder.DOUBLE,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1366,26 +1419,27 @@
     if self.has_max_num_tasks_: res+=prefix+("max_num_tasks: %s\n" % self.DebugFormatInt32(self.max_num_tasks_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp_id = 1
   kqueue_name = 2
   kmax_num_tasks = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "app_id",
-   "queue_name",
-   "max_num_tasks",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app_id",
+    2: "queue_name",
+    3: "max_num_tasks",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1563,26 +1617,27 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kQueueStatsGroup = 1
   kQueueStatsnum_tasks = 2
   kQueueStatsoldest_eta_usec = 3
 
-  _TEXT = (
-   "ErrorCode",
-   "QueueStats",
-   "num_tasks",
-   "oldest_eta_usec",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "QueueStats",
+    2: "num_tasks",
+    3: "oldest_eta_usec",
+  }, 3)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py	Sun Sep 06 23:31:53 2009 +0200
@@ -43,9 +43,20 @@
 
 DEFAULT_BUCKET_SIZE = 5
 
+MAX_ETA_DELTA_DAYS = 30
+
 
 def _ParseQueueYaml(unused_self, root_path):
-  """Load the queue.yaml file and parse it."""
+  """Loads the queue.yaml file and parses it.
+
+  Args:
+    unused_self: Allows this function to be bound to a class member. Not used.
+    root_path: Directory containing queue.yaml.
+
+  Returns:
+    None if queue.yaml doesn't exist, otherwise a queueinfo.QueueEntry object
+    populated from the queue.yaml.
+  """
   if root_path is None:
     return None
   for queueyaml in ('queue.yaml', 'queue.yml'):
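
For reference, a queue.yaml that this parser accepts might look like the
following (hypothetical contents, shown as a Python string for illustration;
the 'default' queue needs no entry at all):

    # Hypothetical queue.yaml text; _ParseQueueYaml hands the open file
    # to the queueinfo module to build the parsed queue records.
    SAMPLE_QUEUE_YAML = (
        'queue:\n'
        '- name: update-queue\n'
        '  rate: 1/s\n'
        '  bucket_size: 5\n')
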
@@ -61,8 +72,16 @@
   return None
 
 
-def _CompareEta(a, b):
-  """Python sort comparator for task ETAs."""
+def _CompareTasksByEta(a, b):
+  """Python sort comparator for tasks by estimated time of arrival (ETA).
+
+  Args:
+    a: A taskqueue_service_pb.TaskQueueAddRequest.
+    b: A taskqueue_service_pb.TaskQueueAddRequest.
+
+  Returns:
+    Standard 1/0/-1 comparison result.
+  """
   if a.eta_usec() > b.eta_usec():
     return 1
   if a.eta_usec() < b.eta_usec():
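
Because this is a classic cmp-style comparator, it plugs directly into
Python 2's list.sort(); a quick sketch with a stand-in task type (only
eta_usec() matters here, the real argument is a TaskQueueAddRequest):

    class _FakeTask(object):
      # Stand-in for TaskQueueAddRequest; only eta_usec() is exercised.
      def __init__(self, eta_usec):
        self._eta_usec = eta_usec
      def eta_usec(self):
        return self._eta_usec

    tasks = [_FakeTask(30), _FakeTask(10), _FakeTask(20)]
    tasks.sort(_CompareTasksByEta)
    assert [t.eta_usec() for t in tasks] == [10, 20, 30]
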
@@ -106,29 +125,63 @@
         available.
     """
     super(TaskQueueServiceStub, self).__init__(service_name)
-    self.taskqueues = {}
-    self.next_task_id = 1
-    self.root_path = root_path
+    self._taskqueues = {}
+    self._next_task_id = 1
+    self._root_path = root_path
+
+  def _Dynamic_Add(self, request, response):
+    """Local implementation of the Add RPC in TaskQueueService.
+
+    Must adhere to the '_Dynamic_' naming convention for stubbing to work.
+    See taskqueue_service.proto for a full description of the RPC.
 
-  def _Dynamic_Add(self, request, unused_response):
-    if not self._ValidQueue(request.queue_name()):
+    Args:
+      request: A taskqueue_service_pb.TaskQueueAddRequest.
+      response: A taskqueue_service_pb.TaskQueueAddResponse.
+    """
+    if request.eta_usec() < 0:
+      raise apiproxy_errors.ApplicationError(
+          taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+
+    eta = datetime.datetime.utcfromtimestamp(request.eta_usec() / 1e6)
+    max_eta = (datetime.datetime.utcnow() +
+               datetime.timedelta(days=MAX_ETA_DELTA_DAYS))
+    if eta > max_eta:
+      raise apiproxy_errors.ApplicationError(
+          taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+
+    if not self._IsValidQueue(request.queue_name()):
       raise apiproxy_errors.ApplicationError(
           taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
-      return
 
     if not request.task_name():
-      request.set_task_name('task%d' % self.next_task_id)
-      self.next_task_id += 1
+      request.set_task_name('task%d' % self._next_task_id)
+      response.set_chosen_task_name(request.task_name())
+      self._next_task_id += 1
 
-    tasks = self.taskqueues.setdefault(request.queue_name(), [])
+    tasks = self._taskqueues.setdefault(request.queue_name(), [])
+    for task in tasks:
+      if task.task_name() == request.task_name():
+        raise apiproxy_errors.ApplicationError(
+            taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)
     tasks.append(request)
-    tasks.sort(_CompareEta)
-    return
+    tasks.sort(_CompareTasksByEta)
+
+  def _IsValidQueue(self, queue_name):
+    """Determines whether a queue is valid, i.e. tasks can be added to it.
 
-  def _ValidQueue(self, queue_name):
+    Valid queues are the 'default' queue, plus any queues in the queue.yaml
+    file.
+
+    Args:
+      queue_name: the name of the queue to validate.
+
+    Returns:
+      True iff queue is valid.
+    """
     if queue_name == 'default':
       return True
-    queue_info = self.queue_yaml_parser(self.root_path)
+    queue_info = self.queue_yaml_parser(self._root_path)
     if queue_info and queue_info.queue:
       for entry in queue_info.queue:
         if entry.name == queue_name:
@@ -140,10 +193,16 @@
 
     Returns:
       A list of dictionaries, where each dictionary contains one queue's
-      attributes.
+      attributes. E.g.:
+        [{'name': 'some-queue',
+          'max_rate': '1/s',
+          'bucket_size': 5,
+          'oldest_task': '2009/02/02 05:37:42',
+          'eta_delta': '0:00:06.342511 ago',
+          'tasks_in_queue': 12}, ...]
     """
     queues = []
-    queue_info = self.queue_yaml_parser(self.root_path)
+    queue_info = self.queue_yaml_parser(self._root_path)
     has_default = False
     if queue_info and queue_info.queue:
       for entry in queue_info.queue:
@@ -158,7 +217,7 @@
         else:
           queue['bucket_size'] = DEFAULT_BUCKET_SIZE
 
-        tasks = self.taskqueues.setdefault(entry.name, [])
+        tasks = self._taskqueues.setdefault(entry.name, [])
         if tasks:
           queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
           queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
@@ -173,7 +232,7 @@
       queue['max_rate'] = DEFAULT_RATE
       queue['bucket_size'] = DEFAULT_BUCKET_SIZE
 
-      tasks = self.taskqueues.get('default', [])
+      tasks = self._taskqueues.get('default', [])
       if tasks:
         queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
         queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
@@ -190,9 +249,24 @@
 
     Returns:
       A list of dictionaries, where each dictionary contains one task's
-      attributes.
+      attributes. E.g.
+        [{'name': 'task-123',
+          'url': '/update',
+          'method': 'GET',
+          'eta': '2009/02/02 05:37:42',
+          'eta_delta': '0:00:06.342511 ago',
+          'body': '',
+          'headers': {'X-AppEngine-QueueName': 'update-queue',
+                      'X-AppEngine-TaskName': 'task-123',
+                      'X-AppEngine-TaskRetryCount': '0',
+                      'X-AppEngine-Development-Payload': '1',
+                      'Content-Length': 0,
+                      'Content-Type': 'application/octet-stream'}, ...]
+
+    Raises:
+      ValueError: A task request contains an unknown HTTP method type.
     """
-    tasks = self.taskqueues.get(queue_name, [])
+    tasks = self._taskqueues.get(queue_name, [])
     result_tasks = []
     for task_request in tasks:
       task = {}
@@ -200,16 +274,18 @@
       task['name'] = task_request.task_name()
       task['url'] = task_request.url()
       method = task_request.method()
-      if (method == taskqueue_service_pb.TaskQueueAddRequest.GET):
+      if method == taskqueue_service_pb.TaskQueueAddRequest.GET:
         task['method'] = 'GET'
-      elif (method == taskqueue_service_pb.TaskQueueAddRequest.POST):
+      elif method == taskqueue_service_pb.TaskQueueAddRequest.POST:
         task['method'] = 'POST'
-      elif (method == taskqueue_service_pb.TaskQueueAddRequest.HEAD):
+      elif method == taskqueue_service_pb.TaskQueueAddRequest.HEAD:
         task['method'] = 'HEAD'
-      elif (method == taskqueue_service_pb.TaskQueueAddRequest.PUT):
+      elif method == taskqueue_service_pb.TaskQueueAddRequest.PUT:
         task['method'] = 'PUT'
-      elif (method == taskqueue_service_pb.TaskQueueAddRequest.DELETE):
+      elif method == taskqueue_service_pb.TaskQueueAddRequest.DELETE:
         task['method'] = 'DELETE'
+      else:
+        raise ValueError('Unexpected method: %d' % method)
 
       task['eta'] = _FormatEta(task_request.eta_usec())
       task['eta_delta'] = _EtaDelta(task_request.eta_usec())
@@ -236,7 +312,7 @@
       queue_name: the name of the queue to delete the task from.
       task_name: the name of the task to delete.
     """
-    tasks = self.taskqueues.get(queue_name, [])
+    tasks = self._taskqueues.get(queue_name, [])
     for task in tasks:
       if task.task_name() == task_name:
         tasks.remove(task)
@@ -248,4 +324,4 @@
     Args:
       queue_name: the name of the queue to remove tasks from.
     """
-    self.taskqueues[queue_name] = []
+    self._taskqueues[queue_name] = []
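
Taken together, the stub's Add path now enforces three things: ETAs must be
non-negative and at most MAX_ETA_DELTA_DAYS in the future, unnamed tasks get a
generated name echoed back through chosen_task_name(), and re-adding an
existing task name fails. A unit-test style sketch (the stub and request
construction here is hypothetical, not from the SDK):

    from google.appengine.api.labs.taskqueue import taskqueue_service_pb
    from google.appengine.api.labs.taskqueue import taskqueue_stub
    from google.appengine.runtime import apiproxy_errors

    stub = taskqueue_stub.TaskQueueServiceStub()  # Constructor args assumed.
    request = taskqueue_service_pb.TaskQueueAddRequest()
    response = taskqueue_service_pb.TaskQueueAddResponse()
    request.set_queue_name('default')
    request.set_url('/work')

    request.set_eta_usec(-1)
    try:
      stub._Dynamic_Add(request, response)
    except apiproxy_errors.ApplicationError:
      pass  # INVALID_ETA: negative ETAs are rejected up front.

    request.set_eta_usec(0)  # The epoch: in the past, which is allowed.
    stub._Dynamic_Add(request, response)
    assert response.chosen_task_name() == request.task_name()
    # Adding the same task name again raises TASK_ALREADY_EXISTS.
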
--- a/thirdparty/google_appengine/google/appengine/api/mail.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/mail.py	Sun Sep 06 23:31:53 2009 +0200
@@ -25,10 +25,12 @@
 
 
 
+
+import email
 from email import MIMEBase
 from email import MIMEMultipart
 from email import MIMEText
-import types
+import logging
 
 from google.appengine.api import api_base_pb
 from google.appengine.api import apiproxy_stub_map
@@ -38,51 +40,53 @@
 from google.appengine.runtime import apiproxy_errors
 
 
+
 ERROR_MAP = {
-  mail_service_pb.MailServiceError.BAD_REQUEST:
-    BadRequestError,
+    mail_service_pb.MailServiceError.BAD_REQUEST:
+      BadRequestError,
 
-  mail_service_pb.MailServiceError.UNAUTHORIZED_SENDER:
-    InvalidSenderError,
+    mail_service_pb.MailServiceError.UNAUTHORIZED_SENDER:
+      InvalidSenderError,
 
-  mail_service_pb.MailServiceError.INVALID_ATTACHMENT_TYPE:
-    InvalidAttachmentTypeError,
+    mail_service_pb.MailServiceError.INVALID_ATTACHMENT_TYPE:
+      InvalidAttachmentTypeError,
 }
 
 
 EXTENSION_MIME_MAP = {
-  'asc': 'text/plain',
-  'bmp': 'image/x-ms-bmp',
-  'css': 'text/css',
-  'csv': 'text/csv',
-  'diff': 'text/plain',
-  'gif': 'image/gif',
-  'htm': 'text/html',
-  'html': 'text/html',
-  'ics': 'text/calendar',
-  'jpe': 'image/jpeg',
-  'jpeg': 'image/jpeg',
-  'jpg': 'image/jpeg',
-  'pdf': 'application/pdf',
-  'png': 'image/png',
-  'pot': 'text/plain',
-  'rss': 'text/rss+xml',
-  'text': 'text/plain',
-  'tif': 'image/tiff',
-  'tiff': 'image/tiff',
-  'txt': 'text/plain',
-  'vcf': 'text/directory',
-  'wbmp': 'image/vnd.wap.wbmp',
-}
+    'asc': 'text/plain',
+    'bmp': 'image/x-ms-bmp',
+    'css': 'text/css',
+    'csv': 'text/csv',
+    'diff': 'text/plain',
+    'gif': 'image/gif',
+    'htm': 'text/html',
+    'html': 'text/html',
+    'ics': 'text/calendar',
+    'jpe': 'image/jpeg',
+    'jpeg': 'image/jpeg',
+    'jpg': 'image/jpeg',
+    'pdf': 'application/pdf',
+    'png': 'image/png',
+    'pot': 'text/plain',
+    'rss': 'text/rss+xml',
+    'text': 'text/plain',
+    'tif': 'image/tiff',
+    'tiff': 'image/tiff',
+    'txt': 'text/plain',
+    'vcf': 'text/directory',
+    'wbmp': 'image/vnd.wap.wbmp',
+    }
 
 EXTENSION_WHITELIST = frozenset(EXTENSION_MIME_MAP.iterkeys())
 
 
 def invalid_email_reason(email_address, field):
-  """Determine reason why email is invalid
+  """Determine reason why email is invalid.
 
   Args:
     email_address: Email to check.
+    field: Field that is invalid.
 
   Returns:
     String indicating invalid email reason if there is one,
@@ -93,7 +97,7 @@
 
   if isinstance(email_address, users.User):
     email_address = email_address.email()
-  if not isinstance(email_address, types.StringTypes):
+  if not isinstance(email_address, basestring):
     return 'Invalid email address type for %s.' % field
   stripped_address = email_address.strip()
   if not stripped_address:
@@ -118,10 +122,11 @@
 
 
 def check_email_valid(email_address, field):
-  """Check that email is valid
+  """Check that email is valid.
 
   Args:
     email_address: Email to check.
+    field: Field to check.
 
   Raises:
     InvalidEmailError if email_address is invalid.
@@ -165,7 +170,7 @@
     Single tuple with email in it if only one email string provided,
     else returns emails as is.
   """
-  if isinstance(emails, types.StringTypes):
+  if isinstance(emails, basestring):
     return emails,
   return emails
 
@@ -183,11 +188,29 @@
     Single tuple with attachment tuple in it if only one attachment provided,
     else returns attachments as is.
   """
-  if len(attachments) == 2 and isinstance(attachments[0], types.StringTypes):
+  if len(attachments) == 2 and isinstance(attachments[0], basestring):
     return attachments,
   return attachments
 
 
+def _parse_mime_message(mime_message):
+  """Helper function converts a mime_message in to email.Message.Message.
+
+  Args:
+    mime_message: MIME Message, string or file containing mime message.
+
+  Returns:
+    Instance of email.Message.Message.  Will return mime_message if already
+    an instance.
+  """
+  if isinstance(mime_message, email.Message.Message):
+    return mime_message
+  elif isinstance(mime_message, basestring):
+    return email.message_from_string(mime_message)
+  else:
+    return email.message_from_file(mime_message)
+
+
 def send_mail(sender,
               to,
               subject,
@@ -285,7 +308,7 @@
   to a list of comma separated email addresses.
 
   Args:
-    message: Message PB to convert to MIMEMultitype.
+    protocol_message: Message PB to convert to MIMEMultitype.
 
   Returns:
     MIMEMultitype representing the provided MailMessage.
@@ -334,7 +357,7 @@
 
 
 def _to_str(value):
-  """Helper function to make sure unicode values converted to utf-8
+  """Helper function to make sure unicode values converted to utf-8.
 
   Args:
     value: str or unicode to convert to utf-8.
@@ -346,6 +369,129 @@
     return value.encode('utf-8')
   return value
 
+
+class EncodedPayload(object):
+  """Wrapper for a payload that contains encoding information.
+
+  When an email is received, it is usually encoded using a certain
+  character set, and then possibly further encoded using a transfer
+  encoding in that character set.  Most of the time, it is possible
+  to decode the encoded payload as is; when it is not, the encoded
+  payload and the original encoding information must be preserved.
+
+  Attributes:
+    payload: The original encoded payload.
+    charset: The character set of the encoded payload.  None means use
+      default character set.
+    encoding: The transfer encoding of the encoded payload.  None means
+      content not encoded.
+  """
+
+  def __init__(self, payload, charset=None, encoding=None):
+    """Constructor.
+
+    Args:
+      payload: Maps to attribute of the same name.
+      charset: Maps to attribute of the same name.
+      encoding: Maps to attribute of the same name.
+    """
+    self.payload = payload
+    self.charset = charset
+    self.encoding = encoding
+
+  def decode(self):
+    """Attempt to decode the encoded data.
+
+    Attempt to use Python's codec library to decode the payload.  All
+    exceptions are passed back to the caller.
+
+    Returns:
+      Binary or unicode version of payload content.
+    """
+    payload = self.payload
+
+    if self.encoding and self.encoding.lower() != '7bit':
+      try:
+        payload = payload.decode(self.encoding)
+      except LookupError:
+        raise UnknownEncodingError('Unknown decoding %s.' % self.encoding)
+      except (Exception, Error), e:
+        raise PayloadEncodingError('Could not decode payload: %s' % e)
+
+    if self.charset and str(self.charset).lower() != '7bit':
+      try:
+        payload = payload.decode(str(self.charset))
+      except LookupError:
+        raise UnknownCharsetError('Unknown charset %s.' % self.charset)
+      except (Exception, Error), e:
+        raise PayloadEncodingError('Could not read characters: %s' % e)
+
+    return payload
+
+  def __eq__(self, other):
+    """Equality operator.
+
+    Args:
+      other: The other EncodedPayload object to compare with.  Comparison
+        with other object types is not implemented.
+
+    Returns:
+      True if payload and encodings are equal, else False.
+    """
+    if isinstance(other, EncodedPayload):
+      return (self.payload == other.payload and
+              self.charset == other.charset and
+              self.encoding == other.encoding)
+    else:
+      return NotImplemented
+
+  def copy_to(self, mime_message):
+    """Copy contents to MIME message payload.
+
+    If no content transfer encoding is specified, and the character set does
+    not equal the overall message encoding, the payload will be base64
+    encoded.
+
+    Args:
+      mime_message: Message instance to receive new payload.
+    """
+    if self.encoding:
+      mime_message['content-transfer-encoding'] = self.encoding
+    mime_message.set_payload(self.payload, self.charset)
+
+  def to_mime_message(self):
+    """Convert to MIME message.
+
+    Returns:
+      MIME message instance of payload.
+    """
+    mime_message = email.Message.Message()
+    self.copy_to(mime_message)
+    return mime_message
+
+  def __str__(self):
+    """String representation of encoded message.
+
+    Returns:
+      MIME encoded representation of encoded payload as an independent message.
+    """
+    return str(self.to_mime_message())
+
+  def __repr__(self):
+    """Basic representation of encoded payload.
+
+    Returns:
+      Payload itself is represented by its hash value.
+    """
+    result = '<EncodedPayload payload=#%d' % hash(self.payload)
+    if self.charset:
+      result += ' charset=%s' % self.charset
+    if self.encoding:
+      result += ' encoding=%s' % self.encoding
+    return result + '>'
+
+
 class _EmailMessageBase(object):
   """Base class for email API service objects.
 
@@ -354,25 +500,39 @@
   """
 
   PROPERTIES = set([
-    'sender',
-    'reply_to',
-    'subject',
-    'body',
-    'html',
-    'attachments',
+      'sender',
+      'reply_to',
+      'subject',
+      'body',
+      'html',
+      'attachments',
   ])
 
-  def __init__(self, **kw):
+  PROPERTIES.update(('to', 'cc', 'bcc'))
+
+  def __init__(self, mime_message=None, **kw):
     """Initialize Email message.
 
     Creates new MailMessage protocol buffer and initializes it with any
     keyword arguments.
 
     Args:
+      mime_message: MIME message to initialize from.  If instance of
+        email.Message.Message will take ownership as original message.
       kw: List of keyword properties as defined by PROPERTIES.
     """
+    if mime_message:
+      mime_message = _parse_mime_message(mime_message)
+      self.update_from_mime_message(mime_message)
+      self.__original = mime_message
+
     self.initialize(**kw)
 
+  @property
+  def original(self):
+    """Get original MIME message from which values were set."""
+    return self.__original
+
   def initialize(self, **kw):
     """Keyword initialization.
 
@@ -398,6 +558,7 @@
       - Subject must be set.
       - A recipient must be specified.
       - Must contain a body.
+      - All bodies and attachments must decode properly.
 
     This check does not include determining if the sender is actually
     authorized to send email for the application.
@@ -410,17 +571,45 @@
         MissingSenderError:         No sender specified.
         MissingSubjectError:        Subject is not specified.
         MissingBodyError:           No body specified.
+        PayloadEncodingError:       Payload is not properly encoded.
+        UnknownEncodingError:       Payload has unknown encoding.
+        UnknownCharsetError:        Payload has unknown character set.
     """
     if not hasattr(self, 'sender'):
       raise MissingSenderError()
     if not hasattr(self, 'subject'):
       raise MissingSubjectError()
-    if not hasattr(self, 'body') and not hasattr(self, 'html'):
+
+    found_body = False
+
+    try:
+      body = self.body
+    except AttributeError:
+      pass
+    else:
+      if isinstance(body, EncodedPayload):
+        body.decode()
+      found_body = True
+
+    try:
+      html = self.html
+    except AttributeError:
+      pass
+    else:
+      if isinstance(html, EncodedPayload):
+        html.decode()
+      found_body = True
+
+    if not found_body:
       raise MissingBodyError()
+
     if hasattr(self, 'attachments'):
       for file_name, data in _attachment_sequence(self.attachments):
         _GetMimeType(file_name)
 
+        if isinstance(data, EncodedPayload):
+          data.decode()
+
   def CheckInitialized(self):
     self.check_initialized()
 
@@ -448,6 +637,10 @@
 
     Returns:
       MailMessage protocol version of mail message.
+
+    Raises:
+      Passes through decoding errors that occur when decoding
+      EncodedPayload objects.
     """
     self.check_initialized()
     message = mail_service_pb.MailMessage()
@@ -456,13 +649,22 @@
     if hasattr(self, 'reply_to'):
       message.set_replyto(_to_str(self.reply_to))
     message.set_subject(_to_str(self.subject))
+
     if hasattr(self, 'body'):
-      message.set_textbody(_to_str(self.body))
+      body = self.body
+      if isinstance(body, EncodedPayload):
+        body = body.decode()
+      message.set_textbody(_to_str(body))
     if hasattr(self, 'html'):
-      message.set_htmlbody(_to_str(self.html))
+      html = self.html
+      if isinstance(html, EncodedPayload):
+        html = html.decode()
+      message.set_htmlbody(_to_str(html))
 
     if hasattr(self, 'attachments'):
       for file_name, data in _attachment_sequence(self.attachments):
+        if isinstance(data, EncodedPayload):
+          data = data.decode()
         attachment = message.add_attachment()
         attachment.set_filename(_to_str(file_name))
         attachment.set_data(_to_str(data))
@@ -485,7 +687,7 @@
       MissingSenderError:         No sender specified.
       MissingSubjectError:        Subject is not specified.
       MissingBodyError:           No body specified.
-  """
+    """
     return mail_message_to_mime_message(self.ToProto())
 
   def ToMIMEMessage(self):
@@ -517,8 +719,8 @@
 
   def _check_attachment(self, attachment):
     file_name, data = attachment
-    if not (isinstance(file_name, types.StringTypes) or
-            isinstance(data, types.StringTypes)):
+    if not (isinstance(file_name, basestring) or
+            isinstance(data, basestring)):
       raise TypeError()
 
   def _check_attachments(self, attachments):
@@ -534,7 +736,7 @@
     Raises:
       TypeError if values are not string type.
     """
-    if len(attachments) == 2 and isinstance(attachments[0], types.StringTypes):
+    if len(attachments) == 2 and isinstance(attachments[0], basestring):
       self._check_attachment(attachments)
     else:
       for attachment in attachments:
@@ -548,21 +750,134 @@
     Args:
       attr: Attribute to access.
       value: New value for field.
+
+    Raises:
+      ValueError: If provided with an empty field.
+      AttributeError: If not an allowed assignment field.
     """
-    if attr in ['sender', 'reply_to']:
-      check_email_valid(value, attr)
+    if not attr.startswith('_EmailMessageBase'):
+      if attr in ['sender', 'reply_to']:
+        check_email_valid(value, attr)
 
-    if not value:
-      raise ValueError('May not set empty value for \'%s\'' % attr)
+      if not value:
+        raise ValueError('May not set empty value for \'%s\'' % attr)
 
-    if attr not in self.PROPERTIES:
-      raise AttributeError('\'EmailMessage\' has no attribute \'%s\'' % attr)
+      if attr not in self.PROPERTIES:
+        raise AttributeError('\'EmailMessage\' has no attribute \'%s\'' % attr)
 
-    if attr == 'attachments':
-      self._check_attachments(value)
+      if attr == 'attachments':
+        self._check_attachments(value)
 
     super(_EmailMessageBase, self).__setattr__(attr, value)
 
+  def _add_body(self, content_type, payload):
+    """Add body to email from payload.
+
+    Will overwrite any existing default plain or html body.
+
+    Args:
+      content_type: Content-type of body.
+      payload: Payload to store body as.
+    """
+    if content_type == 'text/plain':
+      self.body = payload
+    elif content_type == 'text/html':
+      self.html = payload
+
+  def _update_payload(self, mime_message):
+    """Update payload of mail message from mime_message.
+
+    This function works recursively when it receives a multipart body.
+    If it receives a non-multi mime object, it will determine whether or
+    not it is an attachment by whether it has a filename or not.  Attachments
+    and bodies are then wrapped in EncodedPayload with the correct charsets and
+    encodings.
+
+    Args:
+      mime_message: A Message MIME email object.
+    """
+    payload = mime_message.get_payload()
+
+    if payload:
+      if mime_message.get_content_maintype() == 'multipart':
+        for alternative in payload:
+          self._update_payload(alternative)
+      else:
+        filename = mime_message.get_param('filename',
+                                          header='content-disposition')
+        if not filename:
+          filename = mime_message.get_param('name')
+
+        payload = EncodedPayload(payload,
+                                 mime_message.get_charset(),
+                                 mime_message['content-transfer-encoding'])
+
+        if filename:
+          try:
+            attachments = self.attachments
+          except AttributeError:
+            self.attachments = (filename, payload)
+          else:
+            if isinstance(attachments[0], basestring):
+              self.attachments = [attachments]
+              attachments = self.attachments
+            attachments.append((filename, payload))
+        else:
+          self._add_body(mime_message.get_content_type(), payload)
+
+  def update_from_mime_message(self, mime_message):
+    """Copy information from a mime message.
+
+    Set information of instance to values of mime message.  This method
+    will only copy values that it finds.  Any missing values will not
+    be copied, nor will they overwrite old values with blank values.
+
+    This object is not guaranteed to be initialized after this call.
+
+    Args:
+      mime_message: email.Message instance to copy information from.
+
+    Returns:
+      MIME Message instance of mime_message argument.
+    """
+    mime_message = _parse_mime_message(mime_message)
+
+    sender = mime_message['from']
+    if sender:
+      self.sender = sender
+
+    reply_to = mime_message['reply-to']
+    if reply_to:
+      self.reply_to = reply_to
+
+    subject = mime_message['subject']
+    if subject:
+      self.subject = subject
+
+    self._update_payload(mime_message)
+
+  def bodies(self, content_type=None):
+    """Iterate over all bodies.
+
+    Yields:
+      Tuple (content_type, payload) for html and body in that order.
+    """
+    if (not content_type or
+        content_type == 'text' or
+        content_type == 'text/html'):
+      try:
+        yield 'text/html', self.html
+      except AttributeError:
+        pass
+
+    if (not content_type or
+        content_type == 'text' or
+        content_type == 'text/plain'):
+      try:
+        yield 'text/plain', self.body
+      except AttributeError:
+        pass
+
 
 class EmailMessage(_EmailMessageBase):
   """Main interface to email API service.
@@ -592,8 +907,7 @@
   """
 
   _API_CALL = 'Send'
-  PROPERTIES = _EmailMessageBase.PROPERTIES
-  PROPERTIES.update(('to', 'cc', 'bcc'))
+  PROPERTIES = set(_EmailMessageBase.PROPERTIES)
 
   def check_initialized(self):
     """Provide additional checks to ensure recipients have been specified.
@@ -629,13 +943,46 @@
   def __setattr__(self, attr, value):
     """Provides additional checks on recipient fields."""
     if attr in ['to', 'cc', 'bcc']:
-      if isinstance(value, types.StringTypes):
+      if isinstance(value, basestring):
         check_email_valid(value, attr)
       else:
-        _email_check_and_list(value, attr)
+        for address in value:
+          check_email_valid(address, attr)
 
     super(EmailMessage, self).__setattr__(attr, value)
 
+  def update_from_mime_message(self, mime_message):
+    """Copy information from a mime message.
+
+    Update fields for recipients.
+
+    Args:
+      mime_message: email.Message instance to copy information from.
+    """
+    mime_message = _parse_mime_message(mime_message)
+    super(EmailMessage, self).update_from_mime_message(mime_message)
+
+    to = mime_message.get_all('to')
+    if to:
+      if len(to) == 1:
+        self.to = to[0]
+      else:
+        self.to = to
+
+    cc = mime_message.get_all('cc')
+    if cc:
+      if len(cc) == 1:
+        self.cc = cc[0]
+      else:
+        self.cc = cc
+
+    bcc = mime_message.get_all('bcc')
+    if bcc:
+      if len(bcc) == 1:
+        self.bcc = bcc[0]
+      else:
+        self.bcc = bcc
+
 
 class AdminEmailMessage(_EmailMessageBase):
   """Interface to sending email messages to all admins via the amil API.
@@ -667,3 +1014,114 @@
   """
 
   _API_CALL = 'SendToAdmins'
+  __UNUSED_PROPERTIES = set(('to', 'cc', 'bcc'))
+
+  def __setattr__(self, attr, value):
+    if attr in self.__UNUSED_PROPERTIES:
+      logging.warning('\'%s\' is not a valid property to set '
+                      'for AdminEmailMessage.  It is unused.', attr)
+    super(AdminEmailMessage, self).__setattr__(attr, value)
+
+
+class InboundEmailMessage(EmailMessage):
+  """Parsed email object as recevied from external source.
+
+  Has a date field and can store any number of additional bodies.  These
+  additional attributes make the email more flexible as required for
+  incoming mail, where the developer has less control over the content.
+
+  Example Usage:
+
+    # Read mail message from CGI input.
+    message = InboundEmailMessage(sys.stdin.read())
+    logging.info('Received email message from %s at %s',
+                 message.sender,
+                 message.date)
+    enriched_body = list(message.bodies('text/enriched'))[0]
+    ... Do something with body ...
+  """
+
+  __HEADER_PROPERTIES = {'date': 'date',
+                         'message_id': 'message-id',
+                        }
+
+  PROPERTIES = frozenset(_EmailMessageBase.PROPERTIES |
+                         set(('alternate_bodies',)) |
+                         set(__HEADER_PROPERTIES.iterkeys()))
+
+  def update_from_mime_message(self, mime_message):
+    """Update values from MIME message.
+
+    Copies over date values.
+
+    Args:
+      mime_message: email.Message instance to copy information from.
+    """
+    mime_message = _parse_mime_message(mime_message)
+    super(InboundEmailMessage, self).update_from_mime_message(mime_message)
+
+    for property, header in InboundEmailMessage.__HEADER_PROPERTIES.iteritems():
+      value = mime_message[header]
+      if value:
+        setattr(self, property, value)
+
+  def _add_body(self, content_type, payload):
+    """Add body to inbound message.
+
+    Method is overridden to handle incoming messages that have more than
+    one plain or html body, or any unidentified bodies.
+
+    This method will not overwrite existing html and body values.  This means
+    that when updating, the text and html bodies that are first in the MIME
+    document order are assigned to the body and html properties.
+
+    Args:
+      content_type: Content-type of additional body.
+      payload: Content of additional body.
+    """
+    if (content_type == 'text/plain' and not hasattr(self, 'body') or
+        content_type == 'text/html' and not hasattr(self, 'html')):
+      super(InboundEmailMessage, self)._add_body(content_type, payload)
+    else:
+      try:
+        alternate_bodies = self.alternate_bodies
+      except AttributeError:
+        alternate_bodies = self.alternate_bodies = [(content_type, payload)]
+      else:
+        alternate_bodies.append((content_type, payload))
+
+  def bodies(self, content_type=None):
+    """Iterate over all bodies.
+
+    Args:
+      content_type: Content type to filter on.  Allows selection of only
+        specific types of content.  Can be just the base type of the content
+        type.  For example:
+          content_type = 'text/html'  # Matches only HTML content.
+          content_type = 'text'       # Matches text of any kind.
+
+    Yields:
+      Tuple (content_type, payload) for all bodies of message, including body,
+      html and all alternate_bodies in that order.
+    """
+    main_bodies = super(InboundEmailMessage, self).bodies(content_type)
+    for payload_type, payload in main_bodies:
+      yield payload_type, payload
+
+    partial_type = bool(content_type and content_type.find('/') < 0)
+
+    try:
+      for payload_type, payload in self.alternate_bodies:
+        if content_type:
+          if partial_type:
+            match_type = payload_type.split('/')[0]
+          else:
+            match_type = payload_type
+          match = match_type == content_type
+        else:
+          match = True
+
+        if match:
+          yield payload_type, payload
+    except AttributeError:
+      pass
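
The new inbound classes let a request handler parse a raw RFC 2822 message
directly; bodies come back wrapped in EncodedPayload and decode lazily. A
sketch (the message text is made up):

    from google.appengine.api import mail

    raw = ('From: sender@example.com\r\n'
           'To: app@example.com\r\n'
           'Subject: hello\r\n'
           '\r\n'
           'Hi there')
    message = mail.InboundEmailMessage(raw)
    for content_type, payload in message.bodies('text/plain'):
      if isinstance(payload, mail.EncodedPayload):
        payload = payload.decode()  # May raise PayloadEncodingError.
      # ... use payload ...
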
--- a/thirdparty/google_appengine/google/appengine/api/mail_errors.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/mail_errors.py	Sun Sep 06 23:31:53 2009 +0200
@@ -44,3 +44,12 @@
 
 class MissingBodyError(Error):
   """No body specified in message."""
+
+class PayloadEncodingError(Error):
+  """Unknown payload encoding."""
+
+class UnknownEncodingError(PayloadEncodingError):
+  """Raised when encoding is not known."""
+
+class UnknownCharsetError(PayloadEncodingError):
+  """Raised when charset is not known."""
--- a/thirdparty/google_appengine/google/appengine/api/mail_service_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/mail_service_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -81,13 +81,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -187,22 +191,24 @@
     if self.has_data_: res+=prefix+("Data: %s\n" % self.DebugFormatString(self.data_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kFileName = 1
   kData = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "FileName",
-   "Data",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "FileName",
+    2: "Data",
+  }, 2)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -532,6 +538,10 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kSender = 1
   kReplyTo = 2
   kTo = 3
@@ -542,40 +552,31 @@
   kHtmlBody = 8
   kAttachment = 9
 
-  _TEXT = (
-   "ErrorCode",
-   "Sender",
-   "ReplyTo",
-   "To",
-   "Cc",
-   "Bcc",
-   "Subject",
-   "TextBody",
-   "HtmlBody",
-   "Attachment",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Sender",
+    2: "ReplyTo",
+    3: "To",
+    4: "Cc",
+    5: "Bcc",
+    6: "Subject",
+    7: "TextBody",
+    8: "HtmlBody",
+    9: "Attachment",
+  }, 9)
 
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+    7: ProtocolBuffer.Encoder.STRING,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STRING,
+  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/memcache/__init__.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -346,7 +346,14 @@
       return None
 
     if not response.has_stats():
-      return None
+      return {
+        STAT_HITS: 0,
+        STAT_MISSES: 0,
+        STAT_BYTE_HITS: 0,
+        STAT_ITEMS: 0,
+        STAT_BYTES: 0,
+        STAT_OLDEST_ITEM_AGES: 0,
+      }
 
     stats = response.stats()
     return {
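
With this change a stats-less response yields an all-zero dict rather than
None, so callers only need to guard the RPC-failure case (the earlier return
None above). A sketch:

    from google.appengine.api import memcache

    stats = memcache.get_stats()
    if stats is not None:  # None still means the RPC itself failed.
      total = stats[memcache.STAT_HITS] + stats[memcache.STAT_MISSES]
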
@@ -770,15 +777,16 @@
                                        time=time, key_prefix=key_prefix,
                                        namespace=namespace)
 
-  def incr(self, key, delta=1, namespace=None):
+  def incr(self, key, delta=1, namespace=None, initial_value=None):
     """Atomically increments a key's value.
 
     Internally, the value is an unsigned 64-bit integer.  Memcache
     doesn't check 64-bit overflows.  The value, if too large, will
     wrap around.
 
-    The key must already exist in the cache to be incremented.  To
-    initialize a counter, set() it to the initial value, as an
+    Unless an initial_value is specified, the key must already exist
+    in the cache to be incremented.  To initialize a counter, either
+    specify initial_value or set() it to the initial value, as an
     ASCII decimal integer.  Future get()s of the key, post-increment,
     will still be an ASCII decimal value.
 
@@ -788,6 +796,9 @@
         defaulting to 1.
       namespace: a string specifying an optional namespace to use in
         the request.
+      initial_value: initial value to put in the cache, if it doesn't
+        already exist.  The default value, None, will not create a cache
+        entry if it doesn't already exist.
 
     Returns:
       New long integer value, or None if key was not in the cache, could not
@@ -798,9 +809,10 @@
       ValueError: If number is negative.
       TypeError: If delta isn't an int or long.
     """
-    return self._incrdecr(key, False, delta, namespace=namespace)
+    return self._incrdecr(key, False, delta, namespace=namespace,
+                          initial_value=initial_value)
 
-  def decr(self, key, delta=1, namespace=None):
+  def decr(self, key, delta=1, namespace=None, initial_value=None):
     """Atomically decrements a key's value.
 
     Internally, the value is a unsigned 64-bit integer.  Memcache
@@ -815,6 +827,9 @@
         defaulting to 1.
       namespace: a string specifying an optional namespace to use in
         the request.
+      initial_value: initial value to put in the cache, if it doesn't
+        already exist.  The default value, None, will not create a cache
+        entry if it doesn't already exist.
 
     Returns:
       New long integer value, or None if key wasn't in cache and couldn't
@@ -824,9 +839,11 @@
       ValueError: If number is negative.
       TypeError: If delta isn't an int or long.
     """
-    return self._incrdecr(key, True, delta, namespace=namespace)
+    return self._incrdecr(key, True, delta, namespace=namespace,
+                          initial_value=initial_value)
 
-  def _incrdecr(self, key, is_negative, delta, namespace=None):
+  def _incrdecr(self, key, is_negative, delta, namespace=None,
+                initial_value=None):
     """Increment or decrement a key by a provided delta.
 
     Args:
@@ -836,6 +853,9 @@
         or decrement by.
       namespace: a string specifying an optional namespace to use in
         the request.
+      initial_value: initial value to put in the cache, if it doesn't
+        already exist.  The default value, None, will not create a cache
+        entry if it doesn't already exist.
 
     Returns:
       New long integer value, or None on cache miss or network/RPC/server
@@ -859,6 +879,8 @@
       request.set_direction(MemcacheIncrementRequest.DECREMENT)
     else:
       request.set_direction(MemcacheIncrementRequest.INCREMENT)
+    if initial_value is not None:
+      request.set_initial_value(long(initial_value))
 
     try:
       self._make_sync_call('memcache', 'Increment', request, response)
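
The new initial_value argument removes the old seed-with-set() dance: on a
miss the counter is created atomically and then adjusted. A usage sketch
(the key name is hypothetical):

    from google.appengine.api import memcache

    client = memcache.Client()
    client.incr('page-hits', initial_value=0)  # Miss: created at 0, then +1.
    client.incr('page-hits')                   # Plain increment: now 2.
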
--- a/thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/memcache_service_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -22,7 +22,6 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import *
 class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
@@ -75,13 +74,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -183,22 +186,24 @@
     if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 1
   kname_space = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "key",
-   "name_space",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "name_space",
+  }, 2)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -403,30 +408,30 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kItemGroup = 1
   kItemkey = 2
   kItemvalue = 3
   kItemflags = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "Item",
-   "key",
-   "value",
-   "flags",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Item",
+    2: "key",
+    3: "value",
+    4: "flags",
+  }, 4)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.FLOAT,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -726,6 +731,10 @@
     if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kItemGroup = 1
   kItemkey = 2
   kItemvalue = 3
@@ -734,34 +743,27 @@
   kItemexpiration_time = 6
   kname_space = 7
 
-  _TEXT = (
-   "ErrorCode",
-   "Item",
-   "key",
-   "value",
-   "flags",
-   "set_policy",
-   "expiration_time",
-   "name_space",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Item",
+    2: "key",
+    3: "value",
+    4: "flags",
+    5: "set_policy",
+    6: "expiration_time",
+    7: "name_space",
+  }, 7)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.FLOAT,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.FLOAT,
+    7: ProtocolBuffer.Encoder.STRING,
+  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -850,18 +852,21 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kset_status = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "set_status",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "set_status",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1063,30 +1068,30 @@
     if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kItemGroup = 1
   kItemkey = 2
   kItemdelete_time = 3
   kname_space = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "Item",
-   "key",
-   "delete_time",
-   "name_space",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Item",
+    2: "key",
+    3: "delete_time",
+    4: "name_space",
+  }, 4)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.FLOAT,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1173,18 +1178,21 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kdelete_status = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "delete_status",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "delete_status",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1209,6 +1217,8 @@
   delta_ = 1
   has_direction_ = 0
   direction_ = 1
+  has_initial_value_ = 0
+  initial_value_ = 0
 
   def __init__(self, contents=None):
     if contents is not None: self.MergeFromString(contents)
@@ -1265,6 +1275,19 @@
 
   def has_direction(self): return self.has_direction_
 
+  def initial_value(self): return self.initial_value_
+
+  def set_initial_value(self, x):
+    self.has_initial_value_ = 1
+    self.initial_value_ = x
+
+  def clear_initial_value(self):
+    if self.has_initial_value_:
+      self.has_initial_value_ = 0
+      self.initial_value_ = 0
+
+  def has_initial_value(self): return self.has_initial_value_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -1272,6 +1295,7 @@
     if (x.has_name_space()): self.set_name_space(x.name_space())
     if (x.has_delta()): self.set_delta(x.delta())
     if (x.has_direction()): self.set_direction(x.direction())
+    if (x.has_initial_value()): self.set_initial_value(x.initial_value())
 
   def Equals(self, x):
     if x is self: return 1
@@ -1283,6 +1307,8 @@
     if self.has_delta_ and self.delta_ != x.delta_: return 0
     if self.has_direction_ != x.has_direction_: return 0
     if self.has_direction_ and self.direction_ != x.direction_: return 0
+    if self.has_initial_value_ != x.has_initial_value_: return 0
+    if self.has_initial_value_ and self.initial_value_ != x.initial_value_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -1299,6 +1325,7 @@
     if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
     if (self.has_delta_): n += 1 + self.lengthVarInt64(self.delta_)
     if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+    if (self.has_initial_value_): n += 1 + self.lengthVarInt64(self.initial_value_)
     return n + 1
 
   def Clear(self):
@@ -1306,6 +1333,7 @@
     self.clear_name_space()
     self.clear_delta()
     self.clear_direction()
+    self.clear_initial_value()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -1319,6 +1347,9 @@
     if (self.has_name_space_):
       out.putVarInt32(34)
       out.putPrefixedString(self.name_space_)
+    if (self.has_initial_value_):
+      out.putVarInt32(40)
+      out.putVarUint64(self.initial_value_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -1335,6 +1366,9 @@
       if tt == 34:
         self.set_name_space(d.getPrefixedString())
         continue
+      if tt == 40:
+        self.set_initial_value(d.getVarUint64())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -1345,32 +1379,36 @@
     if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
     if self.has_delta_: res+=prefix+("delta: %s\n" % self.DebugFormatInt64(self.delta_))
     if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
+    if self.has_initial_value_: res+=prefix+("initial_value: %s\n" % self.DebugFormatInt64(self.initial_value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 1
   kname_space = 4
   kdelta = 2
   kdirection = 3
-
-  _TEXT = (
-   "ErrorCode",
-   "key",
-   "delta",
-   "direction",
-   "name_space",
-  )
+  kinitial_value = 5
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "delta",
+    3: "direction",
+    4: "name_space",
+    5: "initial_value",
+  }, 5)
 
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1437,18 +1475,21 @@
     if self.has_new_value_: res+=prefix+("new_value: %s\n" % self.DebugFormatInt64(self.new_value_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   knew_value = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "new_value",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "new_value",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1492,13 +1533,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1542,13 +1587,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1592,13 +1641,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1817,6 +1870,10 @@
     if self.has_oldest_item_age_: res+=prefix+("oldest_item_age: %s\n" % self.DebugFormatFixed32(self.oldest_item_age_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   khits = 1
   kmisses = 2
   kbyte_hits = 3
@@ -1824,31 +1881,25 @@
   kbytes = 5
   koldest_item_age = 6
 
-  _TEXT = (
-   "ErrorCode",
-   "hits",
-   "misses",
-   "byte_hits",
-   "items",
-   "bytes",
-   "oldest_item_age",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "hits",
+    2: "misses",
+    3: "byte_hits",
+    4: "items",
+    5: "bytes",
+    6: "oldest_item_age",
+  }, 6)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.FLOAT,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+    6: ProtocolBuffer.Encoder.FLOAT,
+  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1929,18 +1980,21 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kstats = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "stats",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "stats",
+  }, 1)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/memcache/memcache_stub.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/memcache/memcache_stub.py	Sun Sep 06 23:31:53 2009 +0200
@@ -229,7 +229,16 @@
     key = request.key()
     entry = self._GetKey(namespace, key)
     if entry is None:
-      return
+      if not request.has_initial_value():
+        return
+      if namespace not in self._the_cache:
+        self._the_cache[namespace] = {}
+      self._the_cache[namespace][key] = CacheEntry(str(request.initial_value()),
+                                                   expiration=0,
+                                                   flags=0,
+                                                   gettime=self._gettime)
+      entry = self._GetKey(namespace, key)
+      assert entry is not None
 
     try:
       old_value = long(entry.value)
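
This stub change implements the new increment semantics: a missing key is created from the request's initial_value and then incremented, instead of the call silently returning None. A hedged client-side sketch, assuming memcache.incr grew a matching initial_value keyword in this release:

  from google.appengine.api import memcache

  # First call: key absent, so it is seeded with initial_value, then bumped.
  assert memcache.incr('page_hits', initial_value=0) == 1
  # Subsequent calls behave as before.
  assert memcache.incr('page_hits') == 2
  # Without initial_value, a missing key still yields None.
  assert memcache.incr('no_such_key') is None
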
--- a/thirdparty/google_appengine/google/appengine/api/namespace_manager/__init__.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/namespace_manager/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -27,8 +27,8 @@
 import os
 
 ENV_DEFAULT_NAMESPACE = 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'
+ENV_CURRENT_NAMESPACE = '__INTERNAL_CURRENT_NAMESPACE'
 
-__default_namespace = None
 
 def set_request_namespace(namespace):
   """Set the default namespace to use for future calls, for this request only.
@@ -37,20 +37,28 @@
     namespace: A string naming the new namespace to use. The empty
       string specifies the root namespace for this app.
   """
-  global __default_namespace
-  __default_namespace = namespace
+  os.environ[ENV_CURRENT_NAMESPACE] = namespace
 
 
 def get_request_namespace():
-  """Get the name of the current default namespace. The empty string
-  indicates that the root namespace is the default."""
-  global __default_namespace
-  if __default_namespace is None:
+  """Get the name of the current default namespace.
+
+  The empty string indicates that the root namespace is the default.
+  """
+  return os.getenv(ENV_CURRENT_NAMESPACE, '')
+
+
+def _enable_request_namespace():
+  """Automatically enable namespace to default for domain.
+
+  Calling this function will automatically default the namespace to the
+  chosen Google Apps domain for the current request.
+  """
+  if ENV_CURRENT_NAMESPACE not in os.environ:
     if ENV_DEFAULT_NAMESPACE in os.environ:
-      __default_namespace = os.environ[ENV_DEFAULT_NAMESPACE]
+      os.environ[ENV_CURRENT_NAMESPACE] = os.environ[ENV_DEFAULT_NAMESPACE]
     else:
-      __default_namespace = ''
-  return __default_namespace
+      os.environ[ENV_CURRENT_NAMESPACE] = ''
 
 
 def _add_name_space(request, namespace=None):
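
Storing the current namespace in os.environ instead of a module global means it is wiped along with the rest of the CGI environment between requests, so one request's set_request_namespace can no longer leak into the next. The observable behaviour, per the code above:

  import os
  from google.appengine.api import namespace_manager

  namespace_manager.set_request_namespace('tenant-a')
  assert namespace_manager.get_request_namespace() == 'tenant-a'

  # With nothing set, the getter falls back to the root namespace.
  del os.environ[namespace_manager.ENV_CURRENT_NAMESPACE]
  assert namespace_manager.get_request_namespace() == ''
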
--- a/thirdparty/google_appengine/google/appengine/api/queueinfo.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/queueinfo.py	Sun Sep 06 23:31:53 2009 +0200
@@ -21,20 +21,22 @@
 for an application. Supports loading the records from queue.yaml.
 
 A queue has two required parameters and one optional one. The required
-parameters are 'name' (must be unique for an appid) and 'rate' (the
-rate at which jobs in the queue are run). There is an optional 'bucket_size'
-that will allow tokens to be 'saved up' and bucket_size. Rate and bucket_size rate are
-expressed as number/unit, with number being an int or a float, and unit being
-one of 's' (seconds), 'm' (minutes), 'h' (hours) or 'd' (days).
+parameters are 'name' (must be unique for an appid) and 'rate' (the rate
+at which jobs in the queue are run). There is an optional parameter
+'bucket_size' that will allow tokens to be 'saved up' (for more on the
+algorithm, see http://en.wikipedia.org/wiki/Token_Bucket). rate is expressed
+as number/unit, with number being an int or a float, and unit being one of
+'s' (seconds), 'm' (minutes), 'h' (hours) or 'd' (days). bucket_size is
+an integer.
 
-An example of the use of bucket_size rate: the free email quota is 2000/d, and the
-maximum you can send in a single minute is 11. So we can define a queue for
-sending email like this:
+An example of the use of rate and bucket_size: the free email quota is 2000/d,
+and the maximum you can send in a single minute is 11. So we can define a
+queue for sending email like this:
 
 queue:
 - name: mail_queue
   rate: 2000/d
-  bucket_size: 10/m
+  bucket_size: 10
 
 If this queue had been idle for a while before some jobs were submitted to it,
 the first 10 jobs submitted would be run immediately, then subsequent ones
@@ -49,7 +51,7 @@
 from google.appengine.api import yaml_object
 
 _NAME_REGEX = r'^[A-Za-z0-9-]{0,499}$'
-_RATE_REGEX = r'^[0-9]+(\.[0-9]+)?/[smhd]'
+_RATE_REGEX = r'^(0|[0-9]+(\.[0-9]*)?/[smhd])'
 
 QUEUE = 'queue'
 
@@ -102,7 +104,7 @@
 
 
 def ParseRate(rate):
-  """Parses a rate string in the form number/unit.
+  """Parses a rate string in the form number/unit, or the literal 0.
 
   The unit is one of s (seconds), m (minutes), h (hours) or d (days).
 
@@ -115,6 +117,8 @@
   Raises:
     MalformedQueueConfiguration: if the rate is invalid
   """
+  if rate == "0":
+    return 0.0
   elements = rate.split('/')
   if len(elements) != 2:
     raise MalformedQueueConfiguration('Rate "%s" is invalid.' % rate)
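
The regex and docstring changes make the bare literal '0' a valid rate (a paused queue) and drop the number/unit form for bucket_size. Assuming ParseRate continues to return the rate in tasks per second, as in prior releases:

  from google.appengine.api import queueinfo

  assert queueinfo.ParseRate('0') == 0.0            # new: a paused queue
  assert queueinfo.ParseRate('10/m') == 10 / 60.0
  assert abs(queueinfo.ParseRate('2000/d') - 2000 / 86400.0) < 1e-9
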
--- a/thirdparty/google_appengine/google/appengine/api/quota.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/quota.py	Sun Sep 06 23:31:53 2009 +0200
@@ -37,3 +37,35 @@
   if _apphosting_runtime___python__apiproxy:
     return _apphosting_runtime___python__apiproxy.get_request_cpu_usage()
   return 0
+
+def get_request_api_cpu_usage():
+  """Get the amount of CPU used so far by API calls during the current request.
+
+  Returns the number of megacycles used so far by API calls for the current
+  request. Does not include CPU used by code in the request itself.
+
+  Does nothing when used in the dev_appserver.
+  """
+
+  if _apphosting_runtime___python__apiproxy:
+    return _apphosting_runtime___python__apiproxy.get_request_api_cpu_usage()
+  return 0
+
+MCYCLES_PER_SECOND = 1200.0
+"""Megacycles to CPU seconds.  Convert by using a 1.2 GHz 64-bit x86 CPU."""
+
+def megacycles_to_cpu_seconds(mcycles):
+  """Convert an input value in megacycles to CPU-seconds.
+
+  Returns a double representing the CPU-seconds the input megacycle value
+  converts to.
+  """
+  return mcycles / MCYCLES_PER_SECOND
+
+def cpu_seconds_to_megacycles(cpu_secs):
+  """Convert an input value in CPU-seconds to megacycles.
+
+  Returns an integer representing the megacycles the input CPU-seconds value
+  converts to.
+  """
+  return int(cpu_secs * MCYCLES_PER_SECOND)
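
These helpers make the megacycle counters comparable to wall-clock CPU budgets; note the asymmetry, since the seconds-to-megacycles direction truncates to an int. For example:

  from google.appengine.api import quota

  total_mcycles = (quota.get_request_cpu_usage() +
                   quota.get_request_api_cpu_usage())
  cpu_seconds = quota.megacycles_to_cpu_seconds(total_mcycles)

  assert quota.megacycles_to_cpu_seconds(1200) == 1.0
  assert quota.cpu_seconds_to_megacycles(0.5) == 600
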
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_service_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -22,7 +22,6 @@
 __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
                    unusednames=printElemNumber,debug_strs no-special"""
 
-from google.appengine.api.api_base_pb import *
 class URLFetchServiceError(ProtocolBuffer.ProtocolMessage):
 
   OK           =    0
@@ -83,13 +82,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -426,6 +429,10 @@
     if self.has_deadline_: res+=prefix+("Deadline: %s\n" % self.DebugFormat(self.deadline_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kMethod = 1
   kUrl = 2
   kHeaderGroup = 3
@@ -435,37 +442,29 @@
   kFollowRedirects = 7
   kDeadline = 8
 
-  _TEXT = (
-   "ErrorCode",
-   "Method",
-   "Url",
-   "Header",
-   "Key",
-   "Value",
-   "Payload",
-   "FollowRedirects",
-   "Deadline",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Method",
+    2: "Url",
+    3: "Header",
+    4: "Key",
+    5: "Value",
+    6: "Payload",
+    7: "FollowRedirects",
+    8: "Deadline",
+  }, 8)
 
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STARTGROUP,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+    7: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.DOUBLE,
+  }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -781,6 +780,10 @@
     if self.has_externalbytesreceived_: res+=prefix+("ExternalBytesReceived: %s\n" % self.DebugFormatInt64(self.externalbytesreceived_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kContent = 1
   kStatusCode = 2
   kHeaderGroup = 3
@@ -790,37 +793,29 @@
   kExternalBytesSent = 7
   kExternalBytesReceived = 8
 
-  _TEXT = (
-   "ErrorCode",
-   "Content",
-   "StatusCode",
-   "Header",
-   "Key",
-   "Value",
-   "ContentWasTruncated",
-   "ExternalBytesSent",
-   "ExternalBytesReceived",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Content",
+    2: "StatusCode",
+    3: "Header",
+    4: "Key",
+    5: "Value",
+    6: "ContentWasTruncated",
+    7: "ExternalBytesSent",
+    8: "ExternalBytesReceived",
+  }, 8)
 
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STARTGROUP,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+    7: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.NUMERIC,
+  }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py	Sun Sep 06 23:31:53 2009 +0200
@@ -55,7 +55,6 @@
 _UNTRUSTED_REQUEST_HEADERS = frozenset([
   'content-length',
   'host',
-  'referer',
   'vary',
   'via',
   'x-forwarded-for',
@@ -168,7 +167,6 @@
       adjusted_headers = {
           'User-Agent':
           'AppEngine-Google; (+http://code.google.com/appengine)',
-          'Referer': 'http://localhost/',
           'Host': host,
           'Accept-Encoding': 'gzip',
       }
@@ -212,7 +210,10 @@
           socket.setdefaulttimeout(deadline)
           connection.request(method, full_path, payload, adjusted_headers)
           http_response = connection.getresponse()
-          http_response_data = http_response.read()
+          if method == 'HEAD':
+            http_response_data = ''
+          else:
+            http_response_data = http_response.read()
         finally:
           socket.setdefaulttimeout(orig_timeout)
           connection.close()
@@ -239,7 +240,7 @@
               header_value == 'gzip'):
             continue
           if header_key.lower() == 'content-length':
-            header_value = len(response.content())
+            header_value = str(len(response.content()))
           header_proto = response.add_header()
           header_proto.set_key(header_key)
           header_proto.set_value(header_value)
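
Three behavioural fixes in this stub: HEAD responses no longer attempt to read a body (some servers never send one, which could hang the fetch), the dev server stops injecting a fake Referer, and Content-Length is written back as a string, as the response proto's header fields require. A quick way to exercise the HEAD path against the stub, for example:

  from google.appengine.api import urlfetch

  result = urlfetch.fetch('http://example.com/', method=urlfetch.HEAD)
  assert result.content == ''                # HEAD bodies are now empty
  # Header values are plain strings after the str() fix above.
  for name, value in result.headers.items():
    assert isinstance(value, str)
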
--- a/thirdparty/google_appengine/google/appengine/api/user_service_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/user_service_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -77,15 +77,415 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CreateLoginURLRequest(ProtocolBuffer.ProtocolMessage):
+  has_destination_url_ = 0
+  destination_url_ = ""
+  has_auth_domain_ = 0
+  auth_domain_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def destination_url(self): return self.destination_url_
+
+  def set_destination_url(self, x):
+    self.has_destination_url_ = 1
+    self.destination_url_ = x
+
+  def clear_destination_url(self):
+    if self.has_destination_url_:
+      self.has_destination_url_ = 0
+      self.destination_url_ = ""
+
+  def has_destination_url(self): return self.has_destination_url_
+
+  def auth_domain(self): return self.auth_domain_
+
+  def set_auth_domain(self, x):
+    self.has_auth_domain_ = 1
+    self.auth_domain_ = x
+
+  def clear_auth_domain(self):
+    if self.has_auth_domain_:
+      self.has_auth_domain_ = 0
+      self.auth_domain_ = ""
+
+  def has_auth_domain(self): return self.has_auth_domain_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_destination_url()): self.set_destination_url(x.destination_url())
+    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_destination_url_ != x.has_destination_url_: return 0
+    if self.has_destination_url_ and self.destination_url_ != x.destination_url_: return 0
+    if self.has_auth_domain_ != x.has_auth_domain_: return 0
+    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_destination_url_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: destination_url not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.destination_url_))
+    if (self.has_auth_domain_): n += 1 + self.lengthString(len(self.auth_domain_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_destination_url()
+    self.clear_auth_domain()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.destination_url_)
+    if (self.has_auth_domain_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.auth_domain_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_destination_url(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_auth_domain(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_destination_url_: res+=prefix+("destination_url: %s\n" % self.DebugFormatString(self.destination_url_))
+    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kdestination_url = 1
+  kauth_domain = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "destination_url",
+    2: "auth_domain",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CreateLoginURLResponse(ProtocolBuffer.ProtocolMessage):
+  has_login_url_ = 0
+  login_url_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def login_url(self): return self.login_url_
+
+  def set_login_url(self, x):
+    self.has_login_url_ = 1
+    self.login_url_ = x
+
+  def clear_login_url(self):
+    if self.has_login_url_:
+      self.has_login_url_ = 0
+      self.login_url_ = ""
+
+  def has_login_url(self): return self.has_login_url_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_login_url()): self.set_login_url(x.login_url())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_login_url_ != x.has_login_url_: return 0
+    if self.has_login_url_ and self.login_url_ != x.login_url_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_login_url_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: login_url not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.login_url_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_login_url()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.login_url_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_login_url(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_login_url_: res+=prefix+("login_url: %s\n" % self.DebugFormatString(self.login_url_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  klogin_url = 1
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "login_url",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CreateLogoutURLRequest(ProtocolBuffer.ProtocolMessage):
+  has_destination_url_ = 0
+  destination_url_ = ""
+  has_auth_domain_ = 0
+  auth_domain_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def destination_url(self): return self.destination_url_
+
+  def set_destination_url(self, x):
+    self.has_destination_url_ = 1
+    self.destination_url_ = x
+
+  def clear_destination_url(self):
+    if self.has_destination_url_:
+      self.has_destination_url_ = 0
+      self.destination_url_ = ""
+
+  def has_destination_url(self): return self.has_destination_url_
+
+  def auth_domain(self): return self.auth_domain_
+
+  def set_auth_domain(self, x):
+    self.has_auth_domain_ = 1
+    self.auth_domain_ = x
+
+  def clear_auth_domain(self):
+    if self.has_auth_domain_:
+      self.has_auth_domain_ = 0
+      self.auth_domain_ = ""
+
+  def has_auth_domain(self): return self.has_auth_domain_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_destination_url()): self.set_destination_url(x.destination_url())
+    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_destination_url_ != x.has_destination_url_: return 0
+    if self.has_destination_url_ and self.destination_url_ != x.destination_url_: return 0
+    if self.has_auth_domain_ != x.has_auth_domain_: return 0
+    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_destination_url_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: destination_url not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.destination_url_))
+    if (self.has_auth_domain_): n += 1 + self.lengthString(len(self.auth_domain_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_destination_url()
+    self.clear_auth_domain()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.destination_url_)
+    if (self.has_auth_domain_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.auth_domain_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_destination_url(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_auth_domain(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_destination_url_: res+=prefix+("destination_url: %s\n" % self.DebugFormatString(self.destination_url_))
+    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kdestination_url = 1
+  kauth_domain = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "destination_url",
+    2: "auth_domain",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CreateLogoutURLResponse(ProtocolBuffer.ProtocolMessage):
+  has_logout_url_ = 0
+  logout_url_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def logout_url(self): return self.logout_url_
+
+  def set_logout_url(self, x):
+    self.has_logout_url_ = 1
+    self.logout_url_ = x
+
+  def clear_logout_url(self):
+    if self.has_logout_url_:
+      self.has_logout_url_ = 0
+      self.logout_url_ = ""
+
+  def has_logout_url(self): return self.has_logout_url_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_logout_url()): self.set_logout_url(x.logout_url())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_logout_url_ != x.has_logout_url_: return 0
+    if self.has_logout_url_ and self.logout_url_ != x.logout_url_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_logout_url_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: logout_url not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.logout_url_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_logout_url()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.logout_url_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_logout_url(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_logout_url_: res+=prefix+("logout_url: %s\n" % self.DebugFormatString(self.logout_url_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  klogout_url = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "logout_url",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['UserServiceError']
+__all__ = ['UserServiceError','CreateLoginURLRequest','CreateLoginURLResponse','CreateLogoutURLRequest','CreateLogoutURLResponse']
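
The user service drops the generic StringProto in favour of dedicated request/response messages; their accessors follow the usual generated-code pattern shown above. For instance:

  from google.appengine.api import user_service_pb

  req = user_service_pb.CreateLoginURLRequest()
  req.set_destination_url('/after_login')
  assert req.has_destination_url() and not req.has_auth_domain()

  # destination_url is required, so an empty request is uninitialized.
  errors = []
  assert not user_service_pb.CreateLoginURLRequest().IsInitialized(errors)
  assert errors == ['Required field: destination_url not set.']
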
--- a/thirdparty/google_appengine/google/appengine/api/user_service_stub.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/user_service_stub.py	Sun Sep 06 23:31:53 2009 +0200
@@ -65,9 +65,9 @@
       response: the login URL; a base.StringProto
     """
     self.__num_requests += 1
-    response.set_value(
+    response.set_login_url(
         self._login_url %
-        urllib.quote(self._AddHostToContinueURL(request.value())))
+        urllib.quote(self._AddHostToContinueURL(request.destination_url())))
 
   def _Dynamic_CreateLogoutURL(self, request, response):
     """Trivial implementation of UserService.CreateLogoutURL().
@@ -77,9 +77,9 @@
       response: the logout URL; a base.StringProto
     """
     self.__num_requests += 1
-    response.set_value(
+    response.set_logout_url(
         self._logout_url %
-        urllib.quote(self._AddHostToContinueURL(request.value())))
+        urllib.quote(self._AddHostToContinueURL(request.destination_url())))
 
   def _AddHostToContinueURL(self, continue_url):
     """Adds the request host to the continue url if no host is specified.
--- a/thirdparty/google_appengine/google/appengine/api/users.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/users.py	Sun Sep 06 23:31:53 2009 +0200
@@ -162,9 +162,9 @@
   Returns:
     string
   """
-  req = user_service_pb.StringProto()
-  resp = user_service_pb.StringProto()
-  req.set_value(dest_url)
+  req = user_service_pb.CreateLoginURLRequest()
+  resp = user_service_pb.CreateLoginURLResponse()
+  req.set_destination_url(dest_url)
   try:
     apiproxy_stub_map.MakeSyncCall('user', 'CreateLoginURL', req, resp)
   except apiproxy_errors.ApplicationError, e:
@@ -176,7 +176,7 @@
       raise NotAllowedError
     else:
       raise e
-  return resp.value()
+  return resp.login_url()
 
 CreateLoginURL = create_login_url
 
@@ -192,9 +192,9 @@
   Returns:
     string
   """
-  req = user_service_pb.StringProto()
-  resp = user_service_pb.StringProto()
-  req.set_value(dest_url)
+  req = user_service_pb.CreateLogoutURLRequest()
+  resp = user_service_pb.CreateLogoutURLResponse()
+  req.set_destination_url(dest_url)
   try:
     apiproxy_stub_map.MakeSyncCall('user', 'CreateLogoutURL', req, resp)
   except apiproxy_errors.ApplicationError, e:
@@ -203,7 +203,7 @@
       raise RedirectTooLongError
     else:
       raise e
-  return resp.value()
+  return resp.logout_url()
 
 CreateLogoutURL = create_logout_url
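
The public users API is untouched by the proto swap; only the wire messages changed. Typical application code still reads:

  from google.appengine.api import users

  user = users.get_current_user()
  if user:
    url, label = users.create_logout_url('/'), 'Sign out'
  else:
    url, label = users.create_login_url('/welcome'), 'Sign in'
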
 
--- a/thirdparty/google_appengine/google/appengine/api/validation.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/api/validation.py	Sun Sep 06 23:31:53 2009 +0200
@@ -919,7 +919,9 @@
                             'but found \'%s\'.' % value)
 
     for item in value:
-      if not isinstance(item, self.constructor):
+      if isinstance(self.constructor, Validator):
+        item = self.constructor.Validate(item)
+      elif not isinstance(item, self.constructor):
         raise ValidationError('Repeated items must be %s, but found \'%s\'.'
                               % (str(self.constructor), str(item)))
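
Repeated previously required its constructor to be a type and merely isinstance-checked each element; it can now also be given a Validator instance, and each element is run through that validator's Validate(). A hedged sketch using Regex, one of the Validator subclasses defined in this module:

  from google.appengine.api import validation

  names = validation.Repeated(validation.Regex(r'[a-z]+'))
  names.Validate(['alpha', 'beta'])          # each element is validated

  try:
    names.Validate(['alpha', 'NOT-OK'])
  except validation.ValidationError:
    pass                                     # per-element failures still raise
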
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/xmpp/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,332 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""XMPP API.
+
+This module allows AppEngine apps to interact with a bot representing that app
+on the Google Talk network.
+
+Functions defined in this module:
+  get_presence: Gets the presence for a JID.
+  send_message: Sends a chat message to any number of JIDs.
+  send_invite: Sends an invitation to chat to a JID.
+
+Classes defined in this module:
+  Message: A class to encapsulate received messages.
+"""
+
+
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api.xmpp import xmpp_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+NO_ERROR    = xmpp_service_pb.XmppMessageResponse.NO_ERROR
+INVALID_JID = xmpp_service_pb.XmppMessageResponse.INVALID_JID
+OTHER_ERROR = xmpp_service_pb.XmppMessageResponse.OTHER_ERROR
+
+
+MESSAGE_TYPE_NONE = ""
+MESSAGE_TYPE_CHAT = "chat"
+MESSAGE_TYPE_ERROR = "error"
+MESSAGE_TYPE_GROUPCHAT = "groupchat"
+MESSAGE_TYPE_HEADLINE = "headline"
+MESSAGE_TYPE_NORMAL = "normal"
+
+_VALID_MESSAGE_TYPES = frozenset([MESSAGE_TYPE_NONE, MESSAGE_TYPE_CHAT,
+                                  MESSAGE_TYPE_ERROR, MESSAGE_TYPE_GROUPCHAT,
+                                  MESSAGE_TYPE_HEADLINE, MESSAGE_TYPE_NORMAL])
+
+
+class Error(Exception):
+  """Base error class for this module."""
+
+
+class InvalidJidError(Error):
+  """Error that indicates a request for an invalid JID."""
+
+
+class InvalidTypeError(Error):
+  """Error that indicates a send message request has an invalid type."""
+
+
+class InvalidXmlError(Error):
+  """Error that indicates a send message request has invalid XML."""
+
+
+class NoBodyError(Error):
+  """Error that indicates a send message request has no body."""
+
+
+class InvalidMessageError(Error):
+  """Error that indicates a received message was invalid or incomplete."""
+
+
+def get_presence(jid, from_jid=None):
+  """Gets the presence for a JID.
+
+  Args:
+    jid: The JID of the contact whose presence is requested.
+    from_jid: The optional custom JID to use for sending; defaults to
+      <appid>@appspot.com, which may also be passed explicitly. Custom JIDs
+      can be of the form <anything>@<appid>.appspotchat.com.
+
+  Returns:
+    bool, Whether the user is online.
+
+  Raises:
+    InvalidJidError if any of the JIDs passed are invalid.
+    Error if an unspecified error happens processing the request.
+  """
+  if not jid:
+    raise InvalidJidError()
+
+  request = xmpp_service_pb.PresenceRequest()
+  response = xmpp_service_pb.PresenceResponse()
+
+  request.set_jid(_to_str(jid))
+  if from_jid:
+    request.set_from_jid(_to_str(from_jid))
+
+  try:
+    apiproxy_stub_map.MakeSyncCall("xmpp",
+                                   "GetPresence",
+                                   request,
+                                   response)
+  except apiproxy_errors.ApplicationError, e:
+    if (e.application_error ==
+        xmpp_service_pb.XmppServiceError.INVALID_JID):
+      raise InvalidJidError()
+    else:
+      raise Error()
+
+  return bool(response.is_available())
+
+
+def send_invite(jid, from_jid=None):
+  """Sends an invitation to chat to a JID.
+
+  Args:
+    jid: The JID of the contact to invite.
+    from_jid: The optional custom JID to use for sending; defaults to
+      <appid>@appspot.com, which may also be passed explicitly. Custom JIDs
+      can be of the form <anything>@<appid>.appspotchat.com.
+
+  Raises:
+    InvalidJidError if the JID passed is invalid.
+    Error if an unspecified error happens processing the request.
+  """
+  if not jid:
+    raise InvalidJidError()
+
+  request = xmpp_service_pb.XmppInviteRequest()
+  response = xmpp_service_pb.XmppInviteResponse()
+
+  request.set_jid(_to_str(jid))
+  if from_jid:
+    request.set_from_jid(_to_str(from_jid))
+
+  try:
+    apiproxy_stub_map.MakeSyncCall("xmpp",
+                                   "SendInvite",
+                                   request,
+                                   response)
+  except apiproxy_errors.ApplicationError, e:
+    if (e.application_error ==
+        xmpp_service_pb.XmppServiceError.INVALID_JID):
+      raise InvalidJidError()
+    else:
+      raise Error()
+
+  return
+
+
+def send_message(jids, body, from_jid=None, message_type=MESSAGE_TYPE_CHAT,
+                 raw_xml=False):
+  """Sends a chat message to a list of JIDs.
+
+  Args:
+    jids: A list of JIDs to send the message to, or a single JID to send the
+      message to.
+    body: The body of the message.
+    from_jid: The optional custom JID to use for sending; defaults to
+      <appid>@appspot.com, which may also be passed explicitly. Custom JIDs
+      can be of the form <anything>@<appid>.appspotchat.com.
+    message_type: Optional type of the message. Should be one of the types
+      specified in RFC 3921, section 2.1.1. An empty string will result in a
+      message stanza without a type attribute. For convenience, all of the
+      valid types are in the MESSAGE_TYPE_* constants in this file. The
+      default is MESSAGE_TYPE_CHAT. Anything else will throw an exception.
+    raw_xml: Optionally specifies that the body should be interpreted as XML. If
+      this is false, the contents of the body will be escaped and placed inside
+      of a body element inside of the message. If this is true, the contents
+      will be made children of the message.
+
+  Returns:
+    list, A list of statuses, one for each JID, corresponding to the result of
+      sending the message to that JID. Or, if a single JID was passed in,
+      returns the status directly.
+
+  Raises:
+    InvalidJidError if there is no valid JID in the list.
+    InvalidTypeError if the type argument is invalid.
+    InvalidXmlError if the body is malformed XML and raw_xml is True.
+    NoBodyError if there is no body.
+    Error if another error occurs processing the request.
+  """
+  request = xmpp_service_pb.XmppMessageRequest()
+  response = xmpp_service_pb.XmppMessageResponse()
+
+  if not body:
+    raise NoBodyError()
+
+  if not jids:
+    raise InvalidJidError()
+
+  if not message_type in _VALID_MESSAGE_TYPES:
+    raise InvalidTypeError()
+
+  single_jid = False
+  if isinstance(jids, basestring):
+    single_jid = True
+    jids = [jids]
+
+  for jid in jids:
+    if not jid:
+      raise InvalidJidError()
+    request.add_jid(_to_str(jid))
+
+  request.set_body(_to_str(body))
+  request.set_type(_to_str(message_type))
+  request.set_raw_xml(raw_xml)
+  if from_jid:
+    request.set_from_jid(_to_str(from_jid))
+
+  try:
+    apiproxy_stub_map.MakeSyncCall("xmpp",
+                                   "SendMessage",
+                                   request,
+                                   response)
+  except apiproxy_errors.ApplicationError, e:
+    if (e.application_error ==
+        xmpp_service_pb.XmppServiceError.INVALID_JID):
+      raise InvalidJidError()
+    elif (e.application_error ==
+          xmpp_service_pb.XmppServiceError.INVALID_TYPE):
+      raise InvalidTypeError()
+    elif (e.application_error ==
+          xmpp_service_pb.XmppServiceError.INVALID_XML):
+      raise InvalidXmlError()
+    elif (e.application_error ==
+          xmpp_service_pb.XmppServiceError.NO_BODY):
+      raise NoBodyError()
+    raise Error()
+
+  if single_jid:
+    return response.status_list()[0]
+  return response.status_list()
+
+
+class Message(object):
+  """Encapsulates an XMPP message received by the application."""
+
+  def __init__(self, vars):
+    """Constructs a new XMPP Message from an HTTP request.
+
+    Args:
+      vars: A dict-like object to extract message arguments from.
+    """
+    try:
+      self.__sender = vars["from"]
+      self.__to = vars["to"]
+      self.__body = vars["body"]
+    except KeyError, e:
+      raise InvalidMessageError(e[0])
+    self.__command = None
+    self.__arg = None
+
+  @property
+  def sender(self):
+    return self.__sender
+
+  @property
+  def to(self):
+    return self.__to
+
+  @property
+  def body(self):
+    return self.__body
+
+  def __parse_command(self):
+    if self.__arg is not None:
+      return
+
+    body = self.__body
+    if body.startswith('\\'):
+      body = '/' + body[1:]
+
+    self.__arg = ''
+    if body.startswith('/'):
+      parts = body.split(' ', 1)
+      self.__command = parts[0][1:]
+      if len(parts) > 1:
+        self.__arg = parts[1].strip()
+    else:
+      self.__arg = self.__body.strip()
+
+  @property
+  def command(self):
+    self.__parse_command()
+    return self.__command
+
+  @property
+  def arg(self):
+    self.__parse_command()
+    return self.__arg
+
+  def reply(self, body, message_type=MESSAGE_TYPE_CHAT, raw_xml=False,
+            send_message=send_message):
+    """Convenience function to reply to a message.
+
+    Args:
+      body: The body of the message, as a string.
+      message_type, raw_xml: As per send_message.
+      send_message: Used for testing.
+
+    Returns:
+      A status code as per send_message.
+
+    Raises:
+      See send_message.
+    """
+    return send_message([self.sender], body, from_jid=self.to,
+                        message_type=message_type, raw_xml=raw_xml)
+
+
+def _to_str(value):
+  """Helper function to make sure unicode values converted to utf-8
+
+  Args:
+    value: str or unicode to convert to utf-8.
+
+  Returns:
+    UTF-8 encoded str of value, otherwise value unchanged.
+  """
+  if isinstance(value, unicode):
+    return value.encode('utf-8')
+  return value
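
This module is typically driven from the XMPP webhook the runtime posts to at /_ah/xmpp/message/chat/. A minimal handler sketch (the webapp wiring is assumed from the era's docs, not part of this module):

  from google.appengine.api import xmpp
  from google.appengine.ext import webapp

  class XMPPHandler(webapp.RequestHandler):
    def post(self):
      message = xmpp.Message(self.request.POST)
      if message.command == 'ping':
        message.reply('pong')
      elif xmpp.get_presence(message.sender):
        status = xmpp.send_message(message.sender, 'Hello, ' + message.arg)
        assert status in (xmpp.NO_ERROR, xmpp.INVALID_JID, xmpp.OTHER_ERROR)
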
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,826 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+                   unusednames=printElemNumber,debug_strs no-special"""
+
+class XmppServiceError(ProtocolBuffer.ProtocolMessage):
+
+  UNSPECIFIED_ERROR =    1
+  INVALID_JID  =    2
+  NO_BODY      =    3
+  INVALID_XML  =    4
+  INVALID_TYPE =    5
+
+  _ErrorCode_NAMES = {
+    1: "UNSPECIFIED_ERROR",
+    2: "INVALID_JID",
+    3: "NO_BODY",
+    4: "INVALID_XML",
+    5: "INVALID_TYPE",
+  }
+
+  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+  ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+  def __init__(self, contents=None):
+    pass
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+
+  def Equals(self, x):
+    if x is self: return 1
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    return n + 0
+
+  def Clear(self):
+    pass
+
+  def OutputUnchecked(self, out):
+    pass
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class PresenceRequest(ProtocolBuffer.ProtocolMessage):
+  has_jid_ = 0
+  jid_ = ""
+  has_from_jid_ = 0
+  from_jid_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def jid(self): return self.jid_
+
+  def set_jid(self, x):
+    self.has_jid_ = 1
+    self.jid_ = x
+
+  def clear_jid(self):
+    if self.has_jid_:
+      self.has_jid_ = 0
+      self.jid_ = ""
+
+  def has_jid(self): return self.has_jid_
+
+  def from_jid(self): return self.from_jid_
+
+  def set_from_jid(self, x):
+    self.has_from_jid_ = 1
+    self.from_jid_ = x
+
+  def clear_from_jid(self):
+    if self.has_from_jid_:
+      self.has_from_jid_ = 0
+      self.from_jid_ = ""
+
+  def has_from_jid(self): return self.has_from_jid_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_jid()): self.set_jid(x.jid())
+    if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_jid_ != x.has_jid_: return 0
+    if self.has_jid_ and self.jid_ != x.jid_: return 0
+    if self.has_from_jid_ != x.has_from_jid_: return 0
+    if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_jid_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: jid not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.jid_))
+    if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_jid()
+    self.clear_from_jid()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.jid_)
+    if (self.has_from_jid_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.from_jid_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_jid(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_from_jid(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_jid_: res+=prefix+("jid: %s\n" % self.DebugFormatString(self.jid_))
+    if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kjid = 1
+  kfrom_jid = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "jid",
+    2: "from_jid",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class PresenceResponse(ProtocolBuffer.ProtocolMessage):
+
+  NORMAL       =    0
+  AWAY         =    1
+  DO_NOT_DISTURB =    2
+  CHAT         =    3
+  EXTENDED_AWAY =    4
+
+  _SHOW_NAMES = {
+    0: "NORMAL",
+    1: "AWAY",
+    2: "DO_NOT_DISTURB",
+    3: "CHAT",
+    4: "EXTENDED_AWAY",
+  }
+
+  def SHOW_Name(cls, x): return cls._SHOW_NAMES.get(x, "")
+  SHOW_Name = classmethod(SHOW_Name)
+
+  has_is_available_ = 0
+  is_available_ = 0
+  has_presence_ = 0
+  presence_ = 0
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def is_available(self): return self.is_available_
+
+  def set_is_available(self, x):
+    self.has_is_available_ = 1
+    self.is_available_ = x
+
+  def clear_is_available(self):
+    if self.has_is_available_:
+      self.has_is_available_ = 0
+      self.is_available_ = 0
+
+  def has_is_available(self): return self.has_is_available_
+
+  def presence(self): return self.presence_
+
+  def set_presence(self, x):
+    self.has_presence_ = 1
+    self.presence_ = x
+
+  def clear_presence(self):
+    if self.has_presence_:
+      self.has_presence_ = 0
+      self.presence_ = 0
+
+  def has_presence(self): return self.has_presence_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_is_available()): self.set_is_available(x.is_available())
+    if (x.has_presence()): self.set_presence(x.presence())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_is_available_ != x.has_is_available_: return 0
+    if self.has_is_available_ and self.is_available_ != x.is_available_: return 0
+    if self.has_presence_ != x.has_presence_: return 0
+    if self.has_presence_ and self.presence_ != x.presence_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_is_available_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: is_available not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_presence_): n += 1 + self.lengthVarInt64(self.presence_)
+    return n + 2
+
+  def Clear(self):
+    self.clear_is_available()
+    self.clear_presence()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(8)
+    out.putBoolean(self.is_available_)
+    if (self.has_presence_):
+      out.putVarInt32(16)
+      out.putVarInt32(self.presence_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_is_available(d.getBoolean())
+        continue
+      if tt == 16:
+        self.set_presence(d.getVarInt32())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_is_available_: res+=prefix+("is_available: %s\n" % self.DebugFormatBool(self.is_available_))
+    if self.has_presence_: res+=prefix+("presence: %s\n" % self.DebugFormatInt32(self.presence_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kis_available = 1
+  kpresence = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "is_available",
+    2: "presence",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class XmppMessageRequest(ProtocolBuffer.ProtocolMessage):
+  has_body_ = 0
+  body_ = ""
+  has_raw_xml_ = 0
+  raw_xml_ = 0
+  has_type_ = 0
+  type_ = "chat"
+  has_from_jid_ = 0
+  from_jid_ = ""
+
+  def __init__(self, contents=None):
+    self.jid_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def jid_size(self): return len(self.jid_)
+  def jid_list(self): return self.jid_
+
+  def jid(self, i):
+    return self.jid_[i]
+
+  def set_jid(self, i, x):
+    self.jid_[i] = x
+
+  def add_jid(self, x):
+    self.jid_.append(x)
+
+  def clear_jid(self):
+    self.jid_ = []
+
+  def body(self): return self.body_
+
+  def set_body(self, x):
+    self.has_body_ = 1
+    self.body_ = x
+
+  def clear_body(self):
+    if self.has_body_:
+      self.has_body_ = 0
+      self.body_ = ""
+
+  def has_body(self): return self.has_body_
+
+  def raw_xml(self): return self.raw_xml_
+
+  def set_raw_xml(self, x):
+    self.has_raw_xml_ = 1
+    self.raw_xml_ = x
+
+  def clear_raw_xml(self):
+    if self.has_raw_xml_:
+      self.has_raw_xml_ = 0
+      self.raw_xml_ = 0
+
+  def has_raw_xml(self): return self.has_raw_xml_
+
+  def type(self): return self.type_
+
+  def set_type(self, x):
+    self.has_type_ = 1
+    self.type_ = x
+
+  def clear_type(self):
+    if self.has_type_:
+      self.has_type_ = 0
+      self.type_ = "chat"
+
+  def has_type(self): return self.has_type_
+
+  def from_jid(self): return self.from_jid_
+
+  def set_from_jid(self, x):
+    self.has_from_jid_ = 1
+    self.from_jid_ = x
+
+  def clear_from_jid(self):
+    if self.has_from_jid_:
+      self.has_from_jid_ = 0
+      self.from_jid_ = ""
+
+  def has_from_jid(self): return self.has_from_jid_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    for i in xrange(x.jid_size()): self.add_jid(x.jid(i))
+    if (x.has_body()): self.set_body(x.body())
+    if (x.has_raw_xml()): self.set_raw_xml(x.raw_xml())
+    if (x.has_type()): self.set_type(x.type())
+    if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if len(self.jid_) != len(x.jid_): return 0
+    for e1, e2 in zip(self.jid_, x.jid_):
+      if e1 != e2: return 0
+    if self.has_body_ != x.has_body_: return 0
+    if self.has_body_ and self.body_ != x.body_: return 0
+    if self.has_raw_xml_ != x.has_raw_xml_: return 0
+    if self.has_raw_xml_ and self.raw_xml_ != x.raw_xml_: return 0
+    if self.has_type_ != x.has_type_: return 0
+    if self.has_type_ and self.type_ != x.type_: return 0
+    if self.has_from_jid_ != x.has_from_jid_: return 0
+    if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_body_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: body not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += 1 * len(self.jid_)
+    for i in xrange(len(self.jid_)): n += self.lengthString(len(self.jid_[i]))
+    n += self.lengthString(len(self.body_))
+    if (self.has_raw_xml_): n += 2
+    if (self.has_type_): n += 1 + self.lengthString(len(self.type_))
+    if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_jid()
+    self.clear_body()
+    self.clear_raw_xml()
+    self.clear_type()
+    self.clear_from_jid()
+
+  def OutputUnchecked(self, out):
+    for i in xrange(len(self.jid_)):
+      out.putVarInt32(10)
+      out.putPrefixedString(self.jid_[i])
+    out.putVarInt32(18)
+    out.putPrefixedString(self.body_)
+    if (self.has_raw_xml_):
+      out.putVarInt32(24)
+      out.putBoolean(self.raw_xml_)
+    if (self.has_type_):
+      out.putVarInt32(34)
+      out.putPrefixedString(self.type_)
+    if (self.has_from_jid_):
+      out.putVarInt32(42)
+      out.putPrefixedString(self.from_jid_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.add_jid(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_body(d.getPrefixedString())
+        continue
+      if tt == 24:
+        self.set_raw_xml(d.getBoolean())
+        continue
+      if tt == 34:
+        self.set_type(d.getPrefixedString())
+        continue
+      if tt == 42:
+        self.set_from_jid(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    cnt=0
+    for e in self.jid_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("jid%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
+    if self.has_raw_xml_: res+=prefix+("raw_xml: %s\n" % self.DebugFormatBool(self.raw_xml_))
+    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
+    if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kjid = 1
+  kbody = 2
+  kraw_xml = 3
+  ktype = 4
+  kfrom_jid = 5
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "jid",
+    2: "body",
+    3: "raw_xml",
+    4: "type",
+    5: "from_jid",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class XmppMessageResponse(ProtocolBuffer.ProtocolMessage):
+
+  NO_ERROR     =    0
+  INVALID_JID  =    1
+  OTHER_ERROR  =    2
+
+  _XmppMessageStatus_NAMES = {
+    0: "NO_ERROR",
+    1: "INVALID_JID",
+    2: "OTHER_ERROR",
+  }
+
+  def XmppMessageStatus_Name(cls, x): return cls._XmppMessageStatus_NAMES.get(x, "")
+  XmppMessageStatus_Name = classmethod(XmppMessageStatus_Name)
+
+
+  def __init__(self, contents=None):
+    self.status_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def status_size(self): return len(self.status_)
+  def status_list(self): return self.status_
+
+  def status(self, i):
+    return self.status_[i]
+
+  def set_status(self, i, x):
+    self.status_[i] = x
+
+  def add_status(self, x):
+    self.status_.append(x)
+
+  def clear_status(self):
+    self.status_ = []
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    for i in xrange(x.status_size()): self.add_status(x.status(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if len(self.status_) != len(x.status_): return 0
+    for e1, e2 in zip(self.status_, x.status_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += 1 * len(self.status_)
+    for i in xrange(len(self.status_)): n += self.lengthVarInt64(self.status_[i])
+    return n + 0
+
+  def Clear(self):
+    self.clear_status()
+
+  def OutputUnchecked(self, out):
+    for i in xrange(len(self.status_)):
+      out.putVarInt32(8)
+      out.putVarInt32(self.status_[i])
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.add_status(d.getVarInt32())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    cnt=0
+    for e in self.status_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("status%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+      cnt+=1
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kstatus = 1
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "status",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class XmppInviteRequest(ProtocolBuffer.ProtocolMessage):
+  has_jid_ = 0
+  jid_ = ""
+  has_from_jid_ = 0
+  from_jid_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def jid(self): return self.jid_
+
+  def set_jid(self, x):
+    self.has_jid_ = 1
+    self.jid_ = x
+
+  def clear_jid(self):
+    if self.has_jid_:
+      self.has_jid_ = 0
+      self.jid_ = ""
+
+  def has_jid(self): return self.has_jid_
+
+  def from_jid(self): return self.from_jid_
+
+  def set_from_jid(self, x):
+    self.has_from_jid_ = 1
+    self.from_jid_ = x
+
+  def clear_from_jid(self):
+    if self.has_from_jid_:
+      self.has_from_jid_ = 0
+      self.from_jid_ = ""
+
+  def has_from_jid(self): return self.has_from_jid_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_jid()): self.set_jid(x.jid())
+    if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_jid_ != x.has_jid_: return 0
+    if self.has_jid_ and self.jid_ != x.jid_: return 0
+    if self.has_from_jid_ != x.has_from_jid_: return 0
+    if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_jid_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: jid not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.jid_))
+    if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+    return n + 1
+
+  def Clear(self):
+    self.clear_jid()
+    self.clear_from_jid()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.jid_)
+    if (self.has_from_jid_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.from_jid_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_jid(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_from_jid(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_jid_: res+=prefix+("jid: %s\n" % self.DebugFormatString(self.jid_))
+    if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kjid = 1
+  kfrom_jid = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "jid",
+    2: "from_jid",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class XmppInviteResponse(ProtocolBuffer.ProtocolMessage):
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+
+  def Equals(self, x):
+    if x is self: return 1
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    return n + 0
+
+  def Clear(self):
+    pass
+
+  def OutputUnchecked(self, out):
+    pass
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['XmppServiceError','PresenceRequest','PresenceResponse','XmppMessageRequest','XmppMessageResponse','XmppInviteRequest','XmppInviteResponse']
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the XMPP API, writes messages to logs."""
+
+
+
+
+
+import logging
+import os
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import xmpp
+from google.appengine.api.xmpp import xmpp_service_pb
+
+
+class XmppServiceStub(apiproxy_stub.APIProxyStub):
+  """Python only xmpp service stub.
+
+  This stub does not use an XMPP network. It prints messages to the console
+  instead of sending any stanzas.
+  """
+
+  def __init__(self, log=logging.info, service_name='xmpp'):
+    """Initializer.
+
+    Args:
+      log: A logger, used for dependency injection.
+      service_name: Service name expected for all calls.
+    """
+    super(XmppServiceStub, self).__init__(service_name)
+    self.log = log
+
+  def _Dynamic_GetPresence(self, request, response):
+    """Implementation of XmppService::GetPresence.
+
+    Returns online if the first character of the JID comes before 'm' in the
+    alphabet, otherwise returns offline.
+
+    Args:
+      request: A PresenceRequest.
+      response: A PresenceResponse.
+    """
+    jid = request.jid()
+    self._GetFrom(request.from_jid())
+    if jid[0] < 'm':
+      response.set_is_available(True)
+    else:
+      response.set_is_available(False)
+
+  def _Dynamic_SendMessage(self, request, response):
+    """Implementation of XmppService::SendMessage.
+
+    Args:
+      request: An XmppMessageRequest.
+      response: An XmppMessageResponse.
+    """
+    from_jid = self._GetFrom(request.from_jid())
+    self.log('Sending an XMPP Message:')
+    self.log('    From:')
+    self.log('       ' + from_jid)
+    self.log('    Body:')
+    self.log('       ' + request.body())
+    self.log('    Type:')
+    self.log('       ' + request.type())
+    self.log('    Raw Xml:')
+    self.log('       ' + str(request.raw_xml()))
+    self.log('    To JIDs:')
+    for jid in request.jid_list():
+      self.log('       ' + jid)
+
+    for jid in request.jid_list():
+      response.add_status(xmpp_service_pb.XmppMessageResponse.NO_ERROR)
+
+  def _Dynamic_SendInvite(self, request, response):
+    """Implementation of XmppService::SendInvite.
+
+    Args:
+      request: An XmppInviteRequest.
+      response: An XmppInviteResponse.
+    """
+    from_jid = self._GetFrom(request.from_jid())
+    self.log('Sending an XMPP Invite:')
+    self.log('    From:')
+    self.log('       ' + from_jid)
+    self.log('    To: ' + request.jid())
+
+  def _GetFrom(self, requested):
+    """Validates that the from JID is valid.
+
+    Args:
+      requested: The requested from JID.
+
+    Returns:
+      string, The from JID.
+
+    Raises:
+      xmpp.InvalidJidError if the requested JID is invalid.
+    """
+
+    appid = os.environ.get('APPLICATION_ID', '')
+    if requested is None or requested == '':
+      return appid + '@appspot.com/bot'
+
+    node, domain, resource = ('', '', '')
+    at = requested.find('@')
+    if at == -1:
+      self.log('Invalid From JID: No \'@\' character found. JID: %s', requested)
+      raise xmpp.InvalidJidError()
+
+    node = requested[:at]
+    rest = requested[at+1:]
+
+    if rest.find('@') > -1:
+      self.log('Invalid From JID: Second \'@\' character found. JID: %s',
+               requested)
+      raise xmpp.InvalidJidError()
+
+    slash = rest.find('/')
+    if slash == -1:
+      domain = rest
+      resource = 'bot'
+    else:
+      domain = rest[:slash]
+      resource = rest[slash+1:]
+
+    if resource.find('/') > -1:
+      self.log('Invalid From JID: Second \'/\' character found. JID: %s',
+               requested)
+      raise xmpp.InvalidJidError()
+
+    if domain == 'appspot.com' and node == appid:
+      return node + '@' + domain + '/' + resource
+    elif domain == appid + '.appspotchat.com':
+      return node + '@' + domain + '/' + resource
+
+    self.log('Invalid From JID: Must be appid@appspot.com[/resource] or '
+             'node@appid.appspotchat.com[/resource]. JID: %s', requested)
+    raise xmpp.InvalidJidError()
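
For orientation, a sketch of how the stub's JID defaulting behaves (illustrative only: 'my-app' is an invented application id, and APPLICATION_ID is set by hand here the way dev_appserver normally sets it):

    import os
    os.environ['APPLICATION_ID'] = 'my-app'

    stub = XmppServiceStub()
    stub._GetFrom('')                                 # -> 'my-app@appspot.com/bot'
    stub._GetFrom('my-app@appspot.com')               # -> 'my-app@appspot.com/bot'
    stub._GetFrom('node@my-app.appspotchat.com/web')  # -> 'node@my-app.appspotchat.com/web'
    stub._GetFrom('stranger@example.com')             # raises xmpp.InvalidJidError
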
--- a/thirdparty/google_appengine/google/appengine/base/capabilities_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/base/capabilities_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -143,22 +143,24 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kconfig = 1
   kdefault_config = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "config",
-   "default_config",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "config",
+    2: "default_config",
+  }, 2)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
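
The _TEXT/_TYPES rewrites throughout this changeset rely on the new _BuildTagLookupTable helper defined above: it expands a sparse {tag: value} dict into a dense tuple indexed by tag number, filling unassigned tags with a default instead of relying on hand-placed padding entries. For example:

    _BuildTagLookupTable({0: "ErrorCode", 1: "config", 2: "default_config"}, 2)
    # -> ('ErrorCode', 'config', 'default_config')

    _BuildTagLookupTable({0: "a", 2: "c"}, 3)
    # -> ('a', None, 'c', None)   # missing tags 1 and 3 fall back to the default
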
@@ -409,6 +411,10 @@
     if self.has_error_message_: res+=prefix+("error_message: %s\n" % self.DebugFormatString(self.error_message_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kpackage = 1
   kcapability = 2
   kstatus = 3
@@ -417,34 +423,27 @@
   kadmin_message = 5
   kerror_message = 6
 
-  _TEXT = (
-   "ErrorCode",
-   "package",
-   "capability",
-   "status",
-   "internal_message",
-   "admin_message",
-   "error_message",
-   "scheduled_time",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "package",
+    2: "capability",
+    3: "status",
+    4: "internal_message",
+    5: "admin_message",
+    6: "error_message",
+    7: "scheduled_time",
+  }, 7)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+    7: ProtocolBuffer.Encoder.STRING,
+  }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/cron/groctimespecification.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/cron/groctimespecification.py	Sun Sep 06 23:31:53 2009 +0200
@@ -47,9 +47,12 @@
 
 try:
   from pytz import NonExistentTimeError
+  from pytz import AmbiguousTimeError
 except ImportError:
   class NonExistentTimeError(Exception):
     pass
+  class AmbiguousTimeError(Exception):
+    pass
 
 
 def GrocTimeSpecification(schedule):
@@ -119,7 +122,7 @@
   """
 
   def __init__(self, interval, period):
-    super(IntervalTimeSpecification, self).__init__(self)
+    super(IntervalTimeSpecification, self).__init__()
     self.interval = interval
     self.period = period
 
@@ -286,13 +289,15 @@
                                       microsecond=0)
         if self.timezone and pytz is not None:
           try:
+            out = self.timezone.localize(out, is_dst=None)
+          except AmbiguousTimeError:
             out = self.timezone.localize(out)
-          except (NonExistentTimeError, IndexError):
+          except NonExistentTimeError:
             for _ in range(24):
               out = out.replace(minute=1) + datetime.timedelta(minutes=60)
               try:
                 out = self.timezone.localize(out)
-              except (NonExistentTimeError, IndexError):
+              except NonExistentTimeError:
                 continue
               break
           out = out.astimezone(pytz.utc)
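
The groctimespecification change leans on pytz's three-way localize behavior: tz.localize(dt, is_dst=None) raises AmbiguousTimeError for wall times that occur twice at a DST fall-back and NonExistentTimeError for wall times skipped at spring-forward, while a plain tz.localize(dt) silently picks a default. A minimal illustration (the US/Eastern 2009 transition dates are just an example):

    import datetime
    import pytz
    from pytz import AmbiguousTimeError, NonExistentTimeError

    eastern = pytz.timezone('US/Eastern')

    # 2009-03-08 02:30 was skipped (spring forward) -> NonExistentTimeError
    # 2009-11-01 01:30 occurred twice (fall back)   -> AmbiguousTimeError
    for naive in (datetime.datetime(2009, 3, 8, 2, 30),
                  datetime.datetime(2009, 11, 1, 1, 30)):
      try:
        eastern.localize(naive, is_dst=None)
      except AmbiguousTimeError:
        out = eastern.localize(naive)   # accept pytz's default DST choice
      except NonExistentTimeError:
        pass                            # skip forward, as the patched loop does
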
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/datastore/action_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+                   unusednames=printElemNumber,debug_strs no-special"""
+
+class Action(ProtocolBuffer.ProtocolMessage):
+  pass
--- a/thirdparty/google_appengine/google/appengine/datastore/datastore_index.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_index.py	Sun Sep 06 23:31:53 2009 +0200
@@ -192,6 +192,74 @@
                         ))
 
 
+def Normalize(filters, orders):
+  """ Normalizes filter and order query components.
+
+  The resulting components have the same effect as the given components if used
+  in a query.
+
+  Returns:
+    (filter, orders) the reduced set of filters and orders
+  """
+
+  for f in filters:
+    if f.op() == datastore_pb.Query_Filter.IN and f.property_size() == 1:
+      f.set_op(datastore_pb.Query_Filter.EQUAL)
+
+  eq_properties = set([f.property(0).name() for f in filters
+                       if f.op() == datastore_pb.Query_Filter.EQUAL])
+
+  remove_set = eq_properties.copy()
+  new_orders = []
+  for o in orders:
+    if o.property() not in remove_set:
+      remove_set.add(o.property())
+      new_orders.append(o)
+  orders = new_orders
+
+
+  if datastore_types._KEY_SPECIAL_PROPERTY in eq_properties:
+    orders = []
+
+  new_orders = []
+  for o in orders:
+    if o.property() == datastore_types._KEY_SPECIAL_PROPERTY:
+      new_orders.append(o)
+      break
+    new_orders.append(o)
+  orders = new_orders
+
+  return (filters, orders)
+
+
+def RemoveNativelySupportedComponents(filters, orders):
+  """ Removes query components that are natively supported by the datastore.
+
+  The resulting filters and orders should not be used in an actual query.
+
+  Returns
+    (filters, orders) the reduced set of filters and orders
+  """
+  (filters, orders) = Normalize(filters, orders)
+
+  has_key_desc_order = False
+  if orders and orders[-1].property() == datastore_types._KEY_SPECIAL_PROPERTY:
+    if orders[-1].direction() == ASCENDING:
+      orders = orders[:-1]
+    else:
+      has_key_desc_order = True
+
+  if not has_key_desc_order:
+    for f in filters:
+      if (f.op() in INEQUALITY_OPERATORS and
+          f.property(0).name() != datastore_types._KEY_SPECIAL_PROPERTY):
+        break
+    else:
+      filters = [f for f in filters
+          if f.property(0).name() != datastore_types._KEY_SPECIAL_PROPERTY]
+
+  return (filters, orders)
+
+
 def CompositeIndexForQuery(query):
   """Return the composite index needed for a query.
 
@@ -213,12 +281,18 @@
     can be at most one of these.
 
   - After that come all the (property, direction) pairs for the Order
-    entries, in the order given in the query.  Exceptions: (a) if
-    there is a Filter entry with an inequality operator that matches
-    the first Order entry, the first order pair is omitted (or,
-    equivalently, in this case the inequality pair is omitted); (b) if
-    an Order entry corresponds to an equality filter, it is ignored
-    (since there will only ever be one value returned).
+    entries, in the order given in the query.  Exceptions:
+      (a) if there is a Filter entry with an inequality operator that matches
+          the first Order entry, the first order pair is omitted (or,
+          equivalently, in this case the inequality pair is omitted).
+      (b) if an Order entry corresponds to an equality filter, it is ignored
+          (since there will only ever be one value returned).
+      (c) if there is an equality filter on __key__, all orders are dropped
+          (since there will be at most one result returned).
+      (d) if there is an order on __key__, all further orders are dropped
+          (since keys are unique).
+      (e) orders on __key__ ASCENDING are dropped (since this is supported
+          natively by the datastore).
 
   - Finally, if there are Filter entries whose operator is EXISTS, and
     whose property names are not already listed, they are added, with
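
To make rules (b), (d) and (e) concrete, consider a hypothetical query (the kind and property names are invented):

    # kind = Person, FILTER city = 'x',
    # ORDER BY city, __key__ ASC, name
    #
    #   ORDER BY city      dropped -- rule (b): city carries an equality filter
    #   ORDER BY name      dropped -- rule (d): it follows an order on __key__
    #   __key__ ASC        dropped -- rule (e): ascending key order is native
    #
    # With an equality filter on __key__ instead, rule (c) would drop all
    # orders outright, since at most one entity can match.
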
@@ -271,16 +345,18 @@
     nprops = len(filter.property_list())
     assert nprops == 1, 'Filter has %s properties, expected 1' % nprops
 
-  if ancestor and not kind and not filters and not orders:
+  if not kind:
     required = False
 
+  (filters, orders) = RemoveNativelySupportedComponents(filters, orders)
+
   eq_filters = [f for f in filters if f.op() in EQUALITY_OPERATORS]
   ineq_filters = [f for f in filters if f.op() in INEQUALITY_OPERATORS]
   exists_filters = [f for f in filters if f.op() in EXISTS_OPERATORS]
   assert (len(eq_filters) + len(ineq_filters) +
           len(exists_filters)) == len(filters), 'Not all filters used'
 
-  if (kind and eq_filters and not ineq_filters and not exists_filters and
+  if (kind and not ineq_filters and not exists_filters and
       not orders):
     names = set(f.property(0).name() for f in eq_filters)
     if not names.intersection(datastore_types._SPECIAL_PROPERTIES):
@@ -292,16 +368,6 @@
     for filter in ineq_filters:
       assert filter.property(0).name() == ineq_property
 
-  new_orders = []
-  for order in orders:
-    name = order.property()
-    for filter in eq_filters:
-      if filter.property(0).name() == name:
-        break
-    else:
-      new_orders.append(order)
-  orders = new_orders
-
   props = []
 
   for f in eq_filters:
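
Normalize's order pruning, re-expressed on plain strings for illustration (this mirror is equivalent in effect but is not the datastore_pb-based implementation; single-valued IN filters are additionally rewritten to EQUAL before this step runs):

    def normalize_orders(eq_properties, orders):
      """Mirror of Normalize's order pruning, on plain property names."""
      seen, kept = set(eq_properties), []
      for prop in orders:
        if prop not in seen:          # orders on equality-filtered or
          seen.add(prop)              # already-ordered properties are no-ops
          kept.append(prop)
        if prop == '__key__':         # keys are unique; later orders are no-ops
          break
      return [] if '__key__' in eq_properties else kept

    normalize_orders(set(['a']), ['a', 'b', '__key__', 'c'])  # -> ['b', '__key__']
    normalize_orders(set(['__key__']), ['b'])                 # -> []
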
--- a/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -25,10 +25,12 @@
 from google.appengine.api.api_base_pb import Integer64Proto;
 from google.appengine.api.api_base_pb import StringProto;
 from google.appengine.api.api_base_pb import VoidProto;
+from google.appengine.datastore.action_pb import Action
 from google.appengine.datastore.entity_pb import CompositeIndex
 from google.appengine.datastore.entity_pb import EntityProto
 from google.appengine.datastore.entity_pb import Index
 from google.appengine.datastore.entity_pb import Property
+from google.appengine.datastore.entity_pb import Path
 from google.appengine.datastore.entity_pb import Reference
 class Transaction(ProtocolBuffer.ProtocolMessage):
   has_handle_ = 0
@@ -95,18 +97,21 @@
     if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   khandle = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "handle",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.DOUBLE,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "handle",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.DOUBLE,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -374,6 +379,8 @@
   search_query_ = ""
   has_hint_ = 0
   hint_ = 0
+  has_count_ = 0
+  count_ = 0
   has_offset_ = 0
   offset_ = 0
   has_limit_ = 0
@@ -384,6 +391,10 @@
   keys_only_ = 0
   has_transaction_ = 0
   transaction_ = None
+  has_distinct_ = 0
+  distinct_ = 0
+  has_compile_ = 0
+  compile_ = 0
 
   def __init__(self, contents=None):
     self.filter_ = []
@@ -494,6 +505,19 @@
 
   def has_hint(self): return self.has_hint_
 
+  def count(self): return self.count_
+
+  def set_count(self, x):
+    self.has_count_ = 1
+    self.count_ = x
+
+  def clear_count(self):
+    if self.has_count_:
+      self.has_count_ = 0
+      self.count_ = 0
+
+  def has_count(self): return self.has_count_
+
   def offset(self): return self.offset_
 
   def set_offset(self, x):
@@ -580,6 +604,32 @@
 
   def has_transaction(self): return self.has_transaction_
 
+  def distinct(self): return self.distinct_
+
+  def set_distinct(self, x):
+    self.has_distinct_ = 1
+    self.distinct_ = x
+
+  def clear_distinct(self):
+    if self.has_distinct_:
+      self.has_distinct_ = 0
+      self.distinct_ = 0
+
+  def has_distinct(self): return self.has_distinct_
+
+  def compile(self): return self.compile_
+
+  def set_compile(self, x):
+    self.has_compile_ = 1
+    self.compile_ = x
+
+  def clear_compile(self):
+    if self.has_compile_:
+      self.has_compile_ = 0
+      self.compile_ = 0
+
+  def has_compile(self): return self.has_compile_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -590,12 +640,15 @@
     if (x.has_search_query()): self.set_search_query(x.search_query())
     for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
     if (x.has_hint()): self.set_hint(x.hint())
+    if (x.has_count()): self.set_count(x.count())
     if (x.has_offset()): self.set_offset(x.offset())
     if (x.has_limit()): self.set_limit(x.limit())
     for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
     if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
     if (x.has_keys_only()): self.set_keys_only(x.keys_only())
     if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    if (x.has_distinct()): self.set_distinct(x.distinct())
+    if (x.has_compile()): self.set_compile(x.compile())
 
   def Equals(self, x):
     if x is self: return 1
@@ -615,6 +668,8 @@
       if e1 != e2: return 0
     if self.has_hint_ != x.has_hint_: return 0
     if self.has_hint_ and self.hint_ != x.hint_: return 0
+    if self.has_count_ != x.has_count_: return 0
+    if self.has_count_ and self.count_ != x.count_: return 0
     if self.has_offset_ != x.has_offset_: return 0
     if self.has_offset_ and self.offset_ != x.offset_: return 0
     if self.has_limit_ != x.has_limit_: return 0
@@ -628,6 +683,10 @@
     if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
     if self.has_transaction_ != x.has_transaction_: return 0
     if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if self.has_distinct_ != x.has_distinct_: return 0
+    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
+    if self.has_compile_ != x.has_compile_: return 0
+    if self.has_compile_ and self.compile_ != x.compile_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -657,6 +716,7 @@
     n += 2 * len(self.order_)
     for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
     if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
+    if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
     if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
     if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
     n += 2 * len(self.composite_index_)
@@ -664,6 +724,8 @@
     if (self.has_require_perfect_plan_): n += 3
     if (self.has_keys_only_): n += 3
     if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize())
+    if (self.has_distinct_): n += 3
+    if (self.has_compile_): n += 3
     return n + 1
 
   def Clear(self):
@@ -674,12 +736,15 @@
     self.clear_search_query()
     self.clear_order()
     self.clear_hint()
+    self.clear_count()
     self.clear_offset()
     self.clear_limit()
     self.clear_composite_index()
     self.clear_require_perfect_plan()
     self.clear_keys_only()
     self.clear_transaction()
+    self.clear_distinct()
+    self.clear_compile()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -725,6 +790,15 @@
       out.putVarInt32(178)
       out.putVarInt32(self.transaction_.ByteSize())
       self.transaction_.OutputUnchecked(out)
+    if (self.has_count_):
+      out.putVarInt32(184)
+      out.putVarInt32(self.count_)
+    if (self.has_distinct_):
+      out.putVarInt32(192)
+      out.putBoolean(self.distinct_)
+    if (self.has_compile_):
+      out.putVarInt32(200)
+      out.putBoolean(self.compile_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
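
The new tag bytes above follow the standard protocol-buffer wire encoding, tag = (field_number << 3) | wire_type, with wire type 0 (varint) for all three added Query fields:

    assert (23 << 3) | 0 == 184   # count
    assert (24 << 3) | 0 == 192   # distinct
    assert (25 << 3) | 0 == 200   # compile
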
@@ -777,6 +851,15 @@
         d.skip(length)
         self.mutable_transaction().TryMerge(tmp)
         continue
+      if tt == 184:
+        self.set_count(d.getVarInt32())
+        continue
+      if tt == 192:
+        self.set_distinct(d.getBoolean())
+        continue
+      if tt == 200:
+        self.set_compile(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -807,6 +890,7 @@
       res+=prefix+"}\n"
       cnt+=1
     if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
+    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
     if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
     if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
     cnt=0
@@ -823,8 +907,14 @@
       res+=prefix+"transaction <\n"
       res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
+    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp = 1
   kkind = 3
   kancestor = 17
@@ -836,86 +926,981 @@
   kOrderproperty = 10
   kOrderdirection = 11
   khint = 18
+  kcount = 23
   koffset = 12
   klimit = 16
   kcomposite_index = 19
   krequire_perfect_plan = 20
   kkeys_only = 21
   ktransaction = 22
-
-  _TEXT = (
-   "ErrorCode",
-   "app",
-   None,
-   "kind",
-   "Filter",
-   None,
-   "op",
-   None,
-   "search_query",
-   "Order",
-   "property",
-   "direction",
-   "offset",
-   None,
-   "property",
-   None,
-   "limit",
-   "ancestor",
-   "hint",
-   "composite_index",
-   "require_perfect_plan",
-   "keys_only",
-   "transaction",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  kdistinct = 24
+  kcompile = 25
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app",
+    3: "kind",
+    4: "Filter",
+    6: "op",
+    8: "search_query",
+    9: "Order",
+    10: "property",
+    11: "direction",
+    12: "offset",
+    14: "property",
+    16: "limit",
+    17: "ancestor",
+    18: "hint",
+    19: "composite_index",
+    20: "require_perfect_plan",
+    21: "keys_only",
+    22: "transaction",
+    23: "count",
+    24: "distinct",
+    25: "compile",
+  }, 25)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STARTGROUP,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STARTGROUP,
+    10: ProtocolBuffer.Encoder.STRING,
+    11: ProtocolBuffer.Encoder.NUMERIC,
+    12: ProtocolBuffer.Encoder.NUMERIC,
+    14: ProtocolBuffer.Encoder.STRING,
+    16: ProtocolBuffer.Encoder.NUMERIC,
+    17: ProtocolBuffer.Encoder.STRING,
+    18: ProtocolBuffer.Encoder.NUMERIC,
+    19: ProtocolBuffer.Encoder.STRING,
+    20: ProtocolBuffer.Encoder.NUMERIC,
+    21: ProtocolBuffer.Encoder.NUMERIC,
+    22: ProtocolBuffer.Encoder.STRING,
+    23: ProtocolBuffer.Encoder.NUMERIC,
+    24: ProtocolBuffer.Encoder.NUMERIC,
+    25: ProtocolBuffer.Encoder.NUMERIC,
+  }, 25, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
+  has_index_name_ = 0
+  index_name_ = ""
+  has_start_key_ = 0
+  start_key_ = ""
+  has_start_inclusive_ = 0
+  start_inclusive_ = 0
+  has_end_key_ = 0
+  end_key_ = ""
+  has_end_inclusive_ = 0
+  end_inclusive_ = 0
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def index_name(self): return self.index_name_
+
+  def set_index_name(self, x):
+    self.has_index_name_ = 1
+    self.index_name_ = x
+
+  def clear_index_name(self):
+    if self.has_index_name_:
+      self.has_index_name_ = 0
+      self.index_name_ = ""
+
+  def has_index_name(self): return self.has_index_name_
+
+  def start_key(self): return self.start_key_
+
+  def set_start_key(self, x):
+    self.has_start_key_ = 1
+    self.start_key_ = x
+
+  def clear_start_key(self):
+    if self.has_start_key_:
+      self.has_start_key_ = 0
+      self.start_key_ = ""
+
+  def has_start_key(self): return self.has_start_key_
+
+  def start_inclusive(self): return self.start_inclusive_
+
+  def set_start_inclusive(self, x):
+    self.has_start_inclusive_ = 1
+    self.start_inclusive_ = x
+
+  def clear_start_inclusive(self):
+    if self.has_start_inclusive_:
+      self.has_start_inclusive_ = 0
+      self.start_inclusive_ = 0
+
+  def has_start_inclusive(self): return self.has_start_inclusive_
+
+  def end_key(self): return self.end_key_
+
+  def set_end_key(self, x):
+    self.has_end_key_ = 1
+    self.end_key_ = x
+
+  def clear_end_key(self):
+    if self.has_end_key_:
+      self.has_end_key_ = 0
+      self.end_key_ = ""
+
+  def has_end_key(self): return self.has_end_key_
+
+  def end_inclusive(self): return self.end_inclusive_
+
+  def set_end_inclusive(self, x):
+    self.has_end_inclusive_ = 1
+    self.end_inclusive_ = x
+
+  def clear_end_inclusive(self):
+    if self.has_end_inclusive_:
+      self.has_end_inclusive_ = 0
+      self.end_inclusive_ = 0
+
+  def has_end_inclusive(self): return self.has_end_inclusive_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_index_name()): self.set_index_name(x.index_name())
+    if (x.has_start_key()): self.set_start_key(x.start_key())
+    if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
+    if (x.has_end_key()): self.set_end_key(x.end_key())
+    if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_index_name_ != x.has_index_name_: return 0
+    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
+    if self.has_start_key_ != x.has_start_key_: return 0
+    if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
+    if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
+    if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
+    if self.has_end_key_ != x.has_end_key_: return 0
+    if self.has_end_key_ and self.end_key_ != x.end_key_: return 0
+    if self.has_end_inclusive_ != x.has_end_inclusive_: return 0
+    if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
+    if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
+    if (self.has_start_inclusive_): n += 2
+    if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
+    if (self.has_end_inclusive_): n += 2
+    return n + 0
+
+  def Clear(self):
+    self.clear_index_name()
+    self.clear_start_key()
+    self.clear_start_inclusive()
+    self.clear_end_key()
+    self.clear_end_inclusive()
+
+  def OutputUnchecked(self, out):
+    if (self.has_index_name_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.index_name_)
+    if (self.has_start_key_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.start_key_)
+    if (self.has_start_inclusive_):
+      out.putVarInt32(32)
+      out.putBoolean(self.start_inclusive_)
+    if (self.has_end_key_):
+      out.putVarInt32(42)
+      out.putPrefixedString(self.end_key_)
+    if (self.has_end_inclusive_):
+      out.putVarInt32(48)
+      out.putBoolean(self.end_inclusive_)
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 12: break
+      if tt == 18:
+        self.set_index_name(d.getPrefixedString())
+        continue
+      if tt == 26:
+        self.set_start_key(d.getPrefixedString())
+        continue
+      if tt == 32:
+        self.set_start_inclusive(d.getBoolean())
+        continue
+      if tt == 42:
+        self.set_end_key(d.getPrefixedString())
+        continue
+      if tt == 48:
+        self.set_end_inclusive(d.getBoolean())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
+    if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
+    if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
+    if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_))
+    if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_))
+    return res
+
+class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage):
+  has_index_name_ = 0
+  index_name_ = ""
+
+  def __init__(self, contents=None):
+    self.prefix_value_ = []
+    if contents is not None: self.MergeFromString(contents)
+
+  def index_name(self): return self.index_name_
+
+  def set_index_name(self, x):
+    self.has_index_name_ = 1
+    self.index_name_ = x
+
+  def clear_index_name(self):
+    if self.has_index_name_:
+      self.has_index_name_ = 0
+      self.index_name_ = ""
+
+  def has_index_name(self): return self.has_index_name_
+
+  def prefix_value_size(self): return len(self.prefix_value_)
+  def prefix_value_list(self): return self.prefix_value_
+
+  def prefix_value(self, i):
+    return self.prefix_value_[i]
+
+  def set_prefix_value(self, i, x):
+    self.prefix_value_[i] = x
+
+  def add_prefix_value(self, x):
+    self.prefix_value_.append(x)
+
+  def clear_prefix_value(self):
+    self.prefix_value_ = []
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_index_name()): self.set_index_name(x.index_name())
+    for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i))
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_index_name_ != x.has_index_name_: return 0
+    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
+    if len(self.prefix_value_) != len(x.prefix_value_): return 0
+    for e1, e2 in zip(self.prefix_value_, x.prefix_value_):
+      if e1 != e2: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_index_name_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: index_name not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.index_name_))
+    n += 1 * len(self.prefix_value_)
+    for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
+    return n + 1
+
+  def Clear(self):
+    self.clear_index_name()
+    self.clear_prefix_value()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(66)
+    out.putPrefixedString(self.index_name_)
+    for i in xrange(len(self.prefix_value_)):
+      out.putVarInt32(74)
+      out.putPrefixedString(self.prefix_value_[i])
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 60: break
+      if tt == 66:
+        self.set_index_name(d.getPrefixedString())
+        continue
+      if tt == 74:
+        self.add_prefix_value(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
+    cnt=0
+    for e in self.prefix_value_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("prefix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
+      cnt+=1
+    return res
+
+class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage):
+  has_distinct_ = 0
+  distinct_ = 0
+  has_offset_ = 0
+  offset_ = 0
+  has_limit_ = 0
+  limit_ = 0
+  has_kind_ = 0
+  kind_ = ""
+  has_ancestor_ = 0
+  ancestor_ = None
+
+  def __init__(self, contents=None):
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def distinct(self): return self.distinct_
+
+  def set_distinct(self, x):
+    self.has_distinct_ = 1
+    self.distinct_ = x
+
+  def clear_distinct(self):
+    if self.has_distinct_:
+      self.has_distinct_ = 0
+      self.distinct_ = 0
+
+  def has_distinct(self): return self.has_distinct_
+
+  def offset(self): return self.offset_
+
+  def set_offset(self, x):
+    self.has_offset_ = 1
+    self.offset_ = x
+
+  def clear_offset(self):
+    if self.has_offset_:
+      self.has_offset_ = 0
+      self.offset_ = 0
+
+  def has_offset(self): return self.has_offset_
+
+  def limit(self): return self.limit_
+
+  def set_limit(self, x):
+    self.has_limit_ = 1
+    self.limit_ = x
+
+  def clear_limit(self):
+    if self.has_limit_:
+      self.has_limit_ = 0
+      self.limit_ = 0
+
+  def has_limit(self): return self.has_limit_
+
+  def kind(self): return self.kind_
+
+  def set_kind(self, x):
+    self.has_kind_ = 1
+    self.kind_ = x
+
+  def clear_kind(self):
+    if self.has_kind_:
+      self.has_kind_ = 0
+      self.kind_ = ""
+
+  def has_kind(self): return self.has_kind_
+
+  def ancestor(self):
+    if self.ancestor_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.ancestor_ is None: self.ancestor_ = Reference()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.ancestor_
+
+  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
+
+  def clear_ancestor(self):
+    if self.has_ancestor_:
+      self.has_ancestor_ = 0;
+      if self.ancestor_ is not None: self.ancestor_.Clear()
+
+  def has_ancestor(self): return self.has_ancestor_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_distinct()): self.set_distinct(x.distinct())
+    if (x.has_offset()): self.set_offset(x.offset())
+    if (x.has_limit()): self.set_limit(x.limit())
+    if (x.has_kind()): self.set_kind(x.kind())
+    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_distinct_ != x.has_distinct_: return 0
+    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
+    if self.has_offset_ != x.has_offset_: return 0
+    if self.has_offset_ and self.offset_ != x.offset_: return 0
+    if self.has_limit_ != x.has_limit_: return 0
+    if self.has_limit_ and self.limit_ != x.limit_: return 0
+    if self.has_kind_ != x.has_kind_: return 0
+    if self.has_kind_ and self.kind_ != x.kind_: return 0
+    if self.has_ancestor_ != x.has_ancestor_: return 0
+    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    if (self.has_distinct_): n += 2
+    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
+    if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
+    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
+    return n + 0
+
+  def Clear(self):
+    self.clear_distinct()
+    self.clear_offset()
+    self.clear_limit()
+    self.clear_kind()
+    self.clear_ancestor()
+
+  def OutputUnchecked(self, out):
+    if (self.has_distinct_):
+      out.putVarInt32(112)
+      out.putBoolean(self.distinct_)
+    if (self.has_offset_):
+      out.putVarInt32(120)
+      out.putVarInt32(self.offset_)
+    if (self.has_limit_):
+      out.putVarInt32(128)
+      out.putVarInt32(self.limit_)
+    if (self.has_kind_):
+      out.putVarInt32(138)
+      out.putPrefixedString(self.kind_)
+    if (self.has_ancestor_):
+      out.putVarInt32(146)
+      out.putVarInt32(self.ancestor_.ByteSize())
+      self.ancestor_.OutputUnchecked(out)
+
+  def TryMerge(self, d):
+    while 1:
+      tt = d.getVarInt32()
+      if tt == 108: break
+      if tt == 112:
+        self.set_distinct(d.getBoolean())
+        continue
+      if tt == 120:
+        self.set_offset(d.getVarInt32())
+        continue
+      if tt == 128:
+        self.set_limit(d.getVarInt32())
+        continue
+      if tt == 138:
+        self.set_kind(d.getPrefixedString())
+        continue
+      if tt == 146:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_ancestor().TryMerge(tmp)
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
+    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
+    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
+    if self.has_ancestor_:
+      res+=prefix+"ancestor <\n"
+      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+class CompiledQuery(ProtocolBuffer.ProtocolMessage):
+  has_primaryscan_ = 0
+  has_offset_ = 0
+  offset_ = 0
+  has_limit_ = 0
+  limit_ = 0
+  has_keys_only_ = 0
+  keys_only_ = 0
+  has_entityfilter_ = 0
+  entityfilter_ = None
+
+  def __init__(self, contents=None):
+    self.primaryscan_ = CompiledQuery_PrimaryScan()
+    self.mergejoinscan_ = []
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def primaryscan(self): return self.primaryscan_
+
+  def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_
+
+  def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear()
+
+  def has_primaryscan(self): return self.has_primaryscan_
+
+  def mergejoinscan_size(self): return len(self.mergejoinscan_)
+  def mergejoinscan_list(self): return self.mergejoinscan_
+
+  def mergejoinscan(self, i):
+    return self.mergejoinscan_[i]
+
+  def mutable_mergejoinscan(self, i):
+    return self.mergejoinscan_[i]
+
+  def add_mergejoinscan(self):
+    x = CompiledQuery_MergeJoinScan()
+    self.mergejoinscan_.append(x)
+    return x
+
+  def clear_mergejoinscan(self):
+    self.mergejoinscan_ = []
+  def offset(self): return self.offset_
+
+  def set_offset(self, x):
+    self.has_offset_ = 1
+    self.offset_ = x
+
+  def clear_offset(self):
+    if self.has_offset_:
+      self.has_offset_ = 0
+      self.offset_ = 0
+
+  def has_offset(self): return self.has_offset_
+
+  def limit(self): return self.limit_
+
+  def set_limit(self, x):
+    self.has_limit_ = 1
+    self.limit_ = x
+
+  def clear_limit(self):
+    if self.has_limit_:
+      self.has_limit_ = 0
+      self.limit_ = 0
+
+  def has_limit(self): return self.has_limit_
+
+  def keys_only(self): return self.keys_only_
+
+  def set_keys_only(self, x):
+    self.has_keys_only_ = 1
+    self.keys_only_ = x
+
+  def clear_keys_only(self):
+    if self.has_keys_only_:
+      self.has_keys_only_ = 0
+      self.keys_only_ = 0
+
+  def has_keys_only(self): return self.has_keys_only_
+
+  def entityfilter(self):
+    if self.entityfilter_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.entityfilter_
+
+  def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter()
+
+  def clear_entityfilter(self):
+    if self.has_entityfilter_:
+      self.has_entityfilter_ = 0;
+      if self.entityfilter_ is not None: self.entityfilter_.Clear()
+
+  def has_entityfilter(self): return self.has_entityfilter_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan())
+    for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i))
+    if (x.has_offset()): self.set_offset(x.offset())
+    if (x.has_limit()): self.set_limit(x.limit())
+    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+    if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_primaryscan_ != x.has_primaryscan_: return 0
+    if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0
+    if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0
+    for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_):
+      if e1 != e2: return 0
+    if self.has_offset_ != x.has_offset_: return 0
+    if self.has_offset_ and self.offset_ != x.offset_: return 0
+    if self.has_limit_ != x.has_limit_: return 0
+    if self.has_limit_ and self.limit_ != x.limit_: return 0
+    if self.has_keys_only_ != x.has_keys_only_: return 0
+    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+    if self.has_entityfilter_ != x.has_entityfilter_: return 0
+    if self.has_entityfilter_ and self.entityfilter_ != x.entityfilter_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_primaryscan_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: primaryscan not set.')
+    elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0
+    for p in self.mergejoinscan_:
+      if not p.IsInitialized(debug_strs): initialized=0
+    if (not self.has_keys_only_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: keys_only not set.')
+    if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.primaryscan_.ByteSize()
+    n += 2 * len(self.mergejoinscan_)
+    for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize()
+    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
+    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
+    if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize()
+    return n + 4
+
+  def Clear(self):
+    self.clear_primaryscan()
+    self.clear_mergejoinscan()
+    self.clear_offset()
+    self.clear_limit()
+    self.clear_keys_only()
+    self.clear_entityfilter()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(11)
+    self.primaryscan_.OutputUnchecked(out)
+    out.putVarInt32(12)
+    for i in xrange(len(self.mergejoinscan_)):
+      out.putVarInt32(59)
+      self.mergejoinscan_[i].OutputUnchecked(out)
+      out.putVarInt32(60)
+    if (self.has_offset_):
+      out.putVarInt32(80)
+      out.putVarInt32(self.offset_)
+    if (self.has_limit_):
+      out.putVarInt32(88)
+      out.putVarInt32(self.limit_)
+    out.putVarInt32(96)
+    out.putBoolean(self.keys_only_)
+    if (self.has_entityfilter_):
+      out.putVarInt32(107)
+      self.entityfilter_.OutputUnchecked(out)
+      out.putVarInt32(108)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 11:
+        self.mutable_primaryscan().TryMerge(d)
+        continue
+      if tt == 59:
+        self.add_mergejoinscan().TryMerge(d)
+        continue
+      if tt == 80:
+        self.set_offset(d.getVarInt32())
+        continue
+      if tt == 88:
+        self.set_limit(d.getVarInt32())
+        continue
+      if tt == 96:
+        self.set_keys_only(d.getBoolean())
+        continue
+      if tt == 107:
+        self.mutable_entityfilter().TryMerge(d)
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_primaryscan_:
+      res+=prefix+"PrimaryScan {\n"
+      res+=self.primaryscan_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+    cnt=0
+    for e in self.mergejoinscan_:
+      elm=""
+      if printElemNumber: elm="(%d)" % cnt
+      res+=prefix+("MergeJoinScan%s {\n" % elm)
+      res+=e.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+      cnt+=1
+    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
+    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
+    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+    if self.has_entityfilter_:
+      res+=prefix+"EntityFilter {\n"
+      res+=self.entityfilter_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+"}\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kPrimaryScanGroup = 1
+  kPrimaryScanindex_name = 2
+  kPrimaryScanstart_key = 3
+  kPrimaryScanstart_inclusive = 4
+  kPrimaryScanend_key = 5
+  kPrimaryScanend_inclusive = 6
+  kMergeJoinScanGroup = 7
+  kMergeJoinScanindex_name = 8
+  kMergeJoinScanprefix_value = 9
+  koffset = 10
+  klimit = 11
+  kkeys_only = 12
+  kEntityFilterGroup = 13
+  kEntityFilterdistinct = 14
+  kEntityFilteroffset = 15
+  kEntityFilterlimit = 16
+  kEntityFilterkind = 17
+  kEntityFilterancestor = 18
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "PrimaryScan",
+    2: "index_name",
+    3: "start_key",
+    4: "start_inclusive",
+    5: "end_key",
+    6: "end_inclusive",
+    7: "MergeJoinScan",
+    8: "index_name",
+    9: "prefix_value",
+    10: "offset",
+    11: "limit",
+    12: "keys_only",
+    13: "EntityFilter",
+    14: "distinct",
+    15: "offset",
+    16: "limit",
+    17: "kind",
+    18: "ancestor",
+  }, 18)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.NUMERIC,
+    7: ProtocolBuffer.Encoder.STARTGROUP,
+    8: ProtocolBuffer.Encoder.STRING,
+    9: ProtocolBuffer.Encoder.STRING,
+    10: ProtocolBuffer.Encoder.NUMERIC,
+    11: ProtocolBuffer.Encoder.NUMERIC,
+    12: ProtocolBuffer.Encoder.NUMERIC,
+    13: ProtocolBuffer.Encoder.STARTGROUP,
+    14: ProtocolBuffer.Encoder.NUMERIC,
+    15: ProtocolBuffer.Encoder.NUMERIC,
+    16: ProtocolBuffer.Encoder.NUMERIC,
+    17: ProtocolBuffer.Encoder.STRING,
+    18: ProtocolBuffer.Encoder.STRING,
+  }, 18, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
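
CompiledQuery's nested messages are encoded as protocol-buffer groups: instead of a length prefix, each is bracketed by START_GROUP (wire type 3) and END_GROUP (wire type 4) tags, which is where the paired constants in OutputUnchecked and the break conditions in the nested TryMerge methods come from:

    START_GROUP, END_GROUP = 3, 4
    assert (1 << 3) | START_GROUP == 11    # PrimaryScan opens...
    assert (1 << 3) | END_GROUP == 12      # ...and closes
    assert (7 << 3) | START_GROUP == 59    # MergeJoinScan
    assert (7 << 3) | END_GROUP == 60
    assert (13 << 3) | START_GROUP == 107  # EntityFilter
    assert (13 << 3) | END_GROUP == 108
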
+class RunCompiledQueryRequest(ProtocolBuffer.ProtocolMessage):
+  has_compiled_query_ = 0
+  has_original_query_ = 0
+  original_query_ = None
+  has_count_ = 0
+  count_ = 0
+
+  def __init__(self, contents=None):
+    self.compiled_query_ = CompiledQuery()
+    self.lazy_init_lock_ = thread.allocate_lock()
+    if contents is not None: self.MergeFromString(contents)
+
+  def compiled_query(self): return self.compiled_query_
+
+  def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query_
+
+  def clear_compiled_query(self):self.has_compiled_query_ = 0; self.compiled_query_.Clear()
+
+  def has_compiled_query(self): return self.has_compiled_query_
+
+  def original_query(self):
+    if self.original_query_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.original_query_ is None: self.original_query_ = Query()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.original_query_
+
+  def mutable_original_query(self): self.has_original_query_ = 1; return self.original_query()
+
+  def clear_original_query(self):
+    if self.has_original_query_:
+      self.has_original_query_ = 0;
+      if self.original_query_ is not None: self.original_query_.Clear()
+
+  def has_original_query(self): return self.has_original_query_
+
+  def count(self): return self.count_
+
+  def set_count(self, x):
+    self.has_count_ = 1
+    self.count_ = x
+
+  def clear_count(self):
+    if self.has_count_:
+      self.has_count_ = 0
+      self.count_ = 0
+
+  def has_count(self): return self.has_count_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
+    if (x.has_original_query()): self.mutable_original_query().MergeFrom(x.original_query())
+    if (x.has_count()): self.set_count(x.count())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_compiled_query_ != x.has_compiled_query_: return 0
+    if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
+    if self.has_original_query_ != x.has_original_query_: return 0
+    if self.has_original_query_ and self.original_query_ != x.original_query_: return 0
+    if self.has_count_ != x.has_count_: return 0
+    if self.has_count_ and self.count_ != x.count_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_compiled_query_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: compiled_query not set.')
+    elif not self.compiled_query_.IsInitialized(debug_strs): initialized = 0
+    if (self.has_original_query_ and not self.original_query_.IsInitialized(debug_strs)): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.compiled_query_.ByteSize())
+    if (self.has_original_query_): n += 1 + self.lengthString(self.original_query_.ByteSize())
+    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
+    return n + 1
+
+  def Clear(self):
+    self.clear_compiled_query()
+    self.clear_original_query()
+    self.clear_count()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.compiled_query_.ByteSize())
+    self.compiled_query_.OutputUnchecked(out)
+    if (self.has_original_query_):
+      out.putVarInt32(18)
+      out.putVarInt32(self.original_query_.ByteSize())
+      self.original_query_.OutputUnchecked(out)
+    if (self.has_count_):
+      out.putVarInt32(24)
+      out.putVarInt32(self.count_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_compiled_query().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_original_query().TryMerge(tmp)
+        continue
+      if tt == 24:
+        self.set_count(d.getVarInt32())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_compiled_query_:
+      res+=prefix+"compiled_query <\n"
+      res+=self.compiled_query_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_original_query_:
+      res+=prefix+"original_query <\n"
+      res+=self.original_query_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kcompiled_query = 1
+  koriginal_query = 2
+  kcount = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "compiled_query",
+    2: "original_query",
+    3: "count",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
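
Every message added in this changeset exposes the same generated-accessor
surface visible above: scalar fields get X()/set_X()/clear_X()/has_X(),
submessages get mutable_X(), and IsInitialized() reports missing required
fields. A minimal sketch, assuming the SDK's datastore_pb module is on the
path, of driving the new RunCompiledQueryRequest through that surface:

    # Sketch only: exercises the generated accessors defined above.
    from google.appengine.datastore.datastore_pb import RunCompiledQueryRequest

    req = RunCompiledQueryRequest()
    req.set_count(20)                  # optional int32 field, tag 3
    req.mutable_compiled_query()       # marks the required submessage as present
    req.mutable_original_query()       # lazily allocates the optional Query

    errors = []
    req.IsInitialized(errors)          # appends 'Required field ...' strings, if any
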
@@ -1080,30 +2065,30 @@
     if self.has_native_limit_: res+=prefix+("native_limit: %s\n" % self.DebugFormatInt32(self.native_limit_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   knative_ancestor = 1
   knative_index = 2
   knative_offset = 3
   knative_limit = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "native_ancestor",
-   "native_index",
-   "native_offset",
-   "native_limit",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "native_ancestor",
+    2: "native_index",
+    3: "native_offset",
+    4: "native_limit",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1172,18 +2157,21 @@
     if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcursor = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "cursor",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.DOUBLE,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.DOUBLE,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1194,6 +2182,7 @@
   INTERNAL_ERROR =    3
   NEED_INDEX   =    4
   TIMEOUT      =    5
+  PERMISSION_DENIED =    6
 
   _ErrorCode_NAMES = {
     1: "BAD_REQUEST",
@@ -1201,6 +2190,7 @@
     3: "INTERNAL_ERROR",
     4: "NEED_INDEX",
     5: "TIMEOUT",
+    6: "PERMISSION_DENIED",
   }
 
   def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -1245,13 +2235,17 @@
     return res
 
 
-  _TEXT = (
-   "ErrorCode",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-  )
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1399,30 +2393,30 @@
     if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kindex_writes = 1
   kindex_write_bytes = 2
   kentity_writes = 3
   kentity_write_bytes = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "index_writes",
-   "index_write_bytes",
-   "entity_writes",
-   "entity_write_bytes",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index_writes",
+    2: "index_write_bytes",
+    3: "entity_writes",
+    4: "entity_write_bytes",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1547,22 +2541,24 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 1
   ktransaction = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "key",
-   "transaction",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "transaction",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1721,22 +2717,24 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kEntityGroup = 1
   kEntityentity = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "Entity",
-   "entity",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Entity",
+    2: "entity",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1932,30 +2930,30 @@
     if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kentity = 1
   ktransaction = 2
   kcomposite_index = 3
   ktrusted = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "entity",
-   "transaction",
-   "composite_index",
-   "trusted",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "entity",
+    2: "transaction",
+    3: "composite_index",
+    4: "trusted",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2080,22 +3078,24 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 1
   kcost = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "key",
-   "cost",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "key",
+    2: "cost",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2247,35 +3247,27 @@
     if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 6
   ktransaction = 5
   ktrusted = 4
 
-  _TEXT = (
-   "ErrorCode",
-   None,
-   None,
-   None,
-   "trusted",
-   "transaction",
-   "key",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    4: "trusted",
+    5: "transaction",
+    6: "key",
+  }, 6)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+    6: ProtocolBuffer.Encoder.STRING,
+  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2356,25 +3348,30 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcost = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "cost",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cost",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 class NextRequest(ProtocolBuffer.ProtocolMessage):
   has_cursor_ = 0
   has_count_ = 0
-  count_ = 1
+  count_ = 0
+  has_compile_ = 0
+  compile_ = 0
 
   def __init__(self, contents=None):
     self.cursor_ = Cursor()
@@ -2397,15 +3394,29 @@
   def clear_count(self):
     if self.has_count_:
       self.has_count_ = 0
-      self.count_ = 1
+      self.count_ = 0
 
   def has_count(self): return self.has_count_
 
+  def compile(self): return self.compile_
+
+  def set_compile(self, x):
+    self.has_compile_ = 1
+    self.compile_ = x
+
+  def clear_compile(self):
+    if self.has_compile_:
+      self.has_compile_ = 0
+      self.compile_ = 0
+
+  def has_compile(self): return self.has_compile_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
     if (x.has_count()): self.set_count(x.count())
+    if (x.has_compile()): self.set_compile(x.compile())
 
   def Equals(self, x):
     if x is self: return 1
@@ -2413,6 +3424,8 @@
     if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
     if self.has_count_ != x.has_count_: return 0
     if self.has_count_ and self.count_ != x.count_: return 0
+    if self.has_compile_ != x.has_compile_: return 0
+    if self.has_compile_ and self.compile_ != x.compile_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2428,11 +3441,13 @@
     n = 0
     n += self.lengthString(self.cursor_.ByteSize())
     if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
+    if (self.has_compile_): n += 2
     return n + 1
 
   def Clear(self):
     self.clear_cursor()
     self.clear_count()
+    self.clear_compile()
 
   def OutputUnchecked(self, out):
     out.putVarInt32(10)
@@ -2441,6 +3456,9 @@
     if (self.has_count_):
       out.putVarInt32(16)
       out.putVarInt32(self.count_)
+    if (self.has_compile_):
+      out.putVarInt32(24)
+      out.putBoolean(self.compile_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2454,6 +3472,9 @@
       if tt == 16:
         self.set_count(d.getVarInt32())
         continue
+      if tt == 24:
+        self.set_compile(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -2465,24 +3486,30 @@
       res+=self.cursor_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
     if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
+    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcursor = 1
   kcount = 2
-
-  _TEXT = (
-   "ErrorCode",
-   "cursor",
-   "count",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  kcompile = 3
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+    2: "count",
+    3: "compile",
+  }, 3)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2493,6 +3520,8 @@
   more_results_ = 0
   has_keys_only_ = 0
   keys_only_ = 0
+  has_compiled_query_ = 0
+  compiled_query_ = None
 
   def __init__(self, contents=None):
     self.result_ = []
@@ -2559,6 +3588,24 @@
 
   def has_keys_only(self): return self.has_keys_only_
 
+  def compiled_query(self):
+    if self.compiled_query_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.compiled_query_
+
+  def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query()
+
+  def clear_compiled_query(self):
+    if self.has_compiled_query_:
+      self.has_compiled_query_ = 0;
+      if self.compiled_query_ is not None: self.compiled_query_.Clear()
+
+  def has_compiled_query(self): return self.has_compiled_query_
+
 
   def MergeFrom(self, x):
     assert x is not self
@@ -2566,6 +3613,7 @@
     for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
     if (x.has_more_results()): self.set_more_results(x.more_results())
     if (x.has_keys_only()): self.set_keys_only(x.keys_only())
+    if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
 
   def Equals(self, x):
     if x is self: return 1
@@ -2578,6 +3626,8 @@
     if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
     if self.has_keys_only_ != x.has_keys_only_: return 0
     if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
+    if self.has_compiled_query_ != x.has_compiled_query_: return 0
+    if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2589,6 +3639,7 @@
       initialized = 0
       if debug_strs is not None:
         debug_strs.append('Required field: more_results not set.')
+    if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
@@ -2597,6 +3648,7 @@
     n += 1 * len(self.result_)
     for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
     if (self.has_keys_only_): n += 2
+    if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize())
     return n + 2
 
   def Clear(self):
@@ -2604,6 +3656,7 @@
     self.clear_result()
     self.clear_more_results()
     self.clear_keys_only()
+    self.clear_compiled_query()
 
   def OutputUnchecked(self, out):
     if (self.has_cursor_):
@@ -2619,6 +3672,10 @@
     if (self.has_keys_only_):
       out.putVarInt32(32)
       out.putBoolean(self.keys_only_)
+    if (self.has_compiled_query_):
+      out.putVarInt32(42)
+      out.putVarInt32(self.compiled_query_.ByteSize())
+      self.compiled_query_.OutputUnchecked(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2641,6 +3698,12 @@
       if tt == 32:
         self.set_keys_only(d.getBoolean())
         continue
+      if tt == 42:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_compiled_query().TryMerge(tmp)
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -2661,36 +3724,219 @@
       cnt+=1
     if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
     if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
+    if self.has_compiled_query_:
+      res+=prefix+"compiled_query <\n"
+      res+=self.compiled_query_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcursor = 1
   kresult = 2
   kmore_results = 3
   kkeys_only = 4
-
-  _TEXT = (
-   "ErrorCode",
-   "cursor",
-   "result",
-   "more_results",
-   "keys_only",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  kcompiled_query = 5
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cursor",
+    2: "result",
+    3: "more_results",
+    4: "keys_only",
+    5: "compiled_query",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
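
Taken together, the compile additions form a round trip: NextRequest.compile
asks the backend to return a compiled form of the running query,
QueryResult.compiled_query carries it back, and RunCompiledQueryRequest
replays it. A sketch of that wiring, using only accessors defined in this
file (the RPC dispatch itself is elided):

    # Sketch: request a compiled query, then replay it if one came back.
    next_req = NextRequest()
    next_req.set_compile(1)            # bool field, tag 24 on the wire

    result = QueryResult()             # as parsed from the datastore response
    if result.has_compiled_query():
      replay = RunCompiledQueryRequest()
      replay.mutable_compiled_query().CopyFrom(result.compiled_query())
      replay.set_count(100)            # hypothetical batch size
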
+class GetSchemaRequest(ProtocolBuffer.ProtocolMessage):
+  has_app_ = 0
+  app_ = ""
+  has_start_kind_ = 0
+  start_kind_ = ""
+  has_end_kind_ = 0
+  end_kind_ = ""
+  has_properties_ = 0
+  properties_ = 1
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def app(self): return self.app_
+
+  def set_app(self, x):
+    self.has_app_ = 1
+    self.app_ = x
+
+  def clear_app(self):
+    if self.has_app_:
+      self.has_app_ = 0
+      self.app_ = ""
+
+  def has_app(self): return self.has_app_
+
+  def start_kind(self): return self.start_kind_
+
+  def set_start_kind(self, x):
+    self.has_start_kind_ = 1
+    self.start_kind_ = x
+
+  def clear_start_kind(self):
+    if self.has_start_kind_:
+      self.has_start_kind_ = 0
+      self.start_kind_ = ""
+
+  def has_start_kind(self): return self.has_start_kind_
+
+  def end_kind(self): return self.end_kind_
+
+  def set_end_kind(self, x):
+    self.has_end_kind_ = 1
+    self.end_kind_ = x
+
+  def clear_end_kind(self):
+    if self.has_end_kind_:
+      self.has_end_kind_ = 0
+      self.end_kind_ = ""
+
+  def has_end_kind(self): return self.has_end_kind_
+
+  def properties(self): return self.properties_
+
+  def set_properties(self, x):
+    self.has_properties_ = 1
+    self.properties_ = x
+
+  def clear_properties(self):
+    if self.has_properties_:
+      self.has_properties_ = 0
+      self.properties_ = 1
+
+  def has_properties(self): return self.has_properties_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_app()): self.set_app(x.app())
+    if (x.has_start_kind()): self.set_start_kind(x.start_kind())
+    if (x.has_end_kind()): self.set_end_kind(x.end_kind())
+    if (x.has_properties()): self.set_properties(x.properties())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_app_ != x.has_app_: return 0
+    if self.has_app_ and self.app_ != x.app_: return 0
+    if self.has_start_kind_ != x.has_start_kind_: return 0
+    if self.has_start_kind_ and self.start_kind_ != x.start_kind_: return 0
+    if self.has_end_kind_ != x.has_end_kind_: return 0
+    if self.has_end_kind_ and self.end_kind_ != x.end_kind_: return 0
+    if self.has_properties_ != x.has_properties_: return 0
+    if self.has_properties_ and self.properties_ != x.properties_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_app_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: app not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(len(self.app_))
+    if (self.has_start_kind_): n += 1 + self.lengthString(len(self.start_kind_))
+    if (self.has_end_kind_): n += 1 + self.lengthString(len(self.end_kind_))
+    if (self.has_properties_): n += 2
+    return n + 1
+
+  def Clear(self):
+    self.clear_app()
+    self.clear_start_kind()
+    self.clear_end_kind()
+    self.clear_properties()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putPrefixedString(self.app_)
+    if (self.has_start_kind_):
+      out.putVarInt32(18)
+      out.putPrefixedString(self.start_kind_)
+    if (self.has_end_kind_):
+      out.putVarInt32(26)
+      out.putPrefixedString(self.end_kind_)
+    if (self.has_properties_):
+      out.putVarInt32(32)
+      out.putBoolean(self.properties_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        self.set_app(d.getPrefixedString())
+        continue
+      if tt == 18:
+        self.set_start_kind(d.getPrefixedString())
+        continue
+      if tt == 26:
+        self.set_end_kind(d.getPrefixedString())
+        continue
+      if tt == 32:
+        self.set_properties(d.getBoolean())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
+    if self.has_start_kind_: res+=prefix+("start_kind: %s\n" % self.DebugFormatString(self.start_kind_))
+    if self.has_end_kind_: res+=prefix+("end_kind: %s\n" % self.DebugFormatString(self.end_kind_))
+    if self.has_properties_: res+=prefix+("properties: %s\n" % self.DebugFormatBool(self.properties_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kapp = 1
+  kstart_kind = 2
+  kend_kind = 3
+  kproperties = 4
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app",
+    2: "start_kind",
+    3: "end_kind",
+    4: "properties",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 class Schema(ProtocolBuffer.ProtocolMessage):
+  has_more_results_ = 0
+  more_results_ = 0
 
   def __init__(self, contents=None):
     self.kind_ = []
@@ -2712,16 +3958,32 @@
 
   def clear_kind(self):
     self.kind_ = []
+  def more_results(self): return self.more_results_
+
+  def set_more_results(self, x):
+    self.has_more_results_ = 1
+    self.more_results_ = x
+
+  def clear_more_results(self):
+    if self.has_more_results_:
+      self.has_more_results_ = 0
+      self.more_results_ = 0
+
+  def has_more_results(self): return self.has_more_results_
+
 
   def MergeFrom(self, x):
     assert x is not self
     for i in xrange(x.kind_size()): self.add_kind().CopyFrom(x.kind(i))
+    if (x.has_more_results()): self.set_more_results(x.more_results())
 
   def Equals(self, x):
     if x is self: return 1
     if len(self.kind_) != len(x.kind_): return 0
     for e1, e2 in zip(self.kind_, x.kind_):
       if e1 != e2: return 0
+    if self.has_more_results_ != x.has_more_results_: return 0
+    if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
@@ -2734,16 +3996,21 @@
     n = 0
     n += 1 * len(self.kind_)
     for i in xrange(len(self.kind_)): n += self.lengthString(self.kind_[i].ByteSize())
+    if (self.has_more_results_): n += 2
     return n + 0
 
   def Clear(self):
     self.clear_kind()
+    self.clear_more_results()
 
   def OutputUnchecked(self, out):
     for i in xrange(len(self.kind_)):
       out.putVarInt32(10)
       out.putVarInt32(self.kind_[i].ByteSize())
       self.kind_[i].OutputUnchecked(out)
+    if (self.has_more_results_):
+      out.putVarInt32(16)
+      out.putBoolean(self.more_results_)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -2754,6 +4021,9 @@
         d.skip(length)
         self.add_kind().TryMerge(tmp)
         continue
+      if tt == 16:
+        self.set_more_results(d.getBoolean())
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -2768,20 +4038,264 @@
       res+=e.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
       cnt+=1
+    if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkind = 1
-
-  _TEXT = (
-   "ErrorCode",
-   "kind",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  kmore_results = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "kind",
+    2: "more_results",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
+  has_model_key_ = 0
+  has_size_ = 0
+  size_ = 0
+
+  def __init__(self, contents=None):
+    self.model_key_ = Reference()
+    if contents is not None: self.MergeFromString(contents)
+
+  def model_key(self): return self.model_key_
+
+  def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key_
+
+  def clear_model_key(self):self.has_model_key_ = 0; self.model_key_.Clear()
+
+  def has_model_key(self): return self.has_model_key_
+
+  def size(self): return self.size_
+
+  def set_size(self, x):
+    self.has_size_ = 1
+    self.size_ = x
+
+  def clear_size(self):
+    if self.has_size_:
+      self.has_size_ = 0
+      self.size_ = 0
+
+  def has_size(self): return self.has_size_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key())
+    if (x.has_size()): self.set_size(x.size())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_model_key_ != x.has_model_key_: return 0
+    if self.has_model_key_ and self.model_key_ != x.model_key_: return 0
+    if self.has_size_ != x.has_size_: return 0
+    if self.has_size_ and self.size_ != x.size_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_model_key_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: model_key not set.')
+    elif not self.model_key_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_size_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: size not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.model_key_.ByteSize())
+    n += self.lengthVarInt64(self.size_)
+    return n + 2
+
+  def Clear(self):
+    self.clear_model_key()
+    self.clear_size()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.model_key_.ByteSize())
+    self.model_key_.OutputUnchecked(out)
+    out.putVarInt32(16)
+    out.putVarInt64(self.size_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_model_key().TryMerge(tmp)
+        continue
+      if tt == 16:
+        self.set_size(d.getVarInt64())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_model_key_:
+      res+=prefix+"model_key <\n"
+      res+=self.model_key_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kmodel_key = 1
+  ksize = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "model_key",
+    2: "size",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
+  has_start_ = 0
+  start_ = 0
+  has_end_ = 0
+  end_ = 0
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def start(self): return self.start_
+
+  def set_start(self, x):
+    self.has_start_ = 1
+    self.start_ = x
+
+  def clear_start(self):
+    if self.has_start_:
+      self.has_start_ = 0
+      self.start_ = 0
+
+  def has_start(self): return self.has_start_
+
+  def end(self): return self.end_
+
+  def set_end(self, x):
+    self.has_end_ = 1
+    self.end_ = x
+
+  def clear_end(self):
+    if self.has_end_:
+      self.has_end_ = 0
+      self.end_ = 0
+
+  def has_end(self): return self.has_end_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_start()): self.set_start(x.start())
+    if (x.has_end()): self.set_end(x.end())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_start_ != x.has_start_: return 0
+    if self.has_start_ and self.start_ != x.start_: return 0
+    if self.has_end_ != x.has_end_: return 0
+    if self.has_end_ and self.end_ != x.end_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_start_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: start not set.')
+    if (not self.has_end_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: end not set.')
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthVarInt64(self.start_)
+    n += self.lengthVarInt64(self.end_)
+    return n + 2
+
+  def Clear(self):
+    self.clear_start()
+    self.clear_end()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(8)
+    out.putVarInt64(self.start_)
+    out.putVarInt32(16)
+    out.putVarInt64(self.end_)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_start(d.getVarInt64())
+        continue
+      if tt == 16:
+        self.set_end(d.getVarInt64())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_))
+    if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_))
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kstart = 1
+  kend = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "start",
+    2: "end",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
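
The AllocateIds pair is new API surface in this release, and it round-trips
through the usual ProtocolMessage plumbing. A sketch, assuming Encode() from
the shared ProtocolBuffer base class (the contents= parsing path is visible
in the generated __init__ above, and the Reference/Path accessor names follow
the generated pattern in entity_pb):

    # Sketch: build, serialize and re-parse an AllocateIdsRequest.
    req = AllocateIdsRequest()
    req.mutable_model_key().set_app('myapp')   # Reference.app, required
    elem = req.mutable_model_key().mutable_path().add_element()
    elem.set_type('Greeting')                  # hypothetical entity kind
    req.set_size(10)                           # reserve a block of 10 ids

    wire = req.Encode()                        # raises if a required field is unset
    parsed = AllocateIdsRequest(wire)          # __init__ calls MergeFromString
    assert parsed.Equals(req)
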
@@ -2865,18 +4379,198 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kindex = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "index",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class ActionRequest(ProtocolBuffer.ProtocolMessage):
+  has_transaction_ = 0
+  has_action_ = 0
+
+  def __init__(self, contents=None):
+    self.transaction_ = Transaction()
+    self.action_ = Action()
+    if contents is not None: self.MergeFromString(contents)
+
+  def transaction(self): return self.transaction_
+
+  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_
+
+  def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear()
+
+  def has_transaction(self): return self.has_transaction_
+
+  def action(self): return self.action_
+
+  def mutable_action(self): self.has_action_ = 1; return self.action_
+
+  def clear_action(self):self.has_action_ = 0; self.action_.Clear()
+
+  def has_action(self): return self.has_action_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+    if (x.has_action()): self.mutable_action().MergeFrom(x.action())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_transaction_ != x.has_transaction_: return 0
+    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+    if self.has_action_ != x.has_action_: return 0
+    if self.has_action_ and self.action_ != x.action_: return 0
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_transaction_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: transaction not set.')
+    elif not self.transaction_.IsInitialized(debug_strs): initialized = 0
+    if (not self.has_action_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: action not set.')
+    elif not self.action_.IsInitialized(debug_strs): initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    n += self.lengthString(self.transaction_.ByteSize())
+    n += self.lengthString(self.action_.ByteSize())
+    return n + 2
+
+  def Clear(self):
+    self.clear_transaction()
+    self.clear_action()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(10)
+    out.putVarInt32(self.transaction_.ByteSize())
+    self.transaction_.OutputUnchecked(out)
+    out.putVarInt32(18)
+    out.putVarInt32(self.action_.ByteSize())
+    self.action_.OutputUnchecked(out)
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 10:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_transaction().TryMerge(tmp)
+        continue
+      if tt == 18:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_action().TryMerge(tmp)
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_transaction_:
+      res+=prefix+"transaction <\n"
+      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_action_:
+      res+=prefix+"action <\n"
+      res+=self.action_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  ktransaction = 1
+  kaction = 2
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "transaction",
+    2: "action",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class ActionResponse(ProtocolBuffer.ProtocolMessage):
+
+  def __init__(self, contents=None):
+    pass
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+
+  def Equals(self, x):
+    if x is self: return 1
+    return 1
+
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    return initialized
+
+  def ByteSize(self):
+    n = 0
+    return n + 0
+
+  def Clear(self):
+    pass
+
+  def OutputUnchecked(self, out):
+    pass
+
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    return res
+
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+  }, 0)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2957,20 +4651,23 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kcost = 1
 
-  _TEXT = (
-   "ErrorCode",
-   "cost",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "cost",
+  }, 1)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['Transaction','Query','Query_Filter','Query_Order','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','Schema','CompositeIndices','CommitResponse']
+__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','RunCompiledQueryRequest','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','GetSchemaRequest','Schema','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','ActionRequest','ActionResponse','CommitResponse']
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/datastore/datastore_v3_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""The Python datastore protocol buffer definition.
+
+The Proto2 compiler expects generated file names to follow a specific pattern,
+which is not the case for datastore_pb.py (it should be datastore_v3_pb.py).
+This file, which has the expected name, redirects to the real legacy file.
+"""
+
+
+from google.appengine.datastore.datastore_pb import *
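
Because the redirect is a star import, both module names resolve to the very
same class objects; a quick sketch of what that guarantees:

    # Sketch: the proto2-style name and the legacy name expose identical objects.
    from google.appengine.datastore import datastore_pb, datastore_v3_pb

    assert datastore_v3_pb.Query is datastore_pb.Query
    assert datastore_v3_pb.AllocateIdsRequest is datastore_pb.AllocateIdsRequest
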
--- a/thirdparty/google_appengine/google/appengine/datastore/entity_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/datastore/entity_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -783,6 +783,10 @@
       res+=prefix+"}\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kint64Value = 1
   kbooleanValue = 2
   kstringValue = 3
@@ -803,70 +807,51 @@
   kReferenceValuePathElementid = 16
   kReferenceValuePathElementname = 17
 
-  _TEXT = (
-   "ErrorCode",
-   "int64Value",
-   "booleanValue",
-   "stringValue",
-   "doubleValue",
-   "PointValue",
-   "x",
-   "y",
-   "UserValue",
-   "email",
-   "auth_domain",
-   "nickname",
-   "ReferenceValue",
-   "app",
-   "PathElement",
-   "type",
-   "id",
-   "name",
-   "gaiaid",
-   "obfuscated_gaiaid",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.DOUBLE,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "int64Value",
+    2: "booleanValue",
+    3: "stringValue",
+    4: "doubleValue",
+    5: "PointValue",
+    6: "x",
+    7: "y",
+    8: "UserValue",
+    9: "email",
+    10: "auth_domain",
+    11: "nickname",
+    12: "ReferenceValue",
+    13: "app",
+    14: "PathElement",
+    15: "type",
+    16: "id",
+    17: "name",
+    18: "gaiaid",
+    19: "obfuscated_gaiaid",
+  }, 19)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.DOUBLE,
+    5: ProtocolBuffer.Encoder.STARTGROUP,
+    6: ProtocolBuffer.Encoder.DOUBLE,
+    7: ProtocolBuffer.Encoder.DOUBLE,
+    8: ProtocolBuffer.Encoder.STARTGROUP,
+    9: ProtocolBuffer.Encoder.STRING,
+    10: ProtocolBuffer.Encoder.STRING,
+    11: ProtocolBuffer.Encoder.STRING,
+    12: ProtocolBuffer.Encoder.STARTGROUP,
+    13: ProtocolBuffer.Encoder.STRING,
+    14: ProtocolBuffer.Encoder.STARTGROUP,
+    15: ProtocolBuffer.Encoder.STRING,
+    16: ProtocolBuffer.Encoder.NUMERIC,
+    17: ProtocolBuffer.Encoder.STRING,
+    18: ProtocolBuffer.Encoder.NUMERIC,
+    19: ProtocolBuffer.Encoder.STRING,
+  }, 19, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -888,6 +873,7 @@
   GD_PHONENUMBER =   11
   GD_POSTALADDRESS =   12
   GD_RATING    =   13
+  BLOBKEY      =   17
 
   _Meaning_NAMES = {
     14: "BLOB",
@@ -906,6 +892,7 @@
     11: "GD_PHONENUMBER",
     12: "GD_POSTALADDRESS",
     13: "GD_RATING",
+    17: "BLOBKEY",
   }
 
   def Meaning_Name(cls, x): return cls._Meaning_NAMES.get(x, "")
@@ -1092,34 +1079,33 @@
     if self.has_multiple_: res+=prefix+("multiple: %s\n" % self.DebugFormatBool(self.multiple_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kmeaning = 1
   kmeaning_uri = 2
   kname = 3
   kvalue = 5
   kmultiple = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "meaning",
-   "meaning_uri",
-   "name",
-   "multiple",
-   "value",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "meaning",
+    2: "meaning_uri",
+    3: "name",
+    4: "multiple",
+    5: "value",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1321,30 +1307,30 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kElementGroup = 1
   kElementtype = 2
   kElementid = 3
   kElementname = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "Element",
-   "type",
-   "id",
-   "name",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Element",
+    2: "type",
+    3: "id",
+    4: "name",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1447,58 +1433,24 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp = 13
   kpath = 14
 
-  _TEXT = (
-   "ErrorCode",
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   "app",
-   "path",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    13: "app",
+    14: "path",
+  }, 14)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    13: ProtocolBuffer.Encoder.STRING,
+    14: ProtocolBuffer.Encoder.STRING,
+  }, 14, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -1682,34 +1634,33 @@
     if self.has_obfuscated_gaiaid_: res+=prefix+("obfuscated_gaiaid: %s\n" % self.DebugFormatString(self.obfuscated_gaiaid_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kemail = 1
   kauth_domain = 2
   knickname = 3
   kgaiaid = 4
   kobfuscated_gaiaid = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "email",
-   "auth_domain",
-   "nickname",
-   "gaiaid",
-   "obfuscated_gaiaid",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "email",
+    2: "auth_domain",
+    3: "nickname",
+    4: "gaiaid",
+    5: "obfuscated_gaiaid",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2012,6 +1963,10 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kkey = 13
   kentity_group = 16
   kowner = 17
@@ -2020,64 +1975,27 @@
   kproperty = 14
   kraw_property = 15
 
-  _TEXT = (
-   "ErrorCode",
-   None,
-   None,
-   None,
-   "kind",
-   "kind_uri",
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   None,
-   "key",
-   "property",
-   "raw_property",
-   "entity_group",
-   "owner",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.MAX_TYPE,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    4: "kind",
+    5: "kind_uri",
+    13: "key",
+    14: "property",
+    15: "raw_property",
+    16: "entity_group",
+    17: "owner",
+  }, 17)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.STRING,
+    13: ProtocolBuffer.Encoder.STRING,
+    14: ProtocolBuffer.Encoder.STRING,
+    15: ProtocolBuffer.Encoder.STRING,
+    16: ProtocolBuffer.Encoder.STRING,
+    17: ProtocolBuffer.Encoder.STRING,
+  }, 17, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2182,22 +2100,24 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kindex_id = 1
   kvalue = 2
 
-  _TEXT = (
-   "ErrorCode",
-   "index_id",
-   "value",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "index_id",
+    2: "value",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2443,34 +2363,33 @@
       cnt+=1
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kentity_type = 1
   kancestor = 5
   kPropertyGroup = 2
   kPropertyname = 3
   kPropertydirection = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "entity_type",
-   "Property",
-   "name",
-   "direction",
-   "ancestor",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "entity_type",
+    2: "Property",
+    3: "name",
+    4: "direction",
+    5: "ancestor",
+  }, 5)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STARTGROUP,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+    5: ProtocolBuffer.Encoder.NUMERIC,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -2649,30 +2568,30 @@
     if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kapp_id = 1
   kid = 2
   kdefinition = 3
   kstate = 4
 
-  _TEXT = (
-   "ErrorCode",
-   "app_id",
-   "id",
-   "definition",
-   "state",
-  )
-
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.NUMERIC,
-
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "app_id",
+    2: "id",
+    3: "definition",
+    4: "state",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.NUMERIC,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.NUMERIC,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
--- a/thirdparty/google_appengine/google/appengine/dist/_library.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/dist/_library.py	Sun Sep 06 23:31:53 2009 +0200
@@ -75,7 +75,10 @@
 
 PACKAGES = {
     'django': (DjangoVersion,
-               {'1.0': None, '0.96': None}),
+               {'0.96': None,
+                '1.0': None,
+                '1.1': None,
+                }),
 
 
 
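
Registering '1.1' in PACKAGES is what lets applications pin that Django
release through the SDK's version-selection helper. A sketch of the call an
app makes before its first django import:

    # Sketch: select the newly supported Django 1.1 before importing it.
    from google.appengine.dist import use_library
    use_library('django', '1.1')

    import django
    assert django.VERSION[:2] == (1, 1)
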
--- a/thirdparty/google_appengine/google/appengine/dist/httplib.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/dist/httplib.py	Sun Sep 06 23:31:53 2009 +0200
@@ -171,7 +171,7 @@
     self._url = selector
 
   def putheader(self, header, *lines):
-    line = '\r\n\t'.join(lines)
+    line = '\r\n\t'.join([str(line) for line in lines])
     self.headers.append((header, line))
 
   def endheaders(self):
@@ -295,7 +295,7 @@
 
   def putheader(self, header, *values):
     "The superclass allows only one value argument."
-    self._conn.putheader(header, '\r\n\t'.join(values))
+    self._conn.putheader(header, '\r\n\t'.join([str(v) for v in values]))
 
   def getreply(self):
     """Compat definition since superclass does not define it.
--- a/thirdparty/google_appengine/google/appengine/dist/py_zipimport.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/dist/py_zipimport.py	Sun Sep 06 23:31:53 2009 +0200
@@ -220,11 +220,14 @@
 
   def get_data(self, fullpath):
     """Return (binary) content of a data file in the zipfile."""
-    required_prefix = os.path.join(self.archive, '')
-    if not fullpath.startswith(required_prefix):
-      raise IOError('Path %r doesn\'t start with zipfile name %r' %
-                    (fullpath, required_prefix))
-    relpath = fullpath[len(required_prefix):]
+    prefix = os.path.join(self.archive, '')
+    if fullpath.startswith(prefix):
+      relpath = fullpath[len(prefix):]
+    elif os.path.isabs(fullpath):
+      raise IOError('Absolute path %r doesn\'t start with zipfile name %r' %
+                    (fullpath, prefix))
+    else:
+      relpath = fullpath
     try:
       return self.zipfile.read(relpath)
     except KeyError:
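
The rewritten get_data still rejects absolute paths that fall outside the
archive, but now also accepts archive-relative names. A small sketch that
mirrors the new decision logic (demo.zip is a hypothetical archive path):

    import os

    def resolve(archive, fullpath):
        # Maps an incoming path to an in-zip member name, as get_data now does.
        prefix = os.path.join(archive, '')
        if fullpath.startswith(prefix):
            return fullpath[len(prefix):]
        if os.path.isabs(fullpath):
            raise IOError('path is outside the zipfile')
        return fullpath

    print resolve('/base/demo.zip', '/base/demo.zip/pkg/data.txt')  # pkg/data.txt
    print resolve('/base/demo.zip', 'pkg/data.txt')                 # pkg/data.txt
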
--- a/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -119,6 +119,8 @@
       'interactive_execute_path': base_path + InteractiveExecuteHandler.PATH,
       'memcache_path': base_path + MemcachePageHandler.PATH,
       'queues_path': base_path + QueuesPageHandler.PATH,
+      'xmpp_path': base_path + XMPPPageHandler.PATH,
+      'inboundmail_path': base_path + InboundMailPageHandler.PATH,
     }
     if HAVE_CRON:
       values['cron_path'] = base_path + CronPageHandler.PATH
@@ -248,6 +250,34 @@
     self.generate('cron.html', values)
 
 
+class XMPPPageHandler(BaseRequestHandler):
+  """Tests XMPP requests."""
+  PATH = '/xmpp'
+
+  def get(self):
+    """Shows template displaying the XMPP."""
+    xmpp_configured = True
+    values = {
+      'xmpp_configured': xmpp_configured,
+      'request': self.request
+    }
+    self.generate('xmpp.html', values)
+
+
+class InboundMailPageHandler(BaseRequestHandler):
+  """Tests Mail requests."""
+  PATH = '/inboundmail'
+
+  def get(self):
+    """Shows template displaying the Inbound Mail form."""
+    inboundmail_configured = True
+    values = {
+      'inboundmail_configured': inboundmail_configured,
+      'request': self.request
+    }
+    self.generate('inboundmail.html', values)
+
+
 class QueuesPageHandler(BaseRequestHandler):
   """Shows information about configured (and default) task queues."""
   PATH = '/queues'
@@ -1214,7 +1244,12 @@
 
 
 def _ParseCronYaml():
-  """Load the cron.yaml file and parse it."""
+  """Loads the cron.yaml file and parses it.
+
+  The CWD of the dev_appserver is the root of the application here.
+
+  Returns a dict representing the contents of cron.yaml.
+  """
   cronyaml_files = 'cron.yaml', 'cron.yml'
   for cronyaml in cronyaml_files:
     try:
@@ -1240,6 +1275,8 @@
     ('.*' + ImageHandler.PATH, ImageHandler),
     ('.*' + QueuesPageHandler.PATH, QueuesPageHandler),
     ('.*' + TasksPageHandler.PATH, TasksPageHandler),
+    ('.*' + XMPPPageHandler.PATH, XMPPPageHandler),
+    ('.*' + InboundMailPageHandler.PATH, InboundMailPageHandler),
     ('.*', DefaultPageHandler),
   ]
   if HAVE_CRON:
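
The two new pages follow the console's one-class-per-page pattern: a
BaseRequestHandler subclass with a PATH constant plus an entry in the URL map
in main(). A hedged sketch of what a further page would look like
(FlushCachePageHandler and flushcache.html are hypothetical):

    class FlushCachePageHandler(BaseRequestHandler):
      """Renders a custom page in the dev-console (illustrative only)."""
      PATH = '/flushcache'

      def get(self):
        # generate() renders a template from ext/admin/templates/.
        self.generate('flushcache.html', {'request': self.request})

    # ...and one more pair in the URL map:
    #   ('.*' + FlushCachePageHandler.PATH, FlushCachePageHandler),
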
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/base.html	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/base.html	Sun Sep 06 23:31:53 2009 +0200
@@ -10,29 +10,29 @@
   </head>
   <body {% block bodyattributes %}{% endblock %}>
     <div class="g-doc">
-    
+
     <div id="hd" class="g-section">
 
       <div class="g-section">
         <img id="ae-logo" src="./images/google.gif" width="153" height="47"
          alt="Google App Engine"/>
       </div>
-      
+
       <div id="ae-appbar-lrg" class="g-section">
         <h1>{{ application_name }} Development Console</h1>
       </div>
-      
+
     </div>
-    
-    
+
+
     <div id="bd" class="g-section">
-    
+
       <div class="g-section g-tpl-160">
-    
+
         <div id="ae-lhs-nav" class="g-unit g-first">
-    
+
           <div id="ae-nav" class="g-c">
-        
+
             <ul id="menu">
               <li><a href="{{ datastore_path }}">Datastore Viewer</a></li>
               <li><a href="{{ interactive_path }}">Interactive Console</a></li>
@@ -41,18 +41,22 @@
               {% if cron_path %}
               <li><a href="{{ cron_path }}">Cron Jobs</a></li>
               {% endif %}
+              <li><a href="{{ xmpp_path }}">XMPP</a></li>
+              {% comment %}
+              <li><a href="{{ inboundmail_path }}">Inbound Mail</a></li>
+              {% endcomment %}
             </ul>
-        
+
           </div>
-        
+
         </div>
-        
+
         <div id="ae-content" class="g-unit">
           {% block body %}{% endblock %}
         </div>
-    
+
     </div>
-    
+
         <div id="ft">
           <p>
             &copy;2009 Google
@@ -83,7 +87,7 @@
     function makeSelected(e) {
       e.className = "ae-nav-selected";
     }
-    
+
     walk(document.getElementById("menu"), isCurrentLink, makeSelected);
 
     //]]>
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/ae.css	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/ae.css	Sun Sep 06 23:31:53 2009 +0200
@@ -151,4 +151,20 @@
 }
 .ae-table .ae-pager {
   background-color: #c5d7ef;
-}
\ No newline at end of file
+}
+
+.ae-errorbox {
+  border: 1px solid #f00;
+  background-color: #fee;
+  margin-bottom: 1em;
+  padding: 1em;
+  display: inline-block;
+}
+
+.ae-message {
+  border: 1px solid #e5ecf9;
+  background-color: #f6f9ff;
+  margin-bottom: 1em;
+  padding: 1em;
+  display: inline-block;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/inboundmail.css	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,19 @@
+#inboundmail label {
+  display: block;
+  font-weight: bold;
+}
+#inboundmail legend {
+  font-weight: bold;
+}
+#inboundmail .radio label {
+  display: inline;
+  font-weight: normal;
+}
+
+#inboundmail fieldset,
+#inboundmail .fieldset {
+  margin-bottom: 8px;
+}
+#inboundmail-submit {
+  margin-top: 2em;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/css/xmpp.css	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,19 @@
+#xmpp label {
+  display: block;
+  font-weight: bold;
+}
+#xmpp legend {
+  font-weight: bold;
+}
+#xmpp .radio label {
+  display: inline;
+  font-weight: normal;
+}
+
+#xmpp fieldset,
+#xmpp .fieldset {
+  margin-bottom: 8px;
+}
+#xmpp-submit {
+  margin-top: 2em;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/inboundmail.html	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,158 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - Inbound Mail{% endblock %}
+
+{% block breadcrumbs %}
+  <span class="item"><a href="">Email</a></span>
+{% endblock %}
+
+{% block head %}
+
+  <style type="text/css">{% include "css/inboundmail.css" %}</style>
+  <script type="text/javascript">
+    {% include "js/webhook.js" %}
+    {% include "js/multipart_form_data.js" %}
+    {% include "js/rfc822_date.js" %}
+
+    var feedbackEl;
+    var formEl;
+    var payloadEl;
+    var fromEl;
+    var toEl;
+    var ccEl;
+    var subjectEl;
+    var bodyEl;
+    var contentLengthEl;
+    //var contentTypeEl;
+
+    var sendInboundMailWebhook = function() {
+
+      if (!feedbackEl) {
+        feedbackEl = document.getElementById('feedback');
+        formEl = document.getElementById('inboundmail-form');
+        fromEl = document.getElementById('from');
+        toEl = document.getElementById('to');
+        ccEl = document.getElementById('cc');
+        subjectEl = document.getElementById('subject');
+        bodyEl = document.getElementById('body');
+        payloadEl = document.getElementById('payload');
+        contentLengthEl = document.getElementById('content-length');
+      }
+
+      var from = fromEl.value;
+      var to = toEl.value;
+      var cc = ccEl.value;
+      var subject = subjectEl.value;
+      var body = bodyEl.value;
+
+      if (!to || !from || !body) {
+        feedbackEl.className = 'ae-errorbox';
+        feedbackEl.innerHTML = 'From, To and Message body are required.';
+        return;
+      }
+
+      feedbackEl.className = 'ae-message';
+      feedbackEl.innerHTML = 'Sending mail message...';
+
+      var mpfd = new MultipartFormData();
+      mpfd.addHeader('MIME-Version', '1.0');
+      mpfd.addHeader('Date', RFC822Date.format(new Date()));
+      mpfd.addHeader('From', from);
+      mpfd.addHeader('To', to);
+      if (cc) {
+        mpfd.addHeader('Cc', cc);
+      }
+      mpfd.addHeader('Subject', subject);
+      mpfd.addHeader('Content-Type', 'multipart/alternative; ' +
+          'boundary=' + mpfd.boundary);
+      mpfd.addPart(null, body, 'text/plain; charset=UTF-8');
+      mpfd.addPart(null, body, 'text/html; charset=UTF-8');
+
+      payloadEl.value = mpfd.toString();
+
+      contentLengthEl.value = payloadEl.value.length;
+
+      formEl.action = '/_ah/mail/' + escape(to);
+
+      (new Webhook('inboundmail-form')).run(handleInboundMailResult);
+
+      // Prevents actual form posts.
+      return false;
+    };
+
+    var handleInboundMailResult = function(hook, req, error) {
+      if (error != null || req == null || req.status != 200) {
+        feedbackEl.className = 'ae-errorbox';
+        feedbackEl.innerHTML = 'Message send failure<br>' +
+            req.responseText;
+      } else {
+        var timestamp;
+        var dateString = new Date().toString();
+        var match = dateString.match(/(\d\d:\d\d:\d\d).+\((.+)\)/);
+        if (!match || !match[0] || !match[2]) {
+          timestamp = dateString;
+        } else {
+          timestamp = match[1] + ' ' + match[2];
+        }
+
+        feedbackEl.className = 'ae-message';
+        feedbackEl.innerHTML = 'Message has been sent at ' + timestamp;
+      }
+    };
+
+  </script>
+{% endblock %}
+
+{% block body %}
+<div id="inboundmail">
+  <h3>Email</h3>
+  {% if inboundmail_configured %}{% else %}
+    <div class="ae-errorbox">
+      Inbound mail is not yet configured properly in your app.yaml, in the services section.
+    </div>
+  {% endif %}
+  <div id="feedback"></div>
+  <form id="inboundmail-form"
+    action="/_ah/mail/" method="post"
+    onsubmit="sendInboundMailWebhook(); return false">
+
+    <input type="hidden" name="payload" id="payload">
+    <input type="hidden" id="content-type" name="header:Content-Type" value="message/rfc822">
+    <input type="hidden" id="content-length" name="header:Content-Length">
+
+    <div class="fieldset">
+      <label for="from">From:</label>
+      <input type="text" id="from" name="from" size="40">
+    </div>
+
+    <div class="fieldset">
+      <label for="to">To:</label>
+      <input type="text" id="to" name="to" size="40">
+    </div>
+
+    <div class="fieldset">
+      <label for="cc">Cc:</label>
+      <input type="text" id="cc" name="cc" size="40">
+    </div>
+
+    <div class="fieldset">
+      <label for="subject">Subject:</label>
+      <input type="text" id="subject" name="subject" size="40">
+    </div>
+
+    <div id="body-c" class="fieldset">
+      <label for="body">Message body (plain text):</label>
+      <textarea id="body" name="body" rows="10" cols="50"></textarea>
+    </div>
+
+    <div id="inboundmail-submit">
+      <input type="submit" value="Send Email">
+    </div>
+
+  </form>
+</div>
+
+{% endblock %}
+
+{% block final %}
+{% endblock %}
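
Behind the form, the webhook POSTs an RFC 822 message to /_ah/mail/<recipient>
with the header:* fields applied. A hedged sketch doing the same from Python,
assuming a dev_appserver listening on localhost:8080:

    import urllib2

    message = ('From: bob@example.com\r\n'
               'To: app@example.com\r\n'
               'Subject: hi\r\n'
               '\r\n'
               'hello from outside the form')
    req = urllib2.Request('http://localhost:8080/_ah/mail/app%40example.com',
                          message, {'Content-Type': 'message/rfc822'})
    urllib2.urlopen(req)
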
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/js/multipart_form_data.js	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,125 @@
+// Copyright 2009 Google Inc.  All Rights Reserved.
+
+/**
+ * A multipart form data construction class for XHR.
+ * @see http://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
+ * @constructor
+ */
+var MultipartFormData = function() {
+  /**
+   * @type {Array}
+   */
+  this.headers = [];
+
+  /**
+   * @type {Array}
+   */
+  this.parts = [];
+
+  /**
+   * A random string for the boundary.
+   * @type {string}
+   */
+  this.boundary = MultipartFormData.getRandomBoundary();
+};
+
+
+/**
+ * @type {string}
+ */
+MultipartFormData.CRLF = '\r\n';
+
+
+/**
+ * @type {string}
+ * @private
+ */
+MultipartFormData.TEN_CHARS_ = 'DiStRIcT10';  // assumed value: the ten-char digit lookup used below
+
+
+/**
+ * Generates a random number and some random characters from it.
+ */
+MultipartFormData.getRandomBoundary = function() {
+  var anyTenCharacters = MultipartFormData.TEN_CHARS_;
+  var randomNumber = Math.floor(Math.random() * 10000000);
+  var nums = randomNumber.toString().split('');
+  var randomChars = '';
+  for (var i = 0, num; num = nums[i]; i++) {
+    randomChars += anyTenCharacters[num];
+  }
+  return randomChars + '-' + randomNumber;
+};
+
+
+/**
+ * @param {string} name The name for this header.
+ * @param {string} value The value for this header.
+ */
+MultipartFormData.prototype.addHeader = function(name, value) {
+  this.headers.push({
+    'name': name,
+    'value': value
+  });
+};
+
+
+/**
+ * @param {?string} name The name for this part.
+ * @param {string} value The value for this part.
+ * @param {string} opt_contentType Content-type for this part.
+ * @param {string} opt_contentDisposition Content disposition for this part.
+ * @param {string} opt_filename The filename for this part.
+ */
+MultipartFormData.prototype.addPart = function(name, value, opt_contentType,
+    opt_contentDisposition, opt_filename) {
+  var contentType = opt_contentType || null;
+  var contentDisposition = opt_contentDisposition || null;
+  var filename = opt_filename || null;
+  this.parts.push({
+    'name': name,
+    'value': value,
+    'contentType': contentType,
+    'contentDisposition': contentDisposition,
+    'filename': filename
+  });
+};
+
+/**
+ * @return {string} The string to set as a payload.
+ */
+MultipartFormData.prototype.toString = function() {
+  var lines = [];
+
+  for (var i = 0, header; header = this.headers[i]; i++) {
+    lines.push(header['name'] + ': ' + header['value']);
+  }
+  if (this.headers.length > 0) {
+    lines.push('');
+  }
+
+  for (var i = 0, part; part = this.parts[i]; i++) {
+    lines.push('--' + this.boundary);
+
+    if (part['contentDisposition']) {
+      var contentDisposition = 'Content-Disposition: form-data; ';
+      contentDisposition += 'name="' + part['name'] + '"';
+      if (part['filename']) {
+        contentDisposition += '; filename="' + part['filename'] + '"';
+      }
+      lines.push(contentDisposition);
+    }
+
+    if (part['contentType']) {
+      lines.push('Content-Type: ' + part['contentType']);
+    }
+
+    lines.push('');
+    lines.push(part['value']);
+  }
+
+  lines.push('--' + this.boundary + '--');
+
+  return lines.join(MultipartFormData.CRLF) + MultipartFormData.CRLF;
+};
+
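
For reference, toString() assembles ordinary multipart content: top-level
headers, then boundary-delimited parts, each with optional Content-Disposition
and Content-Type lines, a blank line, and the value. A short Python sketch of
the same shape (boundary and field values are illustrative):

    CRLF = '\r\n'
    boundary = 'DiStRIcT10-1234567'  # really randomized per instance
    lines = []
    for name, value in [('to', 'a@example.com'), ('body', 'hi')]:
        lines.append('--' + boundary)
        lines.append('Content-Disposition: form-data; name="%s"' % name)
        lines.append('')
        lines.append(value)
    lines.append('--' + boundary + '--')
    payload = CRLF.join(lines) + CRLF
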
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/js/rfc822_date.js	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,70 @@
+// Copyright 2009 Google Inc.  All Rights Reserved.
+
+var RFC822Date = {};
+
+/**
+ * Return a DateTime in RFC822 format.
+ * @see http://www.w3.org/Protocols/rfc822/#z28
+ * @param {Date} date A Date object.
+ * @param {string} opt_tzo The timezone offset.
+ */
+RFC822Date.format = function(date, opt_tzo) {
+  var tzo = opt_tzo || RFC822Date.getTZO(date.getTimezoneOffset());
+  var rfc822Date = RFC822Date.DAYS[date.getDay()] + ', ';
+  rfc822Date += RFC822Date.padZero(date.getDate()) + ' ';
+  rfc822Date += RFC822Date.MONTHS[date.getMonth()] + ' ';
+  rfc822Date += date.getFullYear() + ' ';
+  rfc822Date += RFC822Date.padZero(date.getHours()) + ':';
+  rfc822Date += RFC822Date.padZero(date.getMinutes()) + ':';
+  rfc822Date += RFC822Date.padZero(date.getSeconds()) + ' ' ;
+  rfc822Date += tzo;
+  return rfc822Date;
+};
+
+
+/**
+ * @type {Array}
+ */
+RFC822Date.MONTHS = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+                     'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
+
+
+/**
+ * @type {Array}
+ */
+RFC822Date.DAYS = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
+
+
+/**
+ * Pads a value with a leading zero if it is shorter than two characters.
+ * @param {number|string} val The value to pad.
+ * @return {string} The zero-padded value.
+ */
+RFC822Date.padZero = function(val) {
+  val = val + ''; // cast into string
+  if (val.length < 2) {
+    val = '0' + val;
+  }
+  return val;
+};
+
+
+/**
+ * Returns a timezone offset in the format +|-dddd.
+ * @param {number} tzo A timezone offset from GMT in minutes.
+ * @return {string} The time zone offset as a string.
+ */
+RFC822Date.getTZO = function(tzo) {
+  var hours = Math.floor(tzo / 60);
+  var tzoFormatted = hours > 0 ? '-' : '+';
+
+  var absoluteHours = Math.abs(hours);
+  tzoFormatted += absoluteHours < 10 ? '0' : '';
+  tzoFormatted += absoluteHours;
+
+  var moduloMinutes = Math.abs(tzo % 60);
+  tzoFormatted += RFC822Date.padZero(moduloMinutes);
+
+  return tzoFormatted;
+};
+
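
When testing the mail handlers server-side, the same RFC 822 date format is
available from the Python stdlib (Python 2.5+), so the JS helper does not have
to be reimplemented:

    from email.utils import formatdate
    import time

    print formatdate(time.time(), localtime=True)
    # e.g. Sun, 06 Sep 2009 23:31:53 +0200
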
--- a/thirdparty/google_appengine/google/appengine/ext/admin/templates/js/webhook.js	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/js/webhook.js	Sun Sep 06 23:31:53 2009 +0200
@@ -33,7 +33,7 @@
       this.payload = value;
     }
   }
-  
+
   if (this.action == '') {
     return 'action not found';
   }
@@ -61,7 +61,20 @@
     callback(this, req, e);
     return;
   }
-  callback(this, req, null);
+
+  // If the responseText matches our <form action="/_ah/login then the
+  // user is not logged in as an Administrator so we'll fake the request.
+  if (req.responseText.match(/<form[^>]+_ah\/login/)) {
+    var fakeReq = {
+      'status': 403,
+      'responseText': 'Current logged in user is not authorized ' +
+                      'to view this page'
+    };
+    fakeReq.getAllResponseHeaders = function(){};
+    callback(this, fakeReq, null);
+  } else {
+    callback(this, req, null);
+  }
 };
 
 Webhook.prototype.run = function(callback) {
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/admin/templates/xmpp.html	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,234 @@
+{% extends "base.html" %}
+
+{% block title %}{{ application_name }} Development Console - XMPP{% endblock %}
+
+{% block breadcrumbs %}
+  <span class="item"><a href="">XMPP</a></span>
+{% endblock %}
+
+{% block head %}
+  <style type="text/css">{% include "css/xmpp.css" %}</style>
+  <script type="text/javascript">
+    {% include "js/webhook.js" %}
+    {% include "js/multipart_form_data.js" %}
+
+    var xmppFeedbackEl;
+    var xmppForm;
+    var payloadEl;
+    var fromEl;
+    var toEl;
+    var chatEl;
+    var contentLengthEl;
+    var contentTypeEl;
+
+    var sendXmppWebhook = function() {
+
+      if (!xmppFeedbackEl) {
+        xmppFeedbackEl = document.getElementById('xmpp-feedback');
+        xmppForm = document.getElementById('xmpp-form');
+        fromEl = document.getElementById('from');
+        toEl = document.getElementById('to');
+        chatEl = document.getElementById('chat');
+        payloadEl = document.getElementById('payload');
+        contentTypeEl = document.getElementById('content-type');
+      }
+
+      var to = toEl.value;
+      var from = fromEl.value;
+      var body = chatEl.value;
+
+      if (!to || !from) {
+        xmppFeedbackEl.className = 'ae-errorbox';
+        xmppFeedbackEl.innerHTML = 'From and To are required.';
+        return;
+      }
+
+      xmppFeedbackEl.className = 'ae-message';
+      xmppFeedbackEl.innerHTML = 'Sending XMPP message...';
+
+      var formData = new MultipartFormData();
+      formData.addPart('to', to, null, 'form-data');
+      formData.addPart('from', from, null, 'form-data');
+      formData.addPart('body', body, null, 'form-data');
+      formData.addPart('stanza', buildXmlStanza(from, to, body), 'text/xml', 'form-data');
+
+      payloadEl.value = formData.toString();
+      contentTypeEl.value = 'multipart/form-data; boundary=' +
+          formData.boundary;
+
+      (new Webhook('xmpp-form')).run(handleXmppResult);
+
+      // Prevents actual form posts.
+      return false;
+    };
+
+    var handleXmppResult = function(hook, req, error) {
+      if (error != null || req == null || req.status != 200) {
+        xmppFeedbackEl.className = 'ae-errorbox';
+        xmppFeedbackEl.innerHTML = 'Message send failure<br>' +
+            req.responseText;
+      } else {
+        var timestamp;
+        var dateString = new Date().toString();
+        var match = dateString.match(/(\d\d:\d\d:\d\d).+\((.+)\)/);
+        if (!match || !match[0] || !match[2]) {
+          timestamp = dateString;
+        } else {
+          timestamp = match[1] + ' ' + match[2];
+        }
+
+        xmppFeedbackEl.className = 'ae-message';
+        xmppFeedbackEl.innerHTML = 'Message has been sent at ' + timestamp;
+      }
+    };
+
+    var buildXmlStanza = function(from, to, body) {
+      var xml = '<message from="' + from + '" '+
+          'to="' + to + '">' +
+          '<body>' + body + '</body>' +
+          '</message>';
+      return xml;
+    };
+  </script>
+{% endblock %}
+
+{% block body %}
+<div id="xmpp">
+  <h3>XMPP</h3>
+  {% if xmpp_configured %}{% else %}
+    <div class="ae-errorbox">
+      XMPP is not yet configured properly in your app.yaml, in the services section.
+    </div>
+  {% endif %}
+  <div id="xmpp-feedback"></div>
+  <form id="xmpp-form"
+    action="/_ah/xmpp/message/chat/" method="post"
+    onsubmit="sendXmppWebhook(); return false">
+
+    <input type="hidden" name="payload" id="payload">
+    <input type="hidden" id="content-type" name="header:Content-Type">
+
+    <fieldset>
+      <input type="hidden" name="message_type" id="message-type-chat" value="chat">
+      <!--
+      <legend>Message Type:</legend>
+      <div class="radio">
+        <input type="radio" name="message_type" id="message-type-chat" value="chat">
+        <label for="message-type-chat">Chat message</label>
+      </div>
+
+      <div class="radio">
+        <input type="radio" name="message_type" id="message-type-xml" value="xml">
+        <label for="message-type-xml">XML stanza</label>
+      </div>
+
+      <div class="radio">
+        <input type="radio" name="message_type" id="message-type-presence" value="presence">
+        <label for="message-type-presence">Presence</label>
+      </div>
+      -->
+    </fieldset>
+
+    <div class="fieldset">
+      <label for="from">From:</label>
+      <input type="text" id="from" name="from" size="40">
+    </div>
+
+
+    <div class="fieldset">
+      <label for="to">To:</label>
+      <input type="text" id="to" name="to" size="40">
+    </div>
+
+
+    <div id="chat-c" class="fieldset">
+      <label for="chat">Chat (plain text):</label>
+      <textarea id="chat" name="chat" rows="10" cols="50"></textarea>
+    </div>
+
+    <!--
+    <div id="xml-c" class="fieldset">
+      <label for="xml">XML Stanza:</label>
+      <textarea id="xml" name="xml" rows="10" cols="50"></textarea>
+    </div>
+
+
+    <fieldset id="presence-c">
+      <legend>Presence:</legend>
+
+      <div class="radio">
+        <input type="radio" id="presence-online" name="presence" value="online">
+        <label for="presence-online">Online</label>
+      </div>
+
+      <div class="radio">
+        <input type="radio" id="presence-offline" name="presence" value="offline">
+        <label for="presence-offline">Offline</label>
+      </div>
+    </fieldset>
+    -->
+
+    <div id="xmpp-submit">
+      <input type="submit" value="Send Message">
+    </div>
+
+  </form>
+</div>
+<!--
+<script type="text/javascript">
+  var messageTypes = ['chat', 'xml', 'presence'];
+
+  var messageTypeEls = [];
+  for (var i = 0, messageType; messageType = messageTypes[i]; i++) {
+    var messageTypeEl = document.getElementById('message-type-' +
+        messageType);
+    messageTypeEls.push(messageTypeEl);
+  }
+
+  // Initializes the chosen type to be the first radio.
+  var chosenMessageTypeId = messageTypeEls[0].id;
+
+  var messageTypeDict = {};
+  for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+    var type = messageTypeEl.id.replace('message-type-', '');
+    var formEl = document.getElementById(type + '-c');
+    messageTypeDict[messageTypeEl.id] = formEl;
+    // Initially hides all of the conditional form elements.
+    formEl.style.display = 'none';
+  }
+
+  var setChosenMessageType = function(messageTypeId) {
+    document.getElementById(messageTypeId).checked = true;
+
+    // Hides previously chosen message type
+    messageTypeDict[chosenMessageTypeId].style.display = 'none';
+
+    // Sets the new chosen type and shows its field.
+    chosenMessageTypeId = messageTypeId;
+    messageTypeDict[chosenMessageTypeId].style.display = '';
+  }
+
+  var messageTypeClickHandler = function(e) {
+    for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+      if (messageTypeEl.checked) {
+        setChosenMessageType(messageTypeEl.id);
+        break;
+      }
+    }
+  };
+
+  // set up event listeners
+  for (var i = 0, messageTypeEl; messageTypeEl = messageTypeEls[i]; i++) {
+    messageTypeEl.onclick = messageTypeClickHandler;
+  }
+
+  // Init
+  setChosenMessageType(chosenMessageTypeId);
+
+</script>
+-->
+
+{% endblock %}
+
+{% block final %}
+{% endblock %}
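
The form ultimately POSTs multipart/form-data to /_ah/xmpp/message/chat/. A
hedged Python sketch that exercises the same endpoint directly, assuming a
dev_appserver on localhost:8080:

    import urllib2

    boundary = 'XmPpBoUnDaRy'  # illustrative

    def part(name, value):
        return ('--%s\r\nContent-Disposition: form-data; name="%s"\r\n\r\n%s'
                % (boundary, name, value))

    body = '\r\n'.join([part('from', 'tester@example.com'),
                        part('to', 'app@example.com'),
                        part('body', 'hello')])
    body += '\r\n--%s--\r\n' % boundary
    req = urllib2.Request('http://localhost:8080/_ah/xmpp/message/chat/', body,
                          {'Content-Type': 'multipart/form-data; boundary=' +
                           boundary})
    urllib2.urlopen(req)
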
--- a/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/db/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -122,6 +122,7 @@
 Text = datastore_types.Text
 Blob = datastore_types.Blob
 ByteString = datastore_types.ByteString
+BlobKey = datastore_types.BlobKey
 
 _kind_map = {}
 
@@ -186,6 +187,7 @@
     PhoneNumber,
     PostalAddress,
     Rating,
+    BlobKey,
     ])
 
 _ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
@@ -241,6 +243,49 @@
         "definition." % locals())
 
 
+def query_descendants(model_instance):
+  """Returns a query for all the descendants of a model instance.
+
+  Args:
+    model_instance: Model instance to find the descendants of.
+
+  Returns:
+    Query that will retrieve all entities that have the given model instance
+    as an ancestor. Unlike normal ancestor queries, this does not include the
+    ancestor itself.
+  """
+
+  result = Query().ancestor(model_instance)
+  result.filter(datastore_types._KEY_SPECIAL_PROPERTY + ' >',
+                model_instance.key())
+  return result
+
+
+def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
+  """Encodes a model instance as a protocol buffer.
+
+  Args:
+    model_instance: Model instance to encode.
+  Returns:
+    entity_pb.EntityProto representation of the model instance
+  """
+  return model_instance._populate_entity(_entity_class).ToPb()
+
+
+def model_from_protobuf(pb, _entity_class=datastore.Entity):
+  """Decodes a model instance from a protocol buffer.
+
+  Args:
+    pb: The protocol buffer representation of the model instance. Can be an
+        entity_pb.EntityProto or str encoding of an entity_pb.EntityProto.
+
+  Returns:
+    Model instance resulting from decoding the protocol buffer
+  """
+  entity = _entity_class.FromPb(pb)
+  return class_for_kind(entity.kind()).from_entity(entity)
+
+
 def _initialize_properties(model_class, name, bases, dct):
   """Initialize Property attributes for Model-class.
 
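
A short usage sketch for the three helpers just added (Article is a
hypothetical model):

    from google.appengine.ext import db

    class Article(db.Model):
      title = db.StringProperty()

    root = Article(title='root')
    root.put()
    Article(parent=root, title='leaf').put()

    # Every entity under root's key, excluding root itself.
    for child in db.query_descendants(root):
      print child.title

    # Round-trip an instance through its protocol buffer form.
    clone = db.model_from_protobuf(db.model_to_protobuf(root))
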
@@ -248,17 +293,31 @@
     model_class: Model class to initialize properties for.
   """
   model_class._properties = {}
+  property_source = {}
+
+  def get_attr_source(name, cls):
+    for src_cls in cls.mro():
+      if name in src_cls.__dict__:
+        return src_cls
+
   defined = set()
   for base in bases:
     if hasattr(base, '_properties'):
-      property_keys = base._properties.keys()
-      duplicate_properties = defined.intersection(property_keys)
-      if duplicate_properties:
-        raise DuplicatePropertyError(
-            'Duplicate properties in base class %s already defined: %s' %
-            (base.__name__, list(duplicate_properties)))
-      defined.update(property_keys)
-      model_class._properties.update(base._properties)
+      property_keys = set(base._properties.keys())
+      duplicate_property_keys = defined & property_keys
+      for dupe_prop_name in duplicate_property_keys:
+        old_source = property_source[dupe_prop_name] = get_attr_source(
+            dupe_prop_name, property_source[dupe_prop_name])
+        new_source = get_attr_source(dupe_prop_name, base)
+        if old_source != new_source:
+          raise DuplicatePropertyError(
+              'Duplicate property, %s, is inherited from both %s and %s.' %
+              (dupe_prop_name, old_source.__name__, new_source.__name__))
+      property_keys -= duplicate_property_keys
+      if property_keys:
+        defined |= property_keys
+        property_source.update(dict.fromkeys(property_keys, base))
+        model_class._properties.update(base._properties)
 
   for attr_name in dct.keys():
     attr = dct[attr_name]
@@ -557,6 +616,7 @@
   def __init__(self,
                parent=None,
                key_name=None,
+               key=None,
                _app=None,
                _from_entity=False,
                **kwds):
@@ -582,38 +642,64 @@
       parent: Parent instance for this instance or None, indicating a top-
         level instance.
       key_name: Name for new model instance.
-      _app: Intentionally undocumented.
+      key: Key instance for this instance, overrides parent and key_name
       _from_entity: Intentionally undocumented.
       args: Keyword arguments mapping to properties of model.
     """
-    if key_name == '':
-      raise BadKeyError('Name cannot be empty.')
-    elif key_name is not None and not isinstance(key_name, basestring):
-      raise BadKeyError('Name must be string type, not %s' %
-                        key_name.__class__.__name__)
-
-    if parent is not None:
-      if not isinstance(parent, (Model, Key)):
-        raise TypeError('Expected Model type; received %s (is %s)' %
-                        (parent, parent.__class__.__name__))
-      if isinstance(parent, Model) and not parent.has_key():
-        raise BadValueError(
-            "%s instance must have a complete key before it can be used as a "
-            "parent." % parent.kind())
-      if isinstance(parent, Key):
-        self._parent_key = parent
+    if key is not None:
+      if isinstance(key, (tuple, list)):
+        key = Key.from_path(*key)
+      if isinstance(key, basestring):
+        key = Key(encoded=key)
+      if not isinstance(key, Key):
+        raise TypeError('Expected Key type; received %s (is %s)' %
+                        (key, key.__class__.__name__))
+      if not key.has_id_or_name():
+        raise BadKeyError('Key must have an id or name')
+      if key.kind() != self.kind():
+        raise BadKeyError('Expected Key kind to be %s; received %s' %
+                          (self.kind(), key.kind()))
+      if _app is not None and key.app() != _app:
+        raise BadKeyError('Expected Key app to be %s; received %s' %
+                          (_app, key.app()))
+      if key_name is not None:
+        raise BadArgumentError('Cannot use key and key_name at the same time')
+      if parent is not None:
+        raise BadArgumentError('Cannot use key and parent at the same time')
+      self._key = key
+      self._key_name = None
+      self._parent = None
+      self._parent_key = None
+    else:
+      if key_name == '':
+        raise BadKeyError('Name cannot be empty.')
+      elif key_name is not None and not isinstance(key_name, basestring):
+        raise BadKeyError('Name must be string type, not %s' %
+                          key_name.__class__.__name__)
+
+      if parent is not None:
+        if not isinstance(parent, (Model, Key)):
+          raise TypeError('Expected Model type; received %s (is %s)' %
+                          (parent, parent.__class__.__name__))
+        if isinstance(parent, Model) and not parent.has_key():
+          raise BadValueError(
+              "%s instance must have a complete key before it can be used as a "
+              "parent." % parent.kind())
+        if isinstance(parent, Key):
+          self._parent_key = parent
+          self._parent = None
+        else:
+          self._parent_key = parent.key()
+          self._parent = parent
+      else:
+        self._parent_key = None
         self._parent = None
-      else:
-        self._parent_key = parent.key()
-        self._parent = parent
-    else:
-      self._parent_key = None
-      self._parent = None
+      self._key_name = key_name
+      self._key = None
+
     self._entity = None
-    self._key_name = key_name
     self._app = _app
 
-    properties = self.properties()
     for prop in self.properties().values():
       if prop.name in kwds:
         value = kwds[prop.name]
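
With the new key argument a complete Key can be assigned up front instead of
being derived from parent and key_name. A minimal sketch (Article as in the
sketch above):

    # The key fully determines kind, id-or-name and ancestry.
    key = db.Key.from_path('Article', 'welcome-post')
    article = Article(key=key, title='Welcome')
    article.put()  # stored under exactly this key
    assert article.key() == key
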
@@ -629,8 +715,9 @@
     """Unique key for this entity.
 
     This property is only available if this entity is already stored in the
-    datastore, so it is available if this entity was fetched returned from a
-    query, or after put() is called the first time for new entities.
+    datastore or if it has a full key, so it is available if this entity was
+    returned from a query, after put() is called the first time for a new
+    entity, or if a complete key was given when it was constructed.
 
     Returns:
       Datastore key of persisted entity.
@@ -640,13 +727,12 @@
     """
     if self.is_saved():
       return self._entity.key()
+    elif self._key:
+      return self._key
     elif self._key_name:
-      if self._parent_key:
-        parent_key = self._parent_key
-      elif self._parent:
-          parent_key = self._parent.key()
       parent = self._parent_key or (self._parent and self._parent.key())
-      return Key.from_path(self.kind(), self._key_name, parent=parent)
+      self._key = Key.from_path(self.kind(), self._key_name, parent=parent)
+      return self._key
     else:
       raise NotSavedError()
 
@@ -675,8 +761,11 @@
       Populated self._entity
     """
     self._entity = self._populate_entity(_entity_class=_entity_class)
-    if hasattr(self, '_key_name'):
-      del self._key_name
+    for attr in ('_key_name', '_key'):
+      try:
+        delattr(self, attr)
+      except AttributeError:
+        pass
     return self._entity
 
   def put(self):
@@ -713,13 +802,21 @@
       entity = self._entity
     else:
       kwds = {'_app': self._app,
-              'name': self._key_name,
               'unindexed_properties': self._unindexed_properties}
-
-      if self._parent_key is not None:
-        kwds['parent'] = self._parent_key
-      elif self._parent is not None:
-        kwds['parent'] = self._parent._entity
+      if self._key is not None:
+        if self._key.id():
+          kwds['id'] = self._key.id()
+        else:
+          kwds['name'] = self._key.name()
+        if self._key.parent():
+          kwds['parent'] = self._key.parent()
+      else:
+        if self._key_name is not None:
+          kwds['name'] = self._key_name
+        if self._parent_key is not None:
+          kwds['parent'] = self._parent_key
+        elif self._parent is not None:
+          kwds['parent'] = self._parent._entity
       entity = _entity_class(self.kind(), **kwds)
 
     self._to_entity(entity)
@@ -749,14 +846,15 @@
   def has_key(self):
     """Determine if this model instance has a complete key.
 
-    Ids are not assigned until the data is saved to the Datastore, but
-    instances with a key name always have a full key.
+    When not using a fully self-assigned Key, ids are not assigned until the
+    data is saved to the Datastore, but instances with a key name always have
+    a full key.
 
     Returns:
-      True if the object has been persisted to the datastore or has a key_name,
-      otherwise False.
+      True if the object has been persisted to the datastore or has a key
+      or key_name, otherwise False.
     """
-    return self.is_saved() or self._key_name
+    return self.is_saved() or self._key or self._key_name
 
   def dynamic_properties(self):
     """Returns a list of all dynamic properties defined for instance."""
@@ -794,6 +892,8 @@
       return self._parent.key()
     elif self._entity is not None:
       return self._entity.parent()
+    elif self._key is not None:
+      return self._key.parent()
     else:
       return None
 
@@ -1017,8 +1117,12 @@
 
     entity_values = cls._load_entity_values(entity)
     instance = cls(None, _from_entity=True, **entity_values)
-    instance._entity = entity
-    del instance._key_name
+    if entity.is_saved():
+      instance._entity = entity
+      del instance._key_name
+      del instance._key
+    elif entity.key().has_id_or_name():
+      instance._key = entity.key()
     return instance
 
   @classmethod
@@ -1126,6 +1230,33 @@
     keys.append(key)
   datastore.Delete(keys)
 
+def allocate_ids(model, size):
+  """Allocates a range of IDs of size for the model_key defined by model
+
+  Allocates a range of IDs in the datastore such that those IDs will not
+  be automatically assigned to new entities. You can only allocate IDs
+  for model keys from your app. If there is an error, raises a subclass of
+  datastore_errors.Error.
+
+  Args:
+    model: Model, Key or string to serve as a model specifying the ID sequence
+           in which to allocate IDs.
+    size: Number of IDs to allocate.
+
+  Returns:
+    (start, end) of the allocated range, inclusive.
+  """
+  models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
+      model, (Model, Key, basestring))
+  keys = []
+  for model_or_key in models_or_keys:
+    if isinstance(model_or_key, Model):
+      key = model_or_key = model_or_key.key()
+    elif isinstance(model_or_key, basestring):
+      key = model_or_key = Key(model_or_key)
+    else:
+      key = model_or_key
+    keys.append(key)
+  return datastore.AllocateIds(keys, size)
 
 class Expando(Model):
   """Dynamically expandable model.
@@ -1322,7 +1453,7 @@
 class _BaseQuery(object):
   """Base class for both Query and GqlQuery."""
 
-  def __init__(self, model_class, keys_only=False):
+  def __init__(self, model_class=None, keys_only=False):
     """Constructor.
 
     Args:
@@ -1428,7 +1559,10 @@
     if self._keys_only:
       return raw
     else:
-      return [self._model_class.from_entity(e) for e in raw]
+      if self._model_class is not None:
+        return [self._model_class.from_entity(e) for e in raw]
+      else:
+        return [class_for_kind(e.kind()).from_entity(e) for e in raw]
 
   def __getitem__(self, arg):
     """Support for query[index] and query[start:stop].
@@ -1505,7 +1639,11 @@
     Raises:
       StopIteration when there are no more results in query.
     """
-    return self.__model_class.from_entity(self.__iterator.next())
+    if self.__model_class is not None:
+      return self.__model_class.from_entity(self.__iterator.next())
+    else:
+      entity = self.__iterator.next()
+      return class_for_kind(entity.kind()).from_entity(entity)
 
 
 def _normalize_query_parameter(value):
@@ -1569,7 +1707,7 @@
        print story.title
   """
 
-  def __init__(self, model_class, keys_only=False):
+  def __init__(self, model_class=None, keys_only=False):
     """Constructs a query over instances of the given Model.
 
     Args:
@@ -1586,7 +1724,11 @@
                  _multi_query_class=datastore.MultiQuery):
     queries = []
     for query_set in self.__query_sets:
-      query = _query_class(self._model_class.kind(),
+      if self._model_class is not None:
+        kind = self._model_class.kind()
+      else:
+        kind = None
+      query = _query_class(kind,
                            query_set,
                            keys_only=self._keys_only)
       query.Order(*self.__orderings)
@@ -1665,7 +1807,12 @@
     else:
       operator = '=='
 
-    if prop in self._model_class._unindexed_properties:
+    if self._model_class is None:
+      if prop != datastore_types._KEY_SPECIAL_PROPERTY:
+        raise BadQueryError(
+            'Only %s filters are allowed on kindless queries.' %
+            datastore_types._KEY_SPECIAL_PROPERTY)
+    elif prop in self._model_class._unindexed_properties:
       raise PropertyError('Property \'%s\' is not indexed' % prop)
 
     if operator.lower() == 'in':
@@ -1711,13 +1858,20 @@
     else:
       order = datastore.Query.ASCENDING
 
-    if not issubclass(self._model_class, Expando):
-      if (property not in self._model_class.properties() and
-          property not in datastore_types._SPECIAL_PROPERTIES):
-        raise PropertyError('Invalid property name \'%s\'' % property)
-
-    if property in self._model_class._unindexed_properties:
-      raise PropertyError('Property \'%s\' is not indexed' % property)
+    if self._model_class is None:
+      if (property != datastore_types._KEY_SPECIAL_PROPERTY or
+          order != datastore.Query.ASCENDING):
+        raise BadQueryError(
+            'Only %s ascending orders are supported on kindless queries' %
+            datastore_types._KEY_SPECIAL_PROPERTY)
+    else:
+      if not issubclass(self._model_class, Expando):
+        if (property not in self._model_class.properties() and
+            property not in datastore_types._SPECIAL_PROPERTIES):
+          raise PropertyError('Invalid property name \'%s\'' % property)
+
+      if property in self._model_class._unindexed_properties:
+        raise PropertyError('Property \'%s\' is not indexed' % property)
 
     self.__orderings.append((property, order))
     return self
@@ -1774,14 +1928,18 @@
     app = kwds.pop('_app', None)
 
     self._proto_query = gql.GQL(query_string, _app=app)
-    model_class = class_for_kind(self._proto_query._entity)
+    if self._proto_query._entity is not None:
+      model_class = class_for_kind(self._proto_query._entity)
+    else:
+      model_class = None
     super(GqlQuery, self).__init__(model_class,
                                    keys_only=self._proto_query._keys_only)
 
-    for property, unused in (self._proto_query.filters().keys() +
-                             self._proto_query.orderings()):
-      if property in model_class._unindexed_properties:
-        raise PropertyError('Property \'%s\' is not indexed' % property)
+    if model_class is not None:
+      for property, unused in (self._proto_query.filters().keys() +
+                               self._proto_query.orderings()):
+        if property in model_class._unindexed_properties:
+          raise PropertyError('Property \'%s\' is not indexed' % property)
 
     self.bind(*args, **kwds)
 
@@ -2404,7 +2562,6 @@
   data_type = users.User
 
 
-
 class ListProperty(Property):
   """A property that stores a list of things.
 
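
Two more additions are visible in the hunks above: Query and GqlQuery now
accept no model class at all (kindless queries, restricted to __key__ filters
and ascending __key__ order), and allocate_ids reserves an ID range. A hedged
sketch, where start_key is any existing Key:

    # Kindless: iterates entities of every kind, in key order.
    q = db.Query()
    q.filter('__key__ >', start_key)
    q.order('__key__')

    # Reserve ten IDs in the Article sequence; the datastore will not
    # auto-assign IDs from the returned [start, end] range afterwards.
    start, end = db.allocate_ids(db.Key.from_path('Article', 1), 10)
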
--- a/thirdparty/google_appengine/google/appengine/ext/db/polymodel.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/db/polymodel.py	Sun Sep 06 23:31:53 2009 +0200
@@ -87,9 +87,12 @@
         itself so that it subclasses can quickly know what the root of
         their hierarchy is and what kind they are stored in.
       __class_hierarchy__: List of classes describing the new model's place
-        in the class hierarchy.  The first element is always the root
-        element while the last element is the new class itself.  For example:
+        in the class hierarchy in reverse MRO order.  The first element is
+        always the root class while the last element is always the new class.
 
+        MRO documentation: http://www.python.org/download/releases/2.3/mro/
+
+        For example:
           class Foo(PolymorphicClass): ...
 
           class Bar(Foo): ...
@@ -107,30 +110,29 @@
     discriminator (the 'class' property of the entity) when loading from the
     datastore.
     """
-    if name == 'PolyModel' or PolyModel not in bases:
-      db._initialize_properties(cls, name, bases, dct)
-      super(db.PropertiedClass, cls).__init__(name, bases, dct)
-    else:
-      cls.__root_class__ = cls
-      super(PolymorphicClass, cls).__init__(name, bases, dct)
-
     if name == 'PolyModel':
+      super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
       return
 
-    if cls is not cls.__root_class__:
-      poly_class = None
-      for base in cls.__bases__:
-        if issubclass(base, PolyModel):
-          poly_class = base
-          break
-      else:
+    elif PolyModel in bases:
+      if getattr(cls, '__class_hierarchy__', None):
+        raise db.ConfigurationError(('%s cannot derive from PolyModel as '
+            '__class_hierarchy__ is already defined.') % cls.__name__)
+      cls.__class_hierarchy__ = [cls]
+      cls.__root_class__ = cls
+      super(PolymorphicClass, cls).__init__(name, bases, dct)
+    else:
+      super(PolymorphicClass, cls).__init__(name, bases, dct, map_kind=False)
+
+      cls.__class_hierarchy__ = [c for c in reversed(cls.mro())
+          if issubclass(c, PolyModel) and c != PolyModel]
+
+      if cls.__class_hierarchy__[0] != cls.__root_class__:
         raise db.ConfigurationError(
-            "Polymorphic class '%s' does not inherit from PolyModel."
-            % cls.__name__)
-
-      cls.__class_hierarchy__ = poly_class.__class_hierarchy__ + [cls]
-    else:
-      cls.__class_hierarchy__ = [cls]
+            '%s cannot be derived from both root classes %s and %s' %
+            (cls.__name__,
+            cls.__class_hierarchy__[0].__name__,
+            cls.__root_class__.__name__))
 
     _class_map[cls.class_key()] = cls
 
@@ -310,13 +312,16 @@
     return super(PolyModel, cls).from_entity(entity)
 
   @classmethod
-  def all(cls):
+  def all(cls, **kwds):
     """Get all instance of a class hierarchy.
 
+    Args:
+      kwds: Keyword parameters passed on to Model.all.
+
     Returns:
       Query with filter set to match this class' discriminator.
     """
-    query = super(PolyModel, cls).all()
+    query = super(PolyModel, cls).all(**kwds)
     if cls != cls.__root_class__:
       query.filter(_CLASS_KEY_PROPERTY + ' =', cls.class_name())
     return query
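
The metaclass rewrite keeps the usage contract unchanged; a minimal sketch of
a two-level hierarchy and the discriminator filter that all() (now forwarding
keyword arguments) applies:

    from google.appengine.ext import db
    from google.appengine.ext.db import polymodel

    class Contact(polymodel.PolyModel):
      phone = db.PhoneNumberProperty()

    class Person(Contact):
      name = db.StringProperty()

    Person(name='Pawel').put()
    # Queries the root Contact kind but filters on class == 'Person'.
    for person in Person.all():
      print person.name
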
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/deferred/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+
+
+from deferred import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/deferred/deferred.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,267 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A module that handles deferred execution of callables via the task queue.
+
+Tasks consist of a callable and arguments to pass to it. The callable and its
+arguments are serialized and put on the task queue, which deserializes and
+executes them. The following callables can be used as tasks:
+
+1) Functions defined in the top level of a module
+2) Classes defined in the top level of a module
+3) Instances of classes in (2) that implement __call__
+4) Instance methods of objects of classes in (2)
+5) Class methods of classes in (2)
+6) Built-in functions
+7) Built-in methods
+
+The following callables can NOT be used as tasks:
+1) Nested functions or closures
+2) Nested classes or objects of them
+3) Lambda functions
+4) Static methods
+
+The arguments to the callable, and the object (in the case of method or object
+calls) must all be pickleable.
+
+If you want your tasks to execute reliably, don't use mutable global variables;
+they are not serialized with the task and may not be the same when your task
+executes as they were when it was enqueued (in fact, they will almost certainly
+be different).
+
+If your app relies on manipulating the import path, make sure that the function
+you are deferring is defined in a module that can be found without import path
+manipulation. Alternately, you can include deferred.TaskHandler in your own
+webapp application instead of using the easy-install method detailed below.
+
+When you create a deferred task using deferred.defer, the task is serialized,
+and an attempt is made to add it directly to the task queue. If the task is too
+big (larger than about 10 kilobytes when serialized), a datastore entry will be
+created for the task, and a new task will be enqueued, which will fetch the
+original task from the datastore and execute it. This is much less efficient
+than the direct execution model, so it's a good idea to minimize the size of
+your tasks when possible.
+
+In order for tasks to be processed, you need to set up the handler. Add the
+following to your app.yaml handlers section:
+
+handlers:
+- url: /_ah/queue/deferred
+  script: $PYTHON_LIB/google/appengine/ext/deferred/__init__.py
+  login: admin
+
+By default, the deferred module uses the URL above, and the default queue.
+
+Example usage:
+
+  def do_something_later(key, amount):
+    entity = MyModel.get(key)
+    entity.total += amount
+    entity.put()
+
+  # Use default URL and queue name, no task name, execute ASAP.
+  deferred.defer(do_something_later, 20)
+
+  # Providing non-default task queue arguments
+  deferred.defer(do_something_later, 20, _queue="foo", countdown=60)
+"""
+
+
+
+
+
+import logging
+import pickle
+import types
+
+from google.appengine.api.labs import taskqueue
+from google.appengine.ext import db
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp.util import run_wsgi_app
+
+
+_TASKQUEUE_HEADERS = {"Content-Type": "application/octet-stream"}
+_DEFAULT_URL = "/_ah/queue/deferred"
+_DEFAULT_QUEUE = "default"
+
+
+class Error(Exception):
+  """Base class for exceptions in this module."""
+
+
+class PermanentTaskFailure(Error):
+  """Indicates that a task failed, and will never succeed."""
+
+
+def run(data):
+  """Unpickles and executes a task.
+
+  Args:
+    data: A pickled tuple of (function, args, kwargs) to execute.
+  Returns:
+    The return value of the function invocation.
+  """
+  try:
+    func, args, kwds = pickle.loads(data)
+  except Exception, e:
+    raise PermanentTaskFailure(e)
+  else:
+    return func(*args, **kwds)
+
+
+class _DeferredTaskEntity(db.Model):
+  """Datastore representation of a deferred task.
+
+  This is used in cases when the deferred task is too big to be included as
+  payload with the task queue entry.
+  """
+  data = db.BlobProperty(required=True)
+
+
+def run_from_datastore(key):
+  """Retrieves a task from the datastore and executes it.
+
+  Args:
+    key: The datastore key of a _DeferredTaskEntity storing the task.
+  Returns:
+    The return value of the function invocation.
+  """
+  entity = _DeferredTaskEntity.get(key)
+  if not entity:
+    raise PermanentTaskFailure()
+  try:
+    ret = run(entity.data)
+    entity.delete()
+    return ret
+  except PermanentTaskFailure:
+    entity.delete()
+    raise
+
+
+def invoke_member(obj, membername, *args, **kwargs):
+  """Retrieves a member of an object, then calls it with the provided arguments.
+
+  Args:
+    obj: The object to operate on.
+    membername: The name of the member to retrieve from obj.
+    args: Positional arguments to pass to the method.
+    kwargs: Keyword arguments to pass to the method.
+  Returns:
+    The return value of the method invocation.
+  """
+  return getattr(obj, membername)(*args, **kwargs)
+
+
+def _curry_callable(obj, *args, **kwargs):
+  """Takes a callable and arguments and returns a task queue tuple.
+
+  The returned tuple consists of (callable, args, kwargs), and can be pickled
+  and unpickled safely.
+
+  Args:
+    obj: The callable to curry. See the module docstring for restrictions.
+    args: Positional arguments to call the callable with.
+    kwargs: Keyword arguments to call the callable with.
+  Returns:
+    A tuple consisting of (callable, args, kwargs) that can be evaluated by
+    run() with equivalent effect of executing the function directly.
+  Raises:
+    ValueError: If the passed in object is not of a valid callable type.
+  """
+  if isinstance(obj, types.MethodType):
+    return (invoke_member, (obj.im_self, obj.im_func.__name__) + args, kwargs)
+  elif isinstance(obj, types.BuiltinMethodType):
+    if not obj.__self__:
+      return (obj, args, kwargs)
+    else:
+      return (invoke_member, (obj.__self__, obj.__name__) + args, kwargs)
+  elif isinstance(obj, types.ObjectType) and hasattr(obj, "__call__"):
+    return (obj, args, kwargs)
+  elif isinstance(obj, (types.FunctionType, types.BuiltinFunctionType,
+                        types.ClassType, types.UnboundMethodType)):
+    return (obj, args, kwargs)
+  else:
+    raise ValueError("obj must be callable")
+
+
+def serialize(obj, *args, **kwargs):
+  """Serializes a callable into a format recognized by the deferred executor.
+
+  Args:
+    obj: The callable to serialize. See module docstring for restrictions.
+    args: Positional arguments to call the callable with.
+    kwargs: Keyword arguments to call the callable with.
+  Returns:
+    A serialized representation of the callable.
+  """
+  curried = _curry_callable(obj, *args, **kwargs)
+  return pickle.dumps(curried, protocol=pickle.HIGHEST_PROTOCOL)
+
+
+def defer(obj, *args, **kwargs):
+  """Defers a callable for execution later.
+
+  The default deferred URL of /_ah/queue/deferred will be used unless an
+  alternate URL is explicitly specified. If you want to use the default URL for
+  a queue, specify _url=None. If you specify a different URL, you will need to
+  install the handler on that URL (see the module docstring for details).
+
+  Args:
+    obj: The callable to execute. See module docstring for restrictions.
+    _countdown, _eta, _name, _url, _queue: Passed through to the task queue -
+      see the task queue documentation for details.
+    args: Positional arguments to call the callable with.
+    kwargs: Any other keyword arguments are passed through to the callable.
+  """
+  taskargs = dict((x, kwargs.pop(("_%s" % x), None))
+                  for x in ("countdown", "eta", "name"))
+  taskargs["url"] = kwargs.pop("_url", _DEFAULT_URL)
+  taskargs["headers"] = _TASKQUEUE_HEADERS
+  queue = kwargs.pop("_queue", _DEFAULT_QUEUE)
+  pickled = serialize(obj, *args, **kwargs)
+  try:
+    task = taskqueue.Task(payload=pickled, **taskargs)
+    task.add(queue)
+  except taskqueue.TaskTooLargeError:
+    key = _DeferredTaskEntity(data=pickled).put()
+    pickled = serialize(run_from_datastore, str(key))
+    task = taskqueue.Task(payload=pickled, **taskargs)
+    task.add(queue)
+
+
+class TaskHandler(webapp.RequestHandler):
+  """A webapp handler class that processes deferred invocations."""
+
+  def post(self):
+    headers = ["%s:%s" % (k, v) for k, v in self.request.headers.items()
+               if k.lower().startswith("x-appengine-")]
+    logging.info(", ".join(headers))
+
+    try:
+      run(self.request.body)
+    except PermanentTaskFailure, e:
+      logging.exception("Permanent failure attempting to execute task")
+
+
+application = webapp.WSGIApplication([(".*", TaskHandler)])
+
+
+def main():
+  run_wsgi_app(application)
+
+
+if __name__ == "__main__":
+  main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/ereporter/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from ereporter import *
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/ereporter/ereporter.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,261 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A logging handler that records information about unique exceptions.
+
+'Unique' in this case is defined as a given (exception class, location) tuple.
+Unique exceptions are logged to the datastore with an example stacktrace and an
+approximate count of occurrences, grouped by day and application version.
+
+A cron handler, in google.appengine.ext.ereporter.report_generator, constructs
+and emails a report based on the previous day's exceptions.
+
+Example usage:
+
+In your handler script(s), add:
+
+  import logging
+  from google.appengine.ext import ereporter
+
+  ereporter.register_logger()
+
+In your app.yaml, add:
+
+  handlers:
+  - url: /_ereporter/.*
+    script: $PYTHON_LIB/google/appengine/ext/ereporter/report_generator.py
+    login: admin
+
+In your cron.yaml, add:
+
+  cron:
+  - description: Daily exception report
+    url: /_ereporter?sender=you@yourdomain.com
+    schedule: every day 00:00
+
+This will cause a daily exception report to be generated and emailed to all
+admins, with exception traces grouped by minor version. If you only want to
+get exception information for the most recent minor version, add the
+'versions=latest' argument to the query string. For other valid query string
+arguments, see report_generator.py.
+
+If you anticipate a lot of exception traces (for example, if you're deploying
+many minor versions, each of which may have its own set of exceptions), you
+can ensure that the traces from the newest minor versions get included by adding
+this to your index.yaml:
+
+  indexes:
+  - kind: __google_ExceptionRecord
+    properties:
+    - name: date
+    - name: major_version
+    - name: minor_version
+      direction: desc
+"""
+
+
+
+
+
+import datetime
+import logging
+import os
+import sha
+import traceback
+import urllib
+
+from google.appengine.api import memcache
+from google.appengine.ext import db
+from google.appengine.ext import webapp
+
+
+MAX_SIGNATURE_LENGTH = 256
+
+
+class ExceptionRecord(db.Model):
+  """Datastore model for a record of a unique exception."""
+
+  signature = db.StringProperty(required=True)
+  major_version = db.StringProperty(required=True)
+  minor_version = db.IntegerProperty(required=True)
+  date = db.DateProperty(required=True)
+  count = db.IntegerProperty(required=True, default=0)
+
+  stacktrace = db.TextProperty(required=True)
+  http_method = db.TextProperty(required=True)
+  url = db.TextProperty(required=True)
+  handler = db.TextProperty(required=True)
+
+  @classmethod
+  def get_key_name(cls, signature, version, date=None):
+    """Generates a key name for an exception record.
+
+    Args:
+      signature: A signature representing the exception and its site.
+      version: The major/minor version of the app the exception occurred in.
+      date: The date the exception occurred.
+
+    Returns:
+      The unique key name for this exception record.
+    """
+    if not date:
+      date = datetime.date.today()
+    return '%s@%s:%s' % (signature, date, version)
+
+
+class ExceptionRecordingHandler(logging.Handler):
+  """A handler that records exception data to the App Engine datastore."""
+
+  def __init__(self, log_interval=10):
+    """Constructs a new ExceptionRecordingHandler.
+
+    Args:
+      log_interval: The minimum interval at which we will log an individual
+        exception. This is a per-exception timeout, so doesn't affect the
+        aggregate rate of exception logging, only the rate at which we record
+        occurrences of a single exception, to prevent datastore contention.
+    """
+    self.log_interval = log_interval
+    logging.Handler.__init__(self)
+
+  @classmethod
+  def __RelativePath(cls, path):
+    """Rewrites a path to be relative to the app's root directory.
+
+    Args:
+      path: The path to rewrite.
+
+    Returns:
+      The path with the prefix removed, if that prefix matches the app's
+        root directory.
+    """
+    cwd = os.getcwd()
+    if path.startswith(cwd):
+      path = path[len(cwd)+1:]
+    return path
+
+  @classmethod
+  def __GetSignature(cls, exc_info):
+    """Returns a unique signature string for an exception.
+
+    Args:
+      exc_info: The exc_info object for an exception.
+
+    Returns:
+      A unique signature string for the exception, consisting of fully
+      qualified exception name and call site.
+    """
+    ex_type, unused_value, trace = exc_info
+    frames = traceback.extract_tb(trace)
+
+    fulltype = '%s.%s' % (ex_type.__module__, ex_type.__name__)
+    path, line_no = frames[-1][:2]
+    path = cls.__RelativePath(path)
+    site = '%s:%d' % (path, line_no)
+    signature = '%s@%s' % (fulltype, site)
+    if len(signature) > MAX_SIGNATURE_LENGTH:
+      signature = 'hash:%s' % sha.new(signature).hexdigest()
+
+    return signature
+
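For example, the signature built above for a KeyError raised at line 26 of handlers/main.py (a hypothetical handler path) would be:

    exceptions.KeyError@handlers/main.py:26

Signatures longer than MAX_SIGNATURE_LENGTH are replaced by 'hash:' plus the SHA-1 hex digest of the full signature.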
+  @classmethod
+  def __GetURL(cls):
+    """Returns the URL of the page currently being served.
+
+    Returns:
+      The full URL of the page currently being served.
+    """
+    if os.environ['SERVER_PORT'] == '80':
+      scheme = 'http://'
+    else:
+      scheme = 'https://'
+    host = os.environ['SERVER_NAME']
+    script_name = urllib.quote(os.environ['SCRIPT_NAME'])
+    path_info = urllib.quote(os.environ['PATH_INFO'])
+    qs = os.environ.get('QUERY_STRING', '')
+    if qs:
+      qs = '?' + qs
+    return scheme + host + script_name + path_info + qs
+
+  def __GetFormatter(self):
+    """Returns the log formatter for this handler.
+
+    Returns:
+      The log formatter to use.
+    """
+    if self.formatter:
+      return self.formatter
+    else:
+      return logging._defaultFormatter
+
+  def emit(self, record):
+    """Log an error to the datastore, if applicable.
+
+    Args:
+      record: The logging.LogRecord object.
+        See http://docs.python.org/library/logging.html#logging.LogRecord
+    """
+    try:
+      if not record.exc_info:
+        return
+
+      signature = self.__GetSignature(record.exc_info)
+
+      if not memcache.add(signature, None, self.log_interval):
+        return
+
+      db.run_in_transaction_custom_retries(1, self.__EmitTx, signature,
+                                           record.exc_info)
+    except Exception:
+      self.handleError(record)
+
+  def __EmitTx(self, signature, exc_info):
+    """Run in a transaction to insert or update the record for this transaction.
+
+    Args:
+      signature: The signature for this exception.
+      exc_info: The exception info record.
+    """
+    today = datetime.date.today()
+    version = os.environ['CURRENT_VERSION_ID']
+    major_ver, minor_ver = version.rsplit('.', 1)
+    minor_ver = int(minor_ver)
+    key_name = ExceptionRecord.get_key_name(signature, version)
+
+    exrecord = ExceptionRecord.get_by_key_name(key_name)
+    if not exrecord:
+      exrecord = ExceptionRecord(
+          key_name=key_name,
+          signature=signature,
+          major_version=major_ver,
+          minor_version=minor_ver,
+          date=today,
+          stacktrace=self.__GetFormatter().formatException(exc_info),
+          http_method=os.environ['REQUEST_METHOD'],
+          url=self.__GetURL(),
+          handler=self.__RelativePath(os.environ['PATH_TRANSLATED']))
+
+    exrecord.count += 1
+    exrecord.put()
+
+
+def register_logger(logger=None):
+  """Attaches an ExceptionRecordingHandler to a logger (root if none given)."""
+  if not logger:
+    logger = logging.getLogger()
+  handler = ExceptionRecordingHandler()
+  logger.addHandler(handler)
+  return handler
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/ereporter/report_generator.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Generates and emails daily exception reports.
+
+See google/appengine/ext/ereporter/__init__.py for usage details.
+
+Valid query string arguments to the report_generator script include:
+delete:   Set to 'false' to prevent deletion of exception records from the
+          datastore after sending a report. Defaults to 'true'.
+debug:    Set to 'true' to return the report in the response instead of
+          emailing it.
+date:     The date to generate the report for, in yyyy-mm-dd format. Defaults to
+          yesterday's date. Useful for debugging.
+max_results: Maximum number of entries to include in a report.
+sender:   The email address to use as the sender. Must be an administrator.
+to:       If specified, send reports to this address. If not specified, all
+          admins are sent the report.
+versions: 'all' to report on all minor versions, or 'latest' for the latest.
+"""
+
+
+
+
+
+import datetime
+import itertools
+import os
+import re
+from xml.sax import saxutils
+
+from google.appengine.api import mail
+from google.appengine.ext import db
+from google.appengine.ext import ereporter
+from google.appengine.ext import webapp
+from google.appengine.ext.webapp import template
+from google.appengine.ext.webapp.util import run_wsgi_app
+
+
+def isTrue(val):
+  """Determines if a textual value represents 'true'.
+
+  Args:
+    val: A string, which may be 'true', 'yes', 't', '1' to indicate True.
+  Returns:
+    True or False
+  """
+  val = val.lower()
+  return val == 'true' or val == 't' or val == '1' or val == 'yes'
+
+
+class ReportGenerator(webapp.RequestHandler):
+  """Handler class to generate and email an exception report."""
+
+  DEFAULT_MAX_RESULTS = 100
+
+  def __init__(self, send_mail=mail.send_mail,
+               mail_admins=mail.send_mail_to_admins):
+    super(ReportGenerator, self).__init__()
+
+    self.send_mail = send_mail
+    self.send_mail_to_admins = mail_admins
+
+  def GetQuery(self, order=None):
+    """Creates a query object that will retrieve the appropriate exceptions.
+
+    Args:
+      order: Optional property name to order the query results by.
+
+    Returns:
+      A query to retrieve the exceptions required.
+    """
+    q = ereporter.ExceptionRecord.all()
+    q.filter('date =', self.yesterday)
+    q.filter('major_version =', self.major_version)
+    if self.version_filter.lower() == 'latest':
+      q.filter('minor_version =', self.minor_version)
+    if order:
+      q.order(order)
+    return q
+
+  def GenerateReport(self, exceptions):
+    """Generates an HTML exception report.
+
+    Args:
+      exceptions: A list of ExceptionRecord objects. This argument will be
+        modified by this function.
+    Returns:
+      An HTML exception report.
+    """
+    exceptions.sort(key=lambda e: (e.minor_version, -e.count))
+    versions = [(minor, list(excs)) for minor, excs
+                in itertools.groupby(exceptions, lambda e: e.minor_version)]
+
+    template_values = {
+        'version_filter': self.version_filter,
+        'version_count': len(versions),
+
+        'exception_count': sum(len(excs) for _, excs in versions),
+
+        'occurrence_count': sum(y.count for x in versions for y in x[1]),
+        'app_id': self.app_id,
+        'major_version': self.major_version,
+        'date': self.yesterday,
+        'versions': versions,
+    }
+    path = os.path.join(os.path.dirname(__file__), 'templates', 'report.html')
+    return template.render(path, template_values)
+
+  def SendReport(self, report):
+    """Emails an exception report.
+
+    Args:
+      report: A string containing the report to send.
+    """
+    subject = ('Daily exception report for app "%s", major version "%s"'
+               % (self.app_id, self.major_version))
+    report_text = saxutils.unescape(re.sub('<[^>]+>', '', report))
+    mail_args = {
+        'sender': self.sender,
+        'subject': subject,
+        'body': report_text,
+        'html': report,
+    }
+    if self.to:
+      mail_args['to'] = self.to
+      self.send_mail(**mail_args)
+    else:
+      self.send_mail_to_admins(**mail_args)
+
+  def get(self):
+    self.version_filter = self.request.GET.get('versions', 'all')
+    self.sender = self.request.GET['sender']
+    self.to = self.request.GET.get('to', None)
+    report_date = self.request.GET.get('date', None)
+    if report_date:
+      self.yesterday = datetime.date(*[int(x) for x in report_date.split('-')])
+    else:
+      self.yesterday = datetime.date.today() - datetime.timedelta(days=1)
+    self.app_id = os.environ['APPLICATION_ID']
+    version = os.environ['CURRENT_VERSION_ID']
+    self.major_version, self.minor_version = version.rsplit('.', 1)
+    self.minor_version = int(self.minor_version)
+    self.max_results = int(self.request.GET.get('max_results',
+                                                self.DEFAULT_MAX_RESULTS))
+    self.debug = isTrue(self.request.GET.get('debug', 'false'))
+    self.delete = isTrue(self.request.GET.get('delete', 'true'))
+
+    try:
+      exceptions = self.GetQuery(order='-minor_version').fetch(self.max_results)
+    except db.NeedIndexError:
+      exceptions = self.GetQuery().fetch(self.max_results)
+
+    if exceptions:
+      report = self.GenerateReport(exceptions)
+      if self.debug:
+        self.response.out.write(report)
+      else:
+        self.SendReport(report)
+
+      if self.delete:
+        db.delete(exceptions)
+
+
+application = webapp.WSGIApplication([('.*', ReportGenerator)])
+
+
+def main():
+  run_wsgi_app(application)
+
+
+if __name__ == '__main__':
+  main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/ereporter/templates/report.html	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,15 @@
+<!-- Unusual layout is to ensure template is useful with tags stripped, too -->
+<html><head><title>Daily exception report for app "{{app_id}}", major version "{{major_version}}".</title></head>
+<body><p>At least {{occurrence_count}} occurrences of {{exception_count}} exceptions across {{version_count}} versions.</p>
+{% for version in versions %}
+<h1>Minor version {{version.0}}</h1>
+{% for exception in version.1 %}
+<h2>{{exception.signature}} (at least {{exception.count}} occurrences)</h2>
+  <table><tr><th>Handler:</th> <td>{{exception.handler}}</td></tr>
+  <tr><th>URL:</th> <td>{{exception.http_method|escape}} {{exception.url|escape}}</td></tr>
+  <tr><th>Stacktrace:</th>
+
+<td><pre>{{exception.stacktrace|escape}}</pre></td></tr></table>
+
+
+{% endfor %}{% endfor %}</body></html>
\ No newline at end of file
--- a/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/gql/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -77,7 +77,7 @@
 
   The syntax for SELECT is fairly straightforward:
 
-  SELECT [* | __key__ ] FROM <entity>
+  SELECT [* | __key__ ] [FROM <entity>]
     [WHERE <condition> [AND <condition> ...]]
     [ORDER BY <property> [ASC | DESC] [, <property> [ASC | DESC] ...]]
     [LIMIT [<offset>,]<count>]
@@ -805,14 +805,16 @@
     Returns:
       True if parsing completed okay.
     """
-    self.__Expect('FROM')
-    entity = self.__AcceptRegex(self.__identifier_regex)
-    if entity:
-      self._entity = entity
-      return self.__Where()
+    if self.__Accept('FROM'):
+      kind = self.__AcceptRegex(self.__identifier_regex)
+      if kind:
+        self._entity = kind
+      else:
+        self.__Error('Identifier Expected')
+        return False
     else:
-      self.__Error('Identifier Expected')
-      return False
+      self._entity = None
+    return self.__Where()
 
   def __Where(self):
     """Consume the WHERE cluase.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/key_range/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,570 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Key range representation and splitting."""
+
+
+import os
+
+try:
+  import simplejson
+except ImportError:
+  simplejson = None
+
+from google.appengine.api import datastore
+from google.appengine.datastore import datastore_pb
+from google.appengine.ext import db
+
+
+class Error(Exception):
+  """Base class for exceptions in this module."""
+
+
+class KeyRangeError(Error):
+  """Error while trying to generate a KeyRange."""
+
+
+class SimplejsonUnavailableError(Error):
+  """Error while using json functionality whith unavailable simplejson."""
+
+class EmptyDbQuery(db.Query):
+  """A query that returns no results."""
+
+  def get(self):
+    return None
+
+  def fetch(self, limit=1000, offset=0):
+    return []
+
+  def count(self, limit=1000):
+    return 0
+
+
+class EmptyDatastoreQuery(datastore.Query):
+  """A query that returns no results."""
+
+  def __init__(self, kind):
+    datastore.Query.__init__(self, kind)
+
+  def _Run(self, *unused_args, **unused_kwargs):
+    empty_result_pb = datastore_pb.QueryResult()
+    empty_result_pb.set_cursor(0)
+    empty_result_pb.set_more_results(False)
+    return datastore.Iterator(empty_result_pb)
+
+  def Count(self, *unused_args, **unused_kwargs):
+    return 0
+
+  def Get(self, *unused_args, **unused_kwargs):
+    return []
+
+  def Next(self, *unused_args, **unused_kwargs):
+    return []
+
+
+class KeyRange(object):
+  """Represents a range of keys in the datastore.
+
+  A KeyRange object represents a key range
+    (key_start, include_start, key_end, include_end)
+  and a scan direction (KeyRange.DESC or KeyRange.ASC).
+  """
+
+  DESC = 'DESC'
+  ASC = 'ASC'
+
+  def __init__(self,
+               key_start=None,
+               key_end=None,
+               direction=None,
+               include_start=True,
+               include_end=True):
+    """Initialize a KeyRange object.
+
+    Args:
+      key_start: The starting key for this range.
+      key_end: The ending key for this range.
+      direction: The direction of the query for this range.
+      include_start: Whether the start key should be included in the range.
+      include_end: Whether the end key should be included in the range.
+    """
+    if direction is None:
+      direction = KeyRange.ASC
+    assert direction in (KeyRange.ASC, KeyRange.DESC)
+    self.direction = direction
+    self.key_start = key_start
+    self.key_end = key_end
+    self.include_start = include_start
+    self.include_end = include_end
+
+  def __str__(self):
+    if self.include_start:
+      left_side = '['
+    else:
+      left_side = '('
+    if self.include_end:
+      right_side = ']'
+    else:
+      right_side = ')'
+    return '%s%s%s-%s%s' % (self.direction, left_side, repr(self.key_start),
+                            repr(self.key_end), right_side)
+
+  def __repr__(self):
+    return ('key_range.KeyRange(key_start=%s,key_end=%s,direction=%s,'
+            'include_start=%s,include_end=%s)') % (repr(self.key_start),
+                                                   repr(self.key_end),
+                                                   repr(self.direction),
+                                                   repr(self.include_start),
+                                                   repr(self.include_end))
+
+  def filter_query(self, query):
+    """Add query filter to restrict to this key range.
+
+    Args:
+      query: A db.Query instance.
+
+    Returns:
+      The input query restricted to this key range or an empty query if
+      this key range is empty.
+    """
+    assert isinstance(query, db.Query)
+    if self.key_start == self.key_end and not (
+        self.include_start or self.include_end):
+      return EmptyDbQuery()
+    if self.include_start:
+      start_comparator = '>='
+    else:
+      start_comparator = '>'
+    if self.include_end:
+      end_comparator = '<='
+    else:
+      end_comparator = '<'
+    if self.key_start:
+      query.filter('__key__ %s' % start_comparator, self.key_start)
+    if self.key_end:
+      query.filter('__key__ %s' % end_comparator, self.key_end)
+    return query
+
+  def filter_datastore_query(self, query):
+    """Add query filter to restrict to this key range.
+
+    Args:
+      query: A datastore.Query instance.
+
+    Returns:
+      The input query restricted to this key range or an empty query if
+      this key range is empty.
+    """
+    assert isinstance(query, datastore.Query)
+    if self.key_start == self.key_end and not (
+        self.include_start or self.include_end):
+      return EmptyDatastoreQuery(query.kind)
+    if self.include_start:
+      start_comparator = '>='
+    else:
+      start_comparator = '>'
+    if self.include_end:
+      end_comparator = '<='
+    else:
+      end_comparator = '<'
+    if self.key_start:
+      query.update({'__key__ %s' % start_comparator: self.key_start})
+    if self.key_end:
+      query.update({'__key__ %s' % end_comparator: self.key_end})
+    return query
+
+  def __get_direction(self, asc, desc):
+    """Check that self.direction is in (KeyRange.ASC, KeyRange.DESC).
+
+    Args:
+      asc: Argument to return if self.direction is KeyRange.ASC
+      desc: Argument to return if self.direction is KeyRange.DESC
+
+    Returns:
+      asc or desc appropriately
+
+    Raises:
+      KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+    """
+    if self.direction == KeyRange.ASC:
+      return asc
+    elif self.direction == KeyRange.DESC:
+      return desc
+    else:
+      raise KeyRangeError('KeyRange direction unexpected: %s' % self.direction)
+
+  def make_directed_query(self, kind_class):
+    """Construct a query for this key range, including the scan direction.
+
+    Args:
+      kind_class: A kind implementation class.
+
+    Returns:
+      A db.Query instance.
+
+    Raises:
+      KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+    """
+    direction = self.__get_direction('', '-')
+    query = db.Query(kind_class)
+    query.order('%s__key__' % direction)
+
+    query = self.filter_query(query)
+    return query
+
+  def make_directed_datastore_query(self, kind):
+    """Construct a query for this key range, including the scan direction.
+
+    Args:
+      kind: A string.
+
+    Returns:
+      A datastore.Query instance.
+
+    Raises:
+      KeyRangeError: if self.direction is not in (KeyRange.ASC, KeyRange.DESC).
+    """
+    direction = self.__get_direction(datastore.Query.ASCENDING,
+                                     datastore.Query.DESCENDING)
+    query = datastore.Query(kind)
+    query.Order(('__key__', direction))
+
+    query = self.filter_datastore_query(query)
+    return query
+
+  def make_ascending_query(self, kind_class):
+    """Construct a query for this key range without setting the scan direction.
+
+    Args:
+      kind_class: A kind implementation class.
+
+    Returns:
+      A db.Query instance.
+    """
+    query = db.Query(kind_class)
+    query.order('__key__')
+
+    query = self.filter_query(query)
+    return query
+
+  def make_ascending_datastore_query(self, kind):
+    """Construct a query for this key range without setting the scan direction.
+
+    Args:
+      kind: A string.
+
+    Returns:
+      A datastore.Query instance.
+    """
+    query = datastore.Query(kind)
+    query.Order(('__key__', datastore.Query.ASCENDING))
+
+    query = self.filter_datastore_query(query)
+    return query
+
+  def split_range(self, batch_size=0):
+    """Split this key range into a list of at most two ranges.
+
+    This method attempts to split the key range approximately in half.
+    Numeric ranges are split in the middle into two equal ranges and
+    string ranges are split lexicographically in the middle.  If the
+    key range is smaller than batch_size it is left unsplit.
+
+    Note that splitting is done without knowledge of the distribution
+    of actual entities in the key range, so there is no guarantee (nor
+    any particular reason to believe) that the entities of the range
+    are evenly split.
+
+    Args:
+      batch_size: The maximum size of a key range that should not be split.
+
+    Returns:
+      A list of one or two key ranges covering the same space as this range.
+    """
+    key_start = self.key_start
+    key_end = self.key_end
+    include_start = self.include_start
+    include_end = self.include_end
+
+    key_pairs = []
+    if not key_start:
+      key_pairs.append((key_start, include_start, key_end, include_end,
+                        KeyRange.ASC))
+    elif not key_end:
+      key_pairs.append((key_start, include_start, key_end, include_end,
+                        KeyRange.DESC))
+    else:
+      key_split = KeyRange.split_keys(key_start, key_end, batch_size)
+      first_include_end = True
+      if key_split == key_start:
+        first_include_end = first_include_end and include_start
+
+      key_pairs.append((key_start, include_start,
+                        key_split, first_include_end,
+                        KeyRange.DESC))
+
+      second_include_end = include_end
+      if key_split == key_end:
+        second_include_end = False
+      key_pairs.append((key_split, False,
+                        key_end, second_include_end,
+                        KeyRange.ASC))
+
+    ranges = [KeyRange(key_start=start,
+                       include_start=include_start,
+                       key_end=end,
+                       include_end=include_end,
+                       direction=direction)
+              for (start, include_start, end, include_end, direction)
+              in key_pairs]
+
+    return ranges
+
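A sketch of sharding a scan with split_range (the 'Item' kind and key ids are hypothetical):

    from google.appengine.ext import db

    full_range = KeyRange(key_start=db.Key.from_path('Item', 1),
                          key_end=db.Key.from_path('Item', 1000))
    # With both endpoints set, split_range returns two sub-ranges that
    # together cover the same key space as full_range.
    lower, upper = full_range.split_range()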
+  def __cmp__(self, other):
+    """Compare two key ranges.
+
+    Key ranges with a value of None for key_start or key_end are always
+    considered to have include_start=False or include_end=False, respectively,
+    when comparing.  Since None indicates an unbounded side of the range,
+    the include specifier is meaningless.  The ordering generated is total
+    but somewhat arbitrary.
+
+    Args:
+      other: An object to compare to this one.
+
+    Returns:
+      -1: if this key range is less than other.
+      0:  if this key range is equal to other.
+      1: if this key range is greater than other.
+    """
+    if not isinstance(other, KeyRange):
+      return 1
+
+    self_list = [self.key_start, self.key_end, self.direction,
+                 self.include_start, self.include_end]
+    if not self.key_start:
+      self_list[3] = False
+    if not self.key_end:
+      self_list[4] = False
+
+    other_list = [other.key_start,
+                  other.key_end,
+                  other.direction,
+                  other.include_start,
+                  other.include_end]
+    if not other.key_start:
+      other_list[3] = False
+    if not other.key_end:
+      other_list[4] = False
+
+    return cmp(self_list, other_list)
+
+  @staticmethod
+  def bisect_string_range(start, end):
+    """Returns a string that is approximately in the middle of the range.
+
+    (start, end) is treated as a string range, and it is assumed
+    start <= end in the usual lexicographic string ordering. The output key
+    mid is guaranteed to satisfy start <= mid <= end.
+
+    The method proceeds by comparing initial characters of start and
+    end.  When the characters are equal, they are appended to the mid
+    string.  In the first place that the characters differ, the
+    difference characters are averaged and this average is appended to
+    the mid string.  If averaging resulted in rounding down, an
+    additional character is added to the mid string to make up for the
+    rounding down.  This extra step is necessary for correctness in
+    the case that the average of the two characters is equal to the
+    character in the start string.
+
+    This method makes the assumption that most keys are ascii and it
+    attempts to perform splitting within the ascii range when that
+    results in a valid split.
+
+    Args:
+      start: A string.
+      end: A string such that start <= end.
+
+    Returns:
+      A string mid such that start <= mid <= end.
+    """
+    if start == end:
+      return start
+    start += '\0'
+    end += '\0'
+    midpoint = []
+    expected_max = 127
+    for i in xrange(min(len(start), len(end))):
+      if start[i] == end[i]:
+        midpoint.append(start[i])
+      else:
+        ord_sum = ord(start[i]) + ord(end[i])
+        midpoint.append(unichr(ord_sum / 2))
+        if ord_sum % 2:
+          if len(start) > i + 1:
+            ord_start = ord(start[i+1])
+          else:
+            ord_start = 0
+          if ord_start < expected_max:
+            ord_split = (expected_max + ord_start) / 2
+          else:
+            ord_split = (0xFFFF + ord_start) / 2
+          midpoint.append(unichr(ord_split))
+        break
+    return ''.join(midpoint)
+
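For a simple ASCII range the method returns the expected midpoint:

    KeyRange.bisect_string_range('a', 'c')  # returns u'b'

(The ordinal sum of 'a' and 'c' is even, so no correction character is appended.)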
+  @staticmethod
+  def split_keys(key_start, key_end, batch_size):
+    """Return a key that is between key_start and key_end inclusive.
+
+    This method compares components of the ancestor paths of key_start
+    and key_end.  The first place in the path that differs is
+    approximately split in half.  If the kind components differ, a new
+    non-existent kind halfway between the two is used to split the
+    space. If the id_or_name components differ, then a new id_or_name
+    that is halfway between the two is selected.  If the lower
+    id_or_name is numeric and the upper id_or_name is a string, then
+    the minimum string key u'\0' is used as the split id_or_name.  The
+    key that is returned is the shared portion of the ancestor path
+    followed by the generated split component.
+
+    Args:
+      key_start: A db.Key instance for the lower end of a range.
+      key_end: A db.Key instance for the upper end of a range.
+      batch_size: The maximum size of a range that should not be split.
+
+    Returns:
+      A db.Key instance, k, such that key_start <= k <= key_end.
+    """
+    assert key_start.app() == key_end.app()
+    path1 = key_start.to_path()
+    path2 = key_end.to_path()
+    len1 = len(path1)
+    len2 = len(path2)
+    assert len1 % 2 == 0
+    assert len2 % 2 == 0
+    out_path = []
+    min_path_len = min(len1, len2) / 2
+    for i in xrange(min_path_len):
+      kind1 = path1[2*i]
+      kind2 = path2[2*i]
+
+      if kind1 != kind2:
+        split_kind = KeyRange.bisect_string_range(kind1, kind2)
+        out_path.append(split_kind)
+        out_path.append(unichr(0))
+        break
+
+      last = (len1 == len2 == 2*(i + 1))
+
+      id_or_name1 = path1[2*i + 1]
+      id_or_name2 = path2[2*i + 1]
+      id_or_name_split = KeyRange._split_id_or_name(
+          id_or_name1, id_or_name2, batch_size, last)
+      if id_or_name1 == id_or_name_split:
+        out_path.append(kind1)
+        out_path.append(id_or_name1)
+      else:
+        out_path.append(kind1)
+        out_path.append(id_or_name_split)
+        break
+
+    return db.Key.from_path(*out_path)
+
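For two numeric ids under the same hypothetical kind, the split lands in the middle:

    from google.appengine.ext import db

    KeyRange.split_keys(db.Key.from_path('Item', 1),
                        db.Key.from_path('Item', 101),
                        batch_size=0)
    # returns db.Key.from_path('Item', 51)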
+  @staticmethod
+  def _split_id_or_name(id_or_name1, id_or_name2, batch_size, maintain_batches):
+    """Return an id_or_name that is between id_or_name1 an id_or_name2.
+
+    Attempts to split the range [id_or_name1, id_or_name2] in half,
+    unless maintain_batches is true and the size of the range
+    [id_or_name1, id_or_name2] is less than or equal to batch_size.
+
+    Args:
+      id_or_name1: A number or string or the id_or_name component of a key.
+      id_or_name2: A number or string or the id_or_name component of a key.
+      batch_size: The range size that will not be split if maintain_batches
+        is true.
+      maintain_batches: A boolean for whether to keep small ranges intact.
+
+    Returns:
+      An id_or_name such that id_or_name1 <= id_or_name <= id_or_name2.
+    """
+    if (isinstance(id_or_name1, (int, long)) and
+        isinstance(id_or_name2, (int, long))):
+      if not maintain_batches or id_or_name2 - id_or_name1 > batch_size:
+        return (id_or_name1 + id_or_name2) / 2
+      else:
+        return id_or_name1
+    elif (isinstance(id_or_name1, basestring) and
+          isinstance(id_or_name2, basestring)):
+      return KeyRange.bisect_string_range(id_or_name1, id_or_name2)
+    else:
+      assert (isinstance(id_or_name1, (int, long)) and
+              isinstance(id_or_name2, basestring))
+      return unichr(0)
+
+  def to_json(self):
+    """Serialize KeyRange to json.
+
+    Returns:
+      A string with the KeyRange json representation.
+    """
+    if simplejson is None:
+      raise SimplejsonUnavailableError(
+          "JSON functionality requires simplejson to be available")
+
+    def key_to_str(key):
+      if key:
+        return str(key)
+      else:
+        return None
+
+    return simplejson.dumps({
+        "direction": self.direction,
+        "key_start": key_to_str(self.key_start),
+        "key_end": key_to_str(self.key_end),
+        "include_start": self.include_start,
+        "include_end": self.include_end,
+        }, sort_keys=True)
+
+  @staticmethod
+  def from_json(json_str):
+    """Deserialize KeyRange from its json representation.
+
+    Args:
+      json_str: A string with the json representation created by to_json.
+
+    Returns:
+      A deserialized KeyRange instance.
+    """
+    if simplejson is None:
+      raise SimplejsonUnavailableError(
+          "JSON functionality requires simplejson to be available")
+
+    def key_from_str(key_str):
+      if key_str:
+        return db.Key(key_str)
+      else:
+        return None
+
+    json = simplejson.loads(json_str)
+    return KeyRange(key_from_str(json["key_start"]),
+                    key_from_str(json["key_end"]),
+                    json["direction"],
+                    json["include_start"],
+                    json["include_end"])
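Together, to_json and from_json allow a range to ride along in a task payload; a round-trip sketch (requires simplejson; the keys are hypothetical):

    kr = KeyRange(key_start=db.Key.from_path('Item', 1),
                  key_end=db.Key.from_path('Item', 1000))
    restored = KeyRange.from_json(kr.to_json())
    assert restored == kr  # equality via the __cmp__ defined above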
--- a/thirdparty/google_appengine/google/appengine/ext/remote_api/handler.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/handler.py	Sun Sep 06 23:31:53 2009 +0200
@@ -48,7 +48,13 @@
 from google.appengine.api import api_base_pb
 from google.appengine.api import apiproxy_stub
 from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore_errors
+from google.appengine.api import mail_service_pb
+from google.appengine.api import urlfetch_service_pb
 from google.appengine.api import users
+from google.appengine.api.capabilities import capability_service_pb
+from google.appengine.api.images import images_service_pb
+from google.appengine.api.memcache import memcache_service_pb
 from google.appengine.datastore import datastore_pb
 from google.appengine.ext import webapp
 from google.appengine.ext.remote_api import remote_api_pb
@@ -76,6 +82,10 @@
     runquery_response = datastore_pb.QueryResult()
     apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery',
                                    request, runquery_response)
+    if runquery_response.result_size() > 0:
+      response.CopyFrom(runquery_response)
+      return
+
     next_request = datastore_pb.NextRequest()
     next_request.mutable_cursor().CopyFrom(runquery_response.cursor())
     next_request.set_count(request.limit())
@@ -154,18 +164,52 @@
 
 
 SERVICE_PB_MAP = {
+    'capability_service': {
+        'IsEnabled': (capability_service_pb.IsEnabledRequest,
+                      capability_service_pb.IsEnabledResponse),
+    },
     'datastore_v3': {
-        'Get': (datastore_pb.GetRequest, datastore_pb.GetResponse),
-        'Put': (datastore_pb.PutRequest, datastore_pb.PutResponse),
-        'Delete': (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
-        'Count': (datastore_pb.Query, api_base_pb.Integer64Proto),
+        'Get':        (datastore_pb.GetRequest, datastore_pb.GetResponse),
+        'Put':        (datastore_pb.PutRequest, datastore_pb.PutResponse),
+        'Delete':     (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
+        'Count':      (datastore_pb.Query, api_base_pb.Integer64Proto),
         'GetIndices': (api_base_pb.StringProto, datastore_pb.CompositeIndices),
     },
+    'images': {
+        'Transform': (images_service_pb.ImagesTransformRequest,
+                      images_service_pb.ImagesTransformResponse),
+        'Composite': (images_service_pb.ImagesCompositeRequest,
+                      images_service_pb.ImagesCompositeResponse),
+        'Histogram': (images_service_pb.ImagesHistogramRequest,
+                      images_service_pb.ImagesHistogramResponse),
+    },
+    'mail': {
+        'Send':         (mail_service_pb.MailMessage, api_base_pb.VoidProto),
+        'SendToAdmins': (mail_service_pb.MailMessage, api_base_pb.VoidProto),
+    },
+    'memcache': {
+        'Get':       (memcache_service_pb.MemcacheGetRequest,
+                      memcache_service_pb.MemcacheGetResponse),
+        'Set':       (memcache_service_pb.MemcacheSetRequest,
+                      memcache_service_pb.MemcacheSetResponse),
+        'Delete':    (memcache_service_pb.MemcacheDeleteRequest,
+                      memcache_service_pb.MemcacheDeleteResponse),
+        'Increment': (memcache_service_pb.MemcacheIncrementRequest,
+                      memcache_service_pb.MemcacheIncrementResponse),
+        'FlushAll':  (memcache_service_pb.MemcacheFlushRequest,
+                      memcache_service_pb.MemcacheFlushResponse),
+        'Stats':     (memcache_service_pb.MemcacheStatsRequest,
+                      memcache_service_pb.MemcacheStatsResponse),
+    },
     'remote_datastore': {
-        'RunQuery': (datastore_pb.Query, datastore_pb.QueryResult),
+        'RunQuery':    (datastore_pb.Query, datastore_pb.QueryResult),
         'Transaction': (remote_api_pb.TransactionRequest,
-                             datastore_pb.PutResponse),
-        'GetIDs': (remote_api_pb.PutRequest, datastore_pb.PutResponse),
+                        datastore_pb.PutResponse),
+        'GetIDs':      (remote_api_pb.PutRequest, datastore_pb.PutResponse),
+    },
+    'urlfetch': {
+        'Fetch': (urlfetch_service_pb.URLFetchRequest,
+                  urlfetch_service_pb.URLFetchResponse),
     },
 }
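The map above pairs each remotely callable method with its (request, response) protobuf classes; dispatch presumably proceeds along these lines (a sketch, not the handler's literal code):

    request_class, response_class = SERVICE_PB_MAP[service][method]
    request = request_class(contents)   # decode the raw request bytes
    response = response_class()
    apiproxy_stub_map.MakeSyncCall(service, method, request, response)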
 
@@ -187,6 +231,7 @@
     elif 'X-appcfg-api-version' not in self.request.headers:
       self.response.set_status(403)
       self.response.out.write("This request did not contain a necessary header")
+      self.response.headers['Content-Type'] = 'text/plain'
       return False
     return True
 
@@ -202,6 +247,7 @@
         'rtok': rtok
         }
 
+    self.response.headers['Content-Type'] = 'text/plain'
     self.response.out.write(yaml.dump(app_info))
 
   def post(self):
@@ -221,6 +267,10 @@
       logging.exception('Exception while handling %s', request)
       self.response.set_status(200)
       response.mutable_exception().set_contents(pickle.dumps(e))
+      if isinstance(e, datastore_errors.Error):
+        application_error = response.mutable_application_error()
+        application_error.set_code(e.application_error)
+        application_error.set_detail(e.error_detail)
     self.response.out.write(response.Encode())
 
   def ExecuteRequest(self, request):
--- a/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_pb.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_pb.py	Sun Sep 06 23:31:53 2009 +0200
@@ -155,29 +155,144 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kservice_name = 2
   kmethod = 3
   krequest = 4
 
-  _TEXT = (
-   "ErrorCode",
-   None,
-   "service_name",
-   "method",
-   "request",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    2: "service_name",
+    3: "method",
+    4: "request",
+  }, 4)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+  _STYLE = """"""
+  _STYLE_CONTENT_TYPE = """"""
+class ApplicationError(ProtocolBuffer.ProtocolMessage):
+  has_code_ = 0
+  code_ = 0
+  has_detail_ = 0
+  detail_ = ""
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+  def code(self): return self.code_
+
+  def set_code(self, x):
+    self.has_code_ = 1
+    self.code_ = x
+
+  def clear_code(self):
+    if self.has_code_:
+      self.has_code_ = 0
+      self.code_ = 0
+
+  def has_code(self): return self.has_code_
+
+  def detail(self): return self.detail_
+
+  def set_detail(self, x):
+    self.has_detail_ = 1
+    self.detail_ = x
+
+  def clear_detail(self):
+    if self.has_detail_:
+      self.has_detail_ = 0
+      self.detail_ = ""
+
+  def has_detail(self): return self.has_detail_
+
+
+  def MergeFrom(self, x):
+    assert x is not self
+    if (x.has_code()): self.set_code(x.code())
+    if (x.has_detail()): self.set_detail(x.detail())
+
+  def Equals(self, x):
+    if x is self: return 1
+    if self.has_code_ != x.has_code_: return 0
+    if self.has_code_ and self.code_ != x.code_: return 0
+    if self.has_detail_ != x.has_detail_: return 0
+    if self.has_detail_ and self.detail_ != x.detail_: return 0
+    return 1
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.MAX_TYPE,
+  def IsInitialized(self, debug_strs=None):
+    initialized = 1
+    if (not self.has_code_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: code not set.')
+    if (not self.has_detail_):
+      initialized = 0
+      if debug_strs is not None:
+        debug_strs.append('Required field: detail not set.')
+    return initialized
 
-   ProtocolBuffer.Encoder.STRING,
+  def ByteSize(self):
+    n = 0
+    n += self.lengthVarInt64(self.code_)
+    n += self.lengthString(len(self.detail_))
+    return n + 2
+
+  def Clear(self):
+    self.clear_code()
+    self.clear_detail()
+
+  def OutputUnchecked(self, out):
+    out.putVarInt32(8)
+    out.putVarInt32(self.code_)
+    out.putVarInt32(18)
+    out.putPrefixedString(self.detail_)
 
-   ProtocolBuffer.Encoder.STRING,
+  def TryMerge(self, d):
+    while d.avail() > 0:
+      tt = d.getVarInt32()
+      if tt == 8:
+        self.set_code(d.getVarInt32())
+        continue
+      if tt == 18:
+        self.set_detail(d.getPrefixedString())
+        continue
+      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      d.skipData(tt)
+
+
+  def __str__(self, prefix="", printElemNumber=0):
+    res=""
+    if self.has_code_: res+=prefix+("code: %s\n" % self.DebugFormatInt32(self.code_))
+    if self.has_detail_: res+=prefix+("detail: %s\n" % self.DebugFormatString(self.detail_))
+    return res
 
-   ProtocolBuffer.Encoder.STRING,
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+  kcode = 1
+  kdetail = 2
 
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "code",
+    2: "detail",
+  }, 2)
+
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.NUMERIC,
+    2: ProtocolBuffer.Encoder.STRING,
+  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -186,6 +301,10 @@
   response_ = None
   has_exception_ = 0
   exception_ = None
+  has_application_error_ = 0
+  application_error_ = None
+  has_java_exception_ = 0
+  java_exception_ = None
 
   def __init__(self, contents=None):
     self.lazy_init_lock_ = thread.allocate_lock()
@@ -227,11 +346,49 @@
 
   def has_exception(self): return self.has_exception_
 
+  def application_error(self):
+    if self.application_error_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.application_error_ is None: self.application_error_ = ApplicationError()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.application_error_
+
+  def mutable_application_error(self): self.has_application_error_ = 1; return self.application_error()
+
+  def clear_application_error(self):
+    if self.has_application_error_:
+      self.has_application_error_ = 0;
+      if self.application_error_ is not None: self.application_error_.Clear()
+
+  def has_application_error(self): return self.has_application_error_
+
+  def java_exception(self):
+    if self.java_exception_ is None:
+      self.lazy_init_lock_.acquire()
+      try:
+        if self.java_exception_ is None: self.java_exception_ = RawMessage()
+      finally:
+        self.lazy_init_lock_.release()
+    return self.java_exception_
+
+  def mutable_java_exception(self): self.has_java_exception_ = 1; return self.java_exception()
+
+  def clear_java_exception(self):
+    if self.has_java_exception_:
+      self.has_java_exception_ = 0;
+      if self.java_exception_ is not None: self.java_exception_.Clear()
+
+  def has_java_exception(self): return self.has_java_exception_
+
 
   def MergeFrom(self, x):
     assert x is not self
     if (x.has_response()): self.mutable_response().MergeFrom(x.response())
     if (x.has_exception()): self.mutable_exception().MergeFrom(x.exception())
+    if (x.has_application_error()): self.mutable_application_error().MergeFrom(x.application_error())
+    if (x.has_java_exception()): self.mutable_java_exception().MergeFrom(x.java_exception())
 
   def Equals(self, x):
     if x is self: return 1
@@ -239,23 +396,33 @@
     if self.has_response_ and self.response_ != x.response_: return 0
     if self.has_exception_ != x.has_exception_: return 0
     if self.has_exception_ and self.exception_ != x.exception_: return 0
+    if self.has_application_error_ != x.has_application_error_: return 0
+    if self.has_application_error_ and self.application_error_ != x.application_error_: return 0
+    if self.has_java_exception_ != x.has_java_exception_: return 0
+    if self.has_java_exception_ and self.java_exception_ != x.java_exception_: return 0
     return 1
 
   def IsInitialized(self, debug_strs=None):
     initialized = 1
     if (self.has_response_ and not self.response_.IsInitialized(debug_strs)): initialized = 0
     if (self.has_exception_ and not self.exception_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_application_error_ and not self.application_error_.IsInitialized(debug_strs)): initialized = 0
+    if (self.has_java_exception_ and not self.java_exception_.IsInitialized(debug_strs)): initialized = 0
     return initialized
 
   def ByteSize(self):
     n = 0
     if (self.has_response_): n += 1 + self.lengthString(self.response_.ByteSize())
     if (self.has_exception_): n += 1 + self.lengthString(self.exception_.ByteSize())
+    if (self.has_application_error_): n += 1 + self.lengthString(self.application_error_.ByteSize())
+    if (self.has_java_exception_): n += 1 + self.lengthString(self.java_exception_.ByteSize())
     return n + 0
 
   def Clear(self):
     self.clear_response()
     self.clear_exception()
+    self.clear_application_error()
+    self.clear_java_exception()
 
   def OutputUnchecked(self, out):
     if (self.has_response_):
@@ -266,6 +433,14 @@
       out.putVarInt32(18)
       out.putVarInt32(self.exception_.ByteSize())
       self.exception_.OutputUnchecked(out)
+    if (self.has_application_error_):
+      out.putVarInt32(26)
+      out.putVarInt32(self.application_error_.ByteSize())
+      self.application_error_.OutputUnchecked(out)
+    if (self.has_java_exception_):
+      out.putVarInt32(34)
+      out.putVarInt32(self.java_exception_.ByteSize())
+      self.java_exception_.OutputUnchecked(out)
 
   def TryMerge(self, d):
     while d.avail() > 0:
@@ -282,6 +457,18 @@
         d.skip(length)
         self.mutable_exception().TryMerge(tmp)
         continue
+      if tt == 26:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_application_error().TryMerge(tmp)
+        continue
+      if tt == 34:
+        length = d.getVarInt32()
+        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+        d.skip(length)
+        self.mutable_java_exception().TryMerge(tmp)
+        continue
       if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
       d.skipData(tt)
 
@@ -296,24 +483,40 @@
       res+=prefix+"exception <\n"
       res+=self.exception_.__str__(prefix + "  ", printElemNumber)
       res+=prefix+">\n"
+    if self.has_application_error_:
+      res+=prefix+"application_error <\n"
+      res+=self.application_error_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
+    if self.has_java_exception_:
+      res+=prefix+"java_exception <\n"
+      res+=self.java_exception_.__str__(prefix + "  ", printElemNumber)
+      res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kresponse = 1
   kexception = 2
-
-  _TEXT = (
-   "ErrorCode",
-   "response",
-   "exception",
-  )
+  kapplication_error = 3
+  kjava_exception = 4
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STRING,
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "response",
+    2: "exception",
+    3: "application_error",
+    4: "java_exception",
+  }, 4)
 
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STRING,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
@@ -572,36 +775,35 @@
       res+=prefix+">\n"
     return res
 
+
+  def _BuildTagLookupTable(sparse, maxtag, default=None):
+    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
   kPreconditionGroup = 1
   kPreconditionkey = 2
   kPreconditionhash = 3
   kputs = 4
   kdeletes = 5
 
-  _TEXT = (
-   "ErrorCode",
-   "Precondition",
-   "key",
-   "hash",
-   "puts",
-   "deletes",
-  )
+  _TEXT = _BuildTagLookupTable({
+    0: "ErrorCode",
+    1: "Precondition",
+    2: "key",
+    3: "hash",
+    4: "puts",
+    5: "deletes",
+  }, 5)
 
-  _TYPES = (
-   ProtocolBuffer.Encoder.NUMERIC,
-   ProtocolBuffer.Encoder.STARTGROUP,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-   ProtocolBuffer.Encoder.STRING,
-
-  )
+  _TYPES = _BuildTagLookupTable({
+    0: ProtocolBuffer.Encoder.NUMERIC,
+    1: ProtocolBuffer.Encoder.STARTGROUP,
+    2: ProtocolBuffer.Encoder.STRING,
+    3: ProtocolBuffer.Encoder.STRING,
+    4: ProtocolBuffer.Encoder.STRING,
+    5: ProtocolBuffer.Encoder.STRING,
+  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
 
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['Request','Response','TransactionRequest','TransactionRequest_Precondition']
+__all__ = ['Request','ApplicationError','Response','TransactionRequest','TransactionRequest_Precondition']
--- a/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/remote_api_stub.py	Sun Sep 06 23:31:53 2009 +0200
@@ -71,6 +71,8 @@
 import threading
 import yaml
 
+from google.appengine.api import datastore
+from google.appengine.api import apiproxy_rpc
 from google.appengine.api import apiproxy_stub_map
 from google.appengine.datastore import datastore_pb
 from google.appengine.ext.remote_api import remote_api_pb
@@ -86,6 +88,10 @@
   """Exception for configuration errors."""
 
 
+class UnknownJavaServerError(Error):
+  """Exception for exceptions returned from a Java remote_api handler."""
+
+
 def GetUserAgent():
   """Determines the value of the 'User-agent' header to use for HTTP requests.
 
@@ -136,20 +142,41 @@
     self._server = server
     self._path = path
 
+  def _PreHookHandler(self, service, call, request, response):
+    pass
+
+  def _PostHookHandler(self, service, call, request, response):
+    pass
+
   def MakeSyncCall(self, service, call, request, response):
+    self._PreHookHandler(service, call, request, response)
     request_pb = remote_api_pb.Request()
     request_pb.set_service_name(service)
     request_pb.set_method(call)
     request_pb.mutable_request().set_contents(request.Encode())
 
     response_pb = remote_api_pb.Response()
-    response_pb.ParseFromString(self._server.Send(self._path,
-                                                  request_pb.Encode()))
+    encoded_request = request_pb.Encode()
+    encoded_response = self._server.Send(self._path, encoded_request)
+    response_pb.ParseFromString(encoded_response)
 
-    if response_pb.has_exception():
-      raise pickle.loads(response_pb.exception().contents())
-    else:
-      response.ParseFromString(response_pb.response().contents())
+    try:
+      if response_pb.has_application_error():
+        error_pb = response_pb.application_error()
+        raise datastore._ToDatastoreError(
+            apiproxy_errors.ApplicationError(error_pb.code(), error_pb.detail()))
+      elif response_pb.has_exception():
+        raise pickle.loads(response_pb.exception().contents())
+      elif response_pb.has_java_exception():
+        raise UnknownJavaServerError("An unknown error has occurred in the "
+                                     "Java remote_api handler for this call.")
+      else:
+        response.ParseFromString(response_pb.response().contents())
+    finally:
+      self._PostHookHandler(service, call, request, response)
+
+  def CreateRPC(self):
+    return apiproxy_rpc.RPC(stub=self)
 
 
 class RemoteDatastoreStub(RemoteStub):
@@ -192,10 +219,12 @@
       self.__next_local_cursor += 1
     finally:
       self.__local_cursor_lock.release()
+    query.clear_count()
     self.__queries[cursor_id] = query
 
     query_result.mutable_cursor().set_cursor(cursor_id)
     query_result.set_more_results(True)
+    query_result.set_keys_only(query.keys_only())
 
   def _Dynamic_Next(self, next_request, query_result):
     cursor = next_request.cursor().cursor()
@@ -214,6 +243,7 @@
       request.set_limit(min(request.limit(), next_request.count()))
     else:
       request.set_limit(next_request.count())
+    request.set_count(request.limit())
 
     super(RemoteDatastoreStub, self).MakeSyncCall(
         'remote_datastore', 'RunQuery', request, query_result)
@@ -229,8 +259,8 @@
     if get_request.has_transaction():
       txid = get_request.transaction().handle()
       txdata = self.__transactions[txid]
-      assert (txdata.thread_id == thread.get_ident(),
-              "Transactions are single-threaded.")
+      assert (txdata.thread_id ==
+          thread.get_ident()), "Transactions are single-threaded."
 
       keys = [(k, k.Encode()) for k in get_request.key_list()]
 
@@ -296,8 +326,8 @@
 
       txid = put_request.transaction().handle()
       txdata = self.__transactions[txid]
-      assert (txdata.thread_id == thread.get_ident(),
-              "Transactions are single-threaded.")
+      assert (txdata.thread_id ==
+          thread.get_ident()), "Transactions are single-threaded."
       for entity in entities:
         txdata.entities[entity.key().Encode()] = (entity.key(), entity)
         put_response.add_key().CopyFrom(entity.key())
@@ -309,8 +339,8 @@
     if delete_request.has_transaction():
       txid = delete_request.transaction().handle()
       txdata = self.__transactions[txid]
-      assert (txdata.thread_id == thread.get_ident(),
-              "Transactions are single-threaded.")
+      assert (txdata.thread_id ==
+          thread.get_ident()), "Transactions are single-threaded."
       for key in delete_request.key_list():
         txdata.entities[key.Encode()] = (key, None)
     else:
@@ -335,8 +365,8 @@
           'Transaction %d not found.' % (txid,))
 
     txdata = self.__transactions[txid]
-    assert (txdata.thread_id == thread.get_ident(),
-            "Transactions are single-threaded.")
+    assert (txdata.thread_id ==
+        thread.get_ident()), "Transactions are single-threaded."
     del self.__transactions[txid]
 
     tx = remote_api_pb.TransactionRequest()
@@ -367,8 +397,8 @@
             datastore_pb.Error.BAD_REQUEST,
             'Transaction %d not found.' % (txid,))
 
-      assert (txdata[txid].thread_id == thread.get_ident(),
-              "Transactions are single-threaded.")
+      assert (txdata[txid].thread_id ==
+          thread.get_ident()), "Transactions are single-threaded."
       del self.__transactions[txid]
     finally:
       self.__local_tx_lock.release()
@@ -386,14 +416,14 @@
         'The remote datastore does not support index manipulation.')
 
 
-def ConfigureRemoteDatastore(app_id,
-                             path,
-                             auth_func,
-                             servername=None,
-                             rpc_server_factory=appengine_rpc.HttpRpcServer,
-                             rtok=None,
-                             secure=False):
-  """Does necessary setup to allow easy remote access to an AppEngine datastore.
+def ConfigureRemoteApi(app_id,
+                       path,
+                       auth_func,
+                       servername=None,
+                       rpc_server_factory=appengine_rpc.HttpRpcServer,
+                       rtok=None,
+                       secure=False):
+  """Does necessary setup to allow easy remote access to App Engine APIs.
 
   Either servername must be provided or app_id must not be None.  If app_id
   is None and a servername is provided, this function will send a request
@@ -438,10 +468,32 @@
     if not app_info or 'rtok' not in app_info or 'app_id' not in app_info:
       raise ConfigurationError('Error parsing app_id lookup response')
     if app_info['rtok'] != rtok:
-      raise ConfigurationError('Token validation failed during app_id lookup.')
+      raise ConfigurationError('Token validation failed during app_id lookup. '
+                               '(sent %s, got %s)' % (repr(rtok),
+                                                      repr(app_info['rtok'])))
     app_id = app_info['app_id']
 
   os.environ['APPLICATION_ID'] = app_id
   apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
-  stub = RemoteDatastoreStub(server, path)
-  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)
+  datastore_stub = RemoteDatastoreStub(server, path)
+  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore_stub)
+  stub = RemoteStub(server, path)
+  for service in ['capability_service', 'images', 'mail', 'memcache',
+                  'urlfetch']:
+    apiproxy_stub_map.apiproxy.RegisterStub(service, stub)
+
+
+def MaybeInvokeAuthentication():
+  """Sends an empty request through to the configured end-point.
+
+  If authentication is necessary, this will cause the rpc_server to invoke
+  interactive authentication.
+  """
+  datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
+  if isinstance(datastore_stub, RemoteStub):
+    datastore_stub._server.Send(datastore_stub._path, payload=None)
+  else:
+    raise ConfigurationError('remote_api is not configured.')
+
+
+ConfigureRemoteDatastore = ConfigureRemoteApi
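
A minimal sketch of configuring the renamed entry point from a local script, assuming a remote_api handler is already mapped at /remote_api in the target app (the app id and credential prompt below are illustrative):

  import getpass
  from google.appengine.ext.remote_api import remote_api_stub

  def auth_func():
    return (raw_input('Username:'), getpass.getpass('Password:'))

  remote_api_stub.ConfigureRemoteApi('my-app', '/remote_api', auth_func)
  remote_api_stub.MaybeInvokeAuthentication()  # force the auth round-trip now

After this, datastore_v3, capability_service, images, mail, memcache and
urlfetch calls are routed through the remote end-point; the old
ConfigureRemoteDatastore name remains available as an alias.
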
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/remote_api/throttle.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,637 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Client-side transfer throttling for use with remote_api_stub.
+
+This module is used to configure rate limiting for programs accessing
+App Engine services through remote_api.
+
+See the Throttle class for more information.
+
+An example with throttling:
+---
+from google.appengine.ext import db
+from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.ext.remote_api import throttle
+from myapp import models
+import getpass
+import threading
+
+def auth_func():
+  return (raw_input('Username:'), getpass.getpass('Password:'))
+
+remote_api_stub.ConfigureRemoteDatastore('my-app', '/remote_api', auth_func)
+full_throttle = throttle.DefaultThrottle(multiplier=1.0)
+throttle.ThrottleRemoteDatastore(full_throttle)
+
+# Register any threads that will be using the datastore with the throttler
+full_throttle.Register(threading.currentThread())
+
+# Now you can access the remote datastore just as if your code was running on
+# App Engine, and you don't need to worry about exceeding quota limits!
+
+houses = models.House.all().fetch(100)
+for a_house in houses:
+  a_house.doors += 1
+db.put(houses)
+---
+
+This example limits usage to the default free quota levels.  The multiplier
+kwarg to throttle.DefaultThrottle can be used to scale the throttle levels
+higher or lower.
+
+Throttles can also be constructed directly for more control over the limits
+for different operations.  See the Throttle class and the constants following
+it for details.
+"""
+
+
+import logging
+import threading
+import time
+import urllib2
+import urlparse
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.tools import appengine_rpc
+
+logger = logging.getLogger('google.appengine.ext.remote_api.throttle')
+
+MINIMUM_THROTTLE_SLEEP_DURATION = 0.001
+
+
+class Error(Exception):
+  """Base class for errors in this module."""
+
+
+class ThreadNotRegisteredError(Error):
+  """An unregistered thread has accessed the throttled datastore stub."""
+
+
+class UnknownThrottleNameError(Error):
+  """A transfer was added for an unknown throttle name."""
+
+
+def InterruptibleSleep(sleep_time):
+  """Puts thread to sleep, checking this threads exit_flag four times a second.
+
+  Args:
+    sleep_time: Time to sleep.
+  """
+  slept = 0.0
+  epsilon = .0001
+  thread = threading.currentThread()
+  while slept < sleep_time - epsilon:
+    remaining = sleep_time - slept
+    this_sleep_time = min(remaining, 0.25)
+    time.sleep(this_sleep_time)
+    slept += this_sleep_time
+    if hasattr(thread, 'exit_flag') and thread.exit_flag:
+      return
+
+
+class Throttle(object):
+  """A base class for upload rate throttling.
+
+  Transferring a large number of entities too quickly could trigger
+  quota limits and cause the transfer process to halt.  In order to
+  stay within the application's quota, we throttle the data transfer
+  to a specified limit (across all transfer threads).
+
+  This class tracks a moving average of some aspect of the transfer
+  rate (bandwidth, records per second, http connections per
+  second). It keeps two windows of counts of bytes transferred, on a
+  per-thread basis. One block is the "current" block, and the other is
+  the "prior" block. It will rotate the counts from current to prior
+  when ROTATE_PERIOD has passed.  Thus, the current block will
+  represent from 0 seconds to ROTATE_PERIOD seconds of activity
+  (determined by: time.time() - self.last_rotate).  The prior block
+  will always represent a full ROTATE_PERIOD.
+
+  Sleeping is performed just before a transfer of another block, and is
+  based on the counts transferred *before* the next transfer. It really
+  does not matter how much will be transferred, but only that for all the
+  data transferred SO FAR that we have interspersed enough pauses to
+  ensure the aggregate transfer rate is within the specified limit.
+
+  These counts are maintained on a per-thread basis, so we do not require
+  any interlocks around incrementing the counts. There IS an interlock on
+  the rotation of the counts because we do not want multiple threads to
+  multiply-rotate the counts.
+
+  There are various race conditions in the computation and collection
+  of these counts. We do not require precise values, but simply to
+  keep the overall transfer within the bandwidth limits. If a given
+  pause is a little short, or a little long, then the aggregate delays
+  will be correct.
+  """
+
+  ROTATE_PERIOD = 600
+
+  def __init__(self,
+               get_time=time.time,
+               thread_sleep=InterruptibleSleep,
+               layout=None):
+    self.get_time = get_time
+    self.thread_sleep = thread_sleep
+
+    self.start_time = get_time()
+    self.transferred = {}
+    self.prior_block = {}
+    self.totals = {}
+    self.throttles = {}
+
+    self.last_rotate = {}
+    self.rotate_mutex = {}
+    if layout:
+      self.AddThrottles(layout)
+
+  def AddThrottle(self, name, limit):
+    self.throttles[name] = limit
+    self.transferred[name] = {}
+    self.prior_block[name] = {}
+    self.totals[name] = {}
+    self.last_rotate[name] = self.get_time()
+    self.rotate_mutex[name] = threading.Lock()
+
+  def AddThrottles(self, layout):
+    for key, value in layout.iteritems():
+      self.AddThrottle(key, value)
+
+  def Register(self, thread):
+    """Register this thread with the throttler."""
+    thread_id = id(thread)
+    for throttle_name in self.throttles.iterkeys():
+      self.transferred[throttle_name][thread_id] = 0
+      self.prior_block[throttle_name][thread_id] = 0
+      self.totals[throttle_name][thread_id] = 0
+
+  def VerifyThrottleName(self, throttle_name):
+    if throttle_name not in self.throttles:
+      raise UnknownThrottleNameError('%s is not a registered throttle' %
+                                     throttle_name)
+
+  def AddTransfer(self, throttle_name, token_count):
+    """Add a count to the amount this thread has transferred.
+
+    Each time a thread transfers some data, it should call this method to
+    note the amount sent. The counts may be rotated if sufficient time
+    has passed since the last rotation.
+
+    Args:
+      throttle_name: The name of the throttle to add to.
+      token_count: The number to add to the throttle counter.
+    """
+    self.VerifyThrottleName(throttle_name)
+    transferred = self.transferred[throttle_name]
+    try:
+      transferred[id(threading.currentThread())] += token_count
+    except KeyError:
+      thread = threading.currentThread()
+      raise ThreadNotRegisteredError(
+          'Unregistered thread accessing throttled datastore stub: id = %s\n'
+          'name = %s' % (id(thread), thread.getName()))
+
+    if self.last_rotate[throttle_name] + self.ROTATE_PERIOD < self.get_time():
+      self._RotateCounts(throttle_name)
+
+  def Sleep(self, throttle_name=None):
+    """Possibly sleep in order to limit the transfer rate.
+
+    Note that we sleep based on *prior* transfers rather than what we
+    may be about to transfer. The next transfer could put us under/over
+    and that will be rectified *after* that transfer. Net result is that
+    the average transfer rate will remain within bounds. Spiky behavior
+    or uneven rates among the threads could possibly bring the transfer
+    rate above the requested limit for short durations.
+
+    Args:
+      throttle_name: The name of the throttle to sleep on.  If None or
+        omitted, then sleep on all throttles.
+    """
+    if throttle_name is None:
+      for throttle_name in self.throttles:
+        self.Sleep(throttle_name=throttle_name)
+      return
+
+    self.VerifyThrottleName(throttle_name)
+
+    thread = threading.currentThread()
+
+    while True:
+      duration = self.get_time() - self.last_rotate[throttle_name]
+
+      total = 0
+      for count in self.prior_block[throttle_name].values():
+        total += count
+
+      if total:
+        duration += self.ROTATE_PERIOD
+
+      for count in self.transferred[throttle_name].values():
+        total += count
+
+      sleep_time = self._SleepTime(total, self.throttles[throttle_name],
+                                   duration)
+
+      if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION:
+        break
+
+      logger.debug('[%s] Throttling on %s. Sleeping for %.1f ms '
+                   '(duration=%.1f ms, total=%d)',
+                   thread.getName(), throttle_name,
+                   sleep_time * 1000, duration * 1000, total)
+      self.thread_sleep(sleep_time)
+      if thread.exit_flag:
+        break
+      self._RotateCounts(throttle_name)
+
+  def _SleepTime(self, total, limit, duration):
+    """Calculate the time to sleep on a throttle.
+
+    Args:
+      total: The total amount transferred.
+      limit: The amount per second that is allowed to be sent.
+      duration: The amount of time taken to send the total.
+
+    Returns:
+      A float for the amount of time to sleep.
+    """
+    if not limit:
+      return 0.0
+    return max(0.0, (total / limit) - duration)
+
+  def _RotateCounts(self, throttle_name):
+    """Rotate the transfer counters.
+
+    If sufficient time has passed, then rotate the counters from active to
+    the prior-block of counts.
+
+    This rotation is interlocked to ensure that multiple threads do not
+    over-rotate the counts.
+
+    Args:
+      throttle_name: The name of the throttle to rotate.
+    """
+    self.VerifyThrottleName(throttle_name)
+    self.rotate_mutex[throttle_name].acquire()
+    try:
+      next_rotate_time = self.last_rotate[throttle_name] + self.ROTATE_PERIOD
+      if next_rotate_time >= self.get_time():
+        return
+
+      for name, count in self.transferred[throttle_name].items():
+        self.prior_block[throttle_name][name] = count
+        self.transferred[throttle_name][name] = 0
+
+        self.totals[throttle_name][name] += count
+
+      self.last_rotate[throttle_name] = self.get_time()
+
+    finally:
+      self.rotate_mutex[throttle_name].release()
+
+  def TotalTransferred(self, throttle_name):
+    """Return the total transferred, and over what period.
+
+    Args:
+      throttle_name: The name of the throttle to total.
+
+    Returns:
+      A tuple of the total count and running time for the given throttle name.
+    """
+    total = 0
+    for count in self.totals[throttle_name].values():
+      total += count
+    for count in self.transferred[throttle_name].values():
+      total += count
+    return total, self.get_time() - self.start_time
+
+
+BANDWIDTH_UP = 'http-bandwidth-up'
+BANDWIDTH_DOWN = 'http-bandwidth-down'
+REQUESTS = 'http-requests'
+HTTPS_BANDWIDTH_UP = 'https-bandwidth-up'
+HTTPS_BANDWIDTH_DOWN = 'https-bandwidth-down'
+HTTPS_REQUESTS = 'https-requests'
+DATASTORE_CALL_COUNT = 'datastore-call-count'
+ENTITIES_FETCHED = 'entities-fetched'
+ENTITIES_MODIFIED = 'entities-modified'
+INDEX_MODIFICATIONS = 'index-modifications'
+
+
+DEFAULT_LIMITS = {
+    BANDWIDTH_UP: 100000,
+    BANDWIDTH_DOWN: 100000,
+    REQUESTS: 15,
+    HTTPS_BANDWIDTH_UP: 100000,
+    HTTPS_BANDWIDTH_DOWN: 100000,
+    HTTPS_REQUESTS: 15,
+    DATASTORE_CALL_COUNT: 120,
+    ENTITIES_FETCHED: 400,
+    ENTITIES_MODIFIED: 400,
+    INDEX_MODIFICATIONS: 1600,
+}
+
+NO_LIMITS = {
+    BANDWIDTH_UP: None,
+    BANDWIDTH_DOWN: None,
+    REQUESTS: None,
+    HTTPS_BANDWIDTH_UP: None,
+    HTTPS_BANDWIDTH_DOWN: None,
+    HTTPS_REQUESTS: None,
+    DATASTORE_CALL_COUNT: None,
+    ENTITIES_FETCHED: None,
+    ENTITIES_MODIFIED: None,
+    INDEX_MODIFICATIONS: None,
+}
+
+
+def DefaultThrottle(multiplier=1.0):
+  """Return a Throttle instance with multiplier * the quota limits."""
+  layout = dict([(name, multiplier * limit)
+                 for (name, limit) in DEFAULT_LIMITS.iteritems()])
+  return Throttle(layout=layout)
+
+
+class ThrottleHandler(urllib2.BaseHandler):
+  """A urllib2 handler for http and https requests that adds to a throttle."""
+
+  def __init__(self, throttle):
+    """Initialize a ThrottleHandler.
+
+    Args:
+      throttle: A Throttle instance to call for bandwidth and http/https request
+        throttling.
+    """
+    self.throttle = throttle
+
+  def AddRequest(self, throttle_name, req):
+    """Add to bandwidth throttle for given request.
+
+    Args:
+      throttle_name: The name of the bandwidth throttle to add to.
+      req: The request whose size will be added to the throttle.
+    """
+    size = 0
+    for key, value in req.headers.iteritems():
+      size += len('%s: %s\n' % (key, value))
+    for key, value in req.unredirected_hdrs.iteritems():
+      size += len('%s: %s\n' % (key, value))
+    (unused_scheme,
+     unused_host_port, url_path,
+     unused_query, unused_fragment) = urlparse.urlsplit(req.get_full_url())
+    size += len('%s %s HTTP/1.1\n' % (req.get_method(), url_path))
+    data = req.get_data()
+    if data:
+      size += len(data)
+    self.throttle.AddTransfer(throttle_name, size)
+
+  def AddResponse(self, throttle_name, res):
+    """Add to bandwidth throttle for given response.
+
+    Args:
+      throttle_name: The name of the bandwidth throttle to add to.
+      res: The response whose size will be added to the throttle.
+    """
+    content = res.read()
+
+    def ReturnContent():
+      return content
+
+    res.read = ReturnContent
+    size = len(content)
+    headers = res.info()
+    for key, value in headers.items():
+      size += len('%s: %s\n' % (key, value))
+    self.throttle.AddTransfer(throttle_name, size)
+
+  def http_request(self, req):
+    """Process an HTTP request.
+
+    If the throttle is over quota, sleep first.  Then add request size to
+    throttle before returning it to be sent.
+
+    Args:
+      req: A urllib2.Request object.
+
+    Returns:
+      The request passed in.
+    """
+    self.throttle.Sleep(BANDWIDTH_UP)
+    self.throttle.Sleep(BANDWIDTH_DOWN)
+    self.AddRequest(BANDWIDTH_UP, req)
+    return req
+
+  def https_request(self, req):
+    """Process an HTTPS request.
+
+    If the throttle is over quota, sleep first.  Then add request size to
+    throttle before returning it to be sent.
+
+    Args:
+      req: A urllib2.Request object.
+
+    Returns:
+      The request passed in.
+    """
+    self.throttle.Sleep(HTTPS_BANDWIDTH_UP)
+    self.throttle.Sleep(HTTPS_BANDWIDTH_DOWN)
+    self.AddRequest(HTTPS_BANDWIDTH_UP, req)
+    return req
+
+  def http_response(self, unused_req, res):
+    """Process an HTTP response.
+
+    The size of the response is added to the bandwidth throttle and the request
+    throttle is incremented by one.
+
+    Args:
+      unused_req: The urllib2 request for this response.
+      res: A urllib2 response object.
+
+    Returns:
+      The response passed in.
+    """
+    self.AddResponse(BANDWIDTH_DOWN, res)
+    self.throttle.AddTransfer(REQUESTS, 1)
+    return res
+
+  def https_response(self, unused_req, res):
+    """Process an HTTPS response.
+
+    The size of the response is added to the bandwidth throttle and the request
+    throttle is incremented by one.
+
+    Args:
+      unused_req: The urllib2 request for this response.
+      res: A urllib2 response object.
+
+    Returns:
+      The response passed in.
+    """
+    self.AddResponse(HTTPS_BANDWIDTH_DOWN, res)
+    self.throttle.AddTransfer(HTTPS_REQUESTS, 1)
+    return res
+
+
+class ThrottledHttpRpcServer(appengine_rpc.HttpRpcServer):
+  """Provides a simplified RPC-style interface for HTTP requests.
+
+  This RPC server uses a Throttle to prevent exceeding quotas.
+  """
+
+  def __init__(self, throttle, *args, **kwargs):
+    """Initialize a ThrottledHttpRpcServer.
+
+    Also sets request_manager.rpc_server to the ThrottledHttpRpcServer instance.
+
+    Args:
+      throttle: A Throttle instance.
+      args: Positional arguments to pass through to
+        appengine_rpc.HttpRpcServer.__init__
+      kwargs: Keyword arguments to pass through to
+        appengine_rpc.HttpRpcServer.__init__
+    """
+    self.throttle = throttle
+    appengine_rpc.HttpRpcServer.__init__(self, *args, **kwargs)
+
+  def _GetOpener(self):
+    """Returns an OpenerDirector that supports cookies and ignores redirects.
+
+    Returns:
+      A urllib2.OpenerDirector object.
+    """
+    opener = appengine_rpc.HttpRpcServer._GetOpener(self)
+    opener.add_handler(ThrottleHandler(self.throttle))
+
+    return opener
+
+
+def ThrottledHttpRpcServerFactory(throttle):
+  """Create a factory to produce ThrottledHttpRpcServer for a given throttle.
+
+  Args:
+    throttle: A Throttle instance to use for the ThrottledHttpRpcServer.
+
+  Returns:
+    A factory to produce a ThrottledHttpRpcServer.
+  """
+
+  def MakeRpcServer(*args, **kwargs):
+    """Factory to produce a ThrottledHttpRpcServer.
+
+    Args:
+      args: Positional args to pass to ThrottledHttpRpcServer.
+      kwargs: Keyword args to pass to ThrottledHttpRpcServer.
+
+    Returns:
+      A ThrottledHttpRpcServer instance.
+    """
+    kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
+    kwargs['save_cookies'] = True
+    rpc_server = ThrottledHttpRpcServer(throttle, *args, **kwargs)
+    return rpc_server
+  return MakeRpcServer
+
+
+class Throttler(object):
+  def PrehookHandler(self, service, call, request, response):
+    handler = getattr(self, '_Prehook_' + call, None)
+    if handler:
+      handler(request, response)
+
+  def PosthookHandler(self, service, call, request, response):
+    handler = getattr(self, '_Posthook_' + call, None)
+    if handler:
+      handler(request, response)
+
+
+def SleepHandler(*throttle_names):
+  def SleepOnThrottles(self, request, response):
+    for throttle_name in throttle_names:
+      self._DatastoreThrottler__throttle.Sleep(throttle_name)
+  return SleepOnThrottles
+
+
+class DatastoreThrottler(Throttler):
+  def __init__(self, throttle):
+    Throttler.__init__(self)
+    self.__throttle = throttle
+
+  def AddCost(self, cost_proto):
+    """Add costs from the Cost protobuf."""
+    self.__throttle.AddTransfer(INDEX_MODIFICATIONS, cost_proto.index_writes())
+    self.__throttle.AddTransfer(ENTITIES_MODIFIED, cost_proto.entity_writes())
+
+
+  _Prehook_Put = SleepHandler(ENTITIES_MODIFIED, INDEX_MODIFICATIONS)
+
+  def _Posthook_Put(self, request, response):
+    self.AddCost(response.cost())
+
+
+  _Prehook_Get = SleepHandler(ENTITIES_FETCHED)
+
+  def _Posthook_Get(self, request, response):
+    self.__throttle.AddTransfer(ENTITIES_FETCHED, response.entity_size())
+
+
+  _Prehook_RunQuery = SleepHandler(ENTITIES_FETCHED)
+
+  def _Posthook_RunQuery(self, request, response):
+    if not response.keys_only():
+      self.__throttle.AddTransfer(ENTITIES_FETCHED, response.result_size())
+
+
+  _Prehook_Next = SleepHandler(ENTITIES_FETCHED)
+
+  def _Posthook_Next(self, request, response):
+    if not response.keys_only():
+      self.__throttle.AddTransfer(ENTITIES_FETCHED, response.result_size())
+
+
+  _Prehook_Delete = SleepHandler(ENTITIES_MODIFIED, INDEX_MODIFICATIONS)
+
+  def _Posthook_Delete(self, request, response):
+    self.AddCost(response.cost())
+
+
+  _Prehook_Commit = SleepHandler()
+
+  def _Posthook_Commit(self, request, response):
+    self.AddCost(response.cost())
+
+
+def ThrottleRemoteDatastore(throttle, remote_datastore_stub=None):
+  """Install the given throttle for the remote datastore stub.
+
+  Args:
+    throttle: A Throttle instance to limit datastore access rates
+    remote_datastore_stub: The datastore stub instance to throttle, for
+      testing purposes.
+  """
+  if not remote_datastore_stub:
+    remote_datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
+  if not isinstance(remote_datastore_stub, remote_api_stub.RemoteDatastoreStub):
+    raise remote_api_stub.ConfigurationError('remote_api is not configured.')
+  throttler = DatastoreThrottler(throttle)
+  remote_datastore_stub._PreHookHandler = throttler.PrehookHandler
+  remote_datastore_stub._PostHookHandler = throttler.PosthookHandler
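
A Throttle can also be built directly with a custom layout instead of
DefaultThrottle; a sketch limiting only datastore call count and fetched
entities, assuming remote_api has already been configured as in the module
docstring (the specific limits are illustrative):

  import threading
  from google.appengine.ext.remote_api import throttle

  custom = throttle.Throttle(layout={
      throttle.DATASTORE_CALL_COUNT: 60,
      throttle.ENTITIES_FETCHED: 200,
  })
  throttle.ThrottleRemoteDatastore(custom)

  main = threading.currentThread()
  main.exit_flag = False  # Throttle.Sleep() consults this flag on the thread
  custom.Register(main)
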
--- a/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/search/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -46,6 +46,40 @@
 
 The full text index is stored in a property named __searchable_text_index.
 
+Specifying multiple indexes and properties to index
+---------------------------------------------------
+
+By default, one index is created with all string properties. You can define
+multiple indexes and specify which properties to index for each by overriding
+the SearchableProperties() method of search.SearchableModel, for example:
+
+  class Article(search.SearchableModel):
+    @classmethod
+    def SearchableProperties(cls):
+      return [['book', 'author'], ['book']]
+
+In this example, two indexes will be maintained: one that includes the 'book'
+and 'author' properties, and another for the 'book' property only. They will be
+stored in properties named __searchable_text_index_book_author and
+__searchable_text_index_book respectively. Note that the index that includes
+all properties will not be created unless added explicitly like this:
+
+  @classmethod
+  def SearchableProperties(cls):
+    return [['book', 'author'], ['book'], search.ALL_PROPERTIES]
+
+The default return value of SearchableProperties() is [search.ALL_PROPERTIES]
+(one index, all properties).
+
+To search using a custom-defined index, pass its definition
+in the 'properties' parameter of 'search':
+
+  Article.all().search('Lem', properties=['book', 'author'])
+
+Note that the order of properties in the list matters.
+
+Adding indexes to index.yaml
+----------------------------
 
 In general, if you just want to provide full text search, you *don't* need to
 add any extra indexes to your index.yaml. However, if you want to use search()
@@ -60,6 +94,9 @@
       direction: desc
     ...
 
+Similarly, if you created a custom index (see above), use the name of the
+property it's stored in, e.g. __searchable_text_index_book_author.
+
 Note that using SearchableModel will noticeably increase the latency of save()
 operations, since it writes an index row for each indexable word. This also
 means that the latency of save() will increase roughly with the size of the
@@ -79,6 +116,8 @@
 from google.appengine.ext import db
 from google.appengine.datastore import datastore_pb
 
+ALL_PROPERTIES = []
+
 class SearchableEntity(datastore.Entity):
   """A subclass of datastore.Entity that supports full text indexing.
 
@@ -124,6 +163,8 @@
 
   _word_delimiter_regex = re.compile('[' + re.escape(string.punctuation) + ']')
 
+  _searchable_properties = [ALL_PROPERTIES]
+
   def __init__(self, kind_or_entity, word_delimiter_regex=None, *args,
                **kwargs):
     """Constructor. May be called as a copy constructor.
@@ -144,6 +185,9 @@
     if isinstance(kind_or_entity, datastore.Entity):
       self._Entity__key = kind_or_entity._Entity__key
       self._Entity__unindexed_properties = frozenset(kind_or_entity.unindexed_properties())
+      if isinstance(kind_or_entity, SearchableEntity):
+        if getattr(kind_or_entity, '_searchable_properties', None) is not None:
+          self._searchable_properties = kind_or_entity._searchable_properties
       self.update(kind_or_entity)
     else:
       super(SearchableEntity, self).__init__(kind_or_entity, *args, **kwargs)
@@ -154,22 +198,33 @@
     Returns:
       entity_pb.Entity
     """
-    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
-      del self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY]
+    for properties_to_index in self._searchable_properties:
+      index_property_name = SearchableEntity.IndexPropertyName(properties_to_index)
+      if index_property_name in self:
+        del self[index_property_name]
+
+      if not properties_to_index:
+        properties_to_index = self.keys()
+
+      index = set()
+      for name in properties_to_index:
+        if not self.has_key(name):
+          continue
 
-    index = set()
-    for (name, values) in self.items():
-      if not isinstance(values, list):
-        values = [values]
-      if (isinstance(values[0], basestring) and
-          not isinstance(values[0], datastore_types.Blob)):
-        for value in values:
-          index.update(SearchableEntity._FullTextIndex(
-              value, self._word_delimiter_regex))
+        values = self[name]
+        if not isinstance(values, list):
+          values = [values]
 
-    index_list = list(index)
-    if index_list:
-      self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY] = index_list
+        if (isinstance(values[0], basestring) and
+            not isinstance(values[0], datastore_types.Blob)):
+          for value in values:
+            index.update(SearchableEntity._FullTextIndex(
+                value, self._word_delimiter_regex))
+
+      index_list = list(index)
+      if index_list:
+        self[index_property_name] = index_list
 
     return super(SearchableEntity, self)._ToPb()
 
@@ -206,6 +261,16 @@
 
     return words
 
+  @classmethod
+  def IndexPropertyName(cls, properties):
+    """Given index definition, returns the name of the property to put it in."""
+    name = SearchableEntity._FULL_TEXT_INDEX_PROPERTY
+
+    if properties:
+      name += '_' + '_'.join(properties)
+
+    return name
+
 
 class SearchableQuery(datastore.Query):
   """A subclass of datastore.Query that supports full text search.
@@ -214,7 +279,8 @@
   SearchableEntity or SearchableModel classes.
   """
 
-  def Search(self, search_query, word_delimiter_regex=None):
+  def Search(self, search_query, word_delimiter_regex=None,
+             properties=ALL_PROPERTIES):
     """Add a search query. This may be combined with filters.
 
     Note that keywords in the search query will be silently dropped if they
@@ -230,28 +296,27 @@
     datastore_types.ValidateString(search_query, 'search query')
     self._search_query = search_query
     self._word_delimiter_regex = word_delimiter_regex
+    self._properties = properties
     return self
 
-  def _ToPb(self, limit=None, offset=None):
+  def _ToPb(self, *args, **kwds):
     """Adds filters for the search query, then delegates to the superclass.
 
-    Raises BadFilterError if a filter on the index property already exists.
-
-    Args:
-      # an upper bound on the number of results returned by the query.
-      limit: int
-      # number of results that match the query to skip.  limit is applied
-      # after the offset is fulfilled.
-      offset: int
+    Mimics Query._ToPb()'s signature. Raises BadFilterError if a filter on the
+    index property already exists.
 
     Returns:
       datastore_pb.Query
     """
-    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
+
+    properties = getattr(self, "_properties", ALL_PROPERTIES)
+
+    index_property_name = SearchableEntity.IndexPropertyName(properties)
+    if index_property_name in self:
       raise datastore_errors.BadFilterError(
-        '%s is a reserved name.' % SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
+        '%s is a reserved name.' % index_property_name)
 
-    pb = super(SearchableQuery, self)._ToPb(limit=limit, offset=offset)
+    pb = super(SearchableQuery, self)._ToPb(*args, **kwds)
 
     if hasattr(self, '_search_query'):
       keywords = SearchableEntity._FullTextIndex(
@@ -260,7 +325,7 @@
         filter = pb.add_filter()
         filter.set_op(datastore_pb.Query_Filter.EQUAL)
         prop = filter.add_property()
-        prop.set_name(SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
+        prop.set_name(index_property_name)
         prop.set_multiple(len(keywords) > 1)
         prop.mutable_value().set_stringvalue(unicode(keyword).encode('utf-8'))
 
@@ -290,13 +355,21 @@
 
   Automatically indexes all string-based properties. To search, use the all()
   method to get a SearchableModel.Query, then use its search() method.
+
+  Override SearchableProperties() to define properties to index and/or multiple
+  indexes (see the file's comment).
   """
 
+  @classmethod
+  def SearchableProperties(cls):
+    return [ALL_PROPERTIES]
+
   class Query(db.Query):
     """A subclass of db.Query that supports full text search."""
     _search_query = None
+    _properties = None
 
-    def search(self, search_query):
+    def search(self, search_query, properties=ALL_PROPERTIES):
       """Adds a full text search to this query.
 
       Args:
@@ -306,6 +379,13 @@
         self
       """
       self._search_query = search_query
+      self._properties = properties
+
+      if self._properties not in getattr(self, '_searchable_properties', [ALL_PROPERTIES]):
+        raise datastore_errors.BadFilterError(
+          '%s does not have a corresponding index. Please add it to '
+          'the list returned by SearchableProperties()' % self._properties)
+
       return self
 
     def _get_query(self):
@@ -314,14 +394,16 @@
                                   _query_class=SearchableQuery,
                                   _multi_query_class=SearchableMultiQuery)
       if self._search_query:
-        query.Search(self._search_query)
+        query.Search(self._search_query, properties=self._properties)
       return query
 
   def _populate_internal_entity(self):
     """Wraps db.Model._populate_internal_entity() and injects
     SearchableEntity."""
-    return db.Model._populate_internal_entity(self,
-                                              _entity_class=SearchableEntity)
+    entity = db.Model._populate_internal_entity(self,
+                                                _entity_class=SearchableEntity)
+    entity._searchable_properties = self.SearchableProperties()
+    return entity
 
   @classmethod
   def from_entity(cls, entity):
@@ -333,4 +415,6 @@
   @classmethod
   def all(cls):
     """Returns a SearchableModel.Query for this kind."""
-    return SearchableModel.Query(cls)
+    query = SearchableModel.Query(cls)
+    query._searchable_properties = cls.SearchableProperties()
+    return query
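
Putting the new multi-index support together, a sketch of a model that
maintains two custom indexes and queries one of them (the model and data are
illustrative):

  from google.appengine.ext import db
  from google.appengine.ext import search

  class Article(search.SearchableModel):
    book = db.StringProperty()
    author = db.StringProperty()

    @classmethod
    def SearchableProperties(cls):
      # Two indexes: one over ('book', 'author'), one over 'book' alone.
      return [['book', 'author'], ['book']]

  Article(book='Solaris', author='Lem').put()

  # The properties list must match one of the definitions above.
  results = Article.all().search('Lem', properties=['book', 'author']).fetch(10)

Passing a properties list that SearchableProperties() does not return raises
BadFilterError, since no corresponding index property exists.
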
--- a/thirdparty/google_appengine/google/appengine/ext/webapp/__init__.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -247,6 +247,9 @@
       except UnicodeError, e:
         logging.warning('Response written is not UTF-8: %s', e)
 
+    if (self.headers.get('Cache-Control') == 'no-cache' and
+        not self.headers.get('Expires')):
+      self.headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
     self.headers['Content-Length'] = str(len(body))
     write = start_response('%d %s' % self.__status, self.__wsgi_headers)
     write(body)
@@ -463,6 +466,9 @@
   The URL mapping is first-match based on the list ordering.
   """
 
+  REQUEST_CLASS = Request
+  RESPONSE_CLASS = Response
+
   def __init__(self, url_mapping, debug=False):
     """Initializes this application with the given URL mapping.
 
@@ -477,8 +483,8 @@
 
   def __call__(self, environ, start_response):
     """Called by WSGI when a request comes in."""
-    request = Request(environ)
-    response = Response()
+    request = self.REQUEST_CLASS(environ)
+    response = self.RESPONSE_CLASS()
 
     WSGIApplication.active_instance = self
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/mail_handlers.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Handler library for inbound Mail API.
+
+Contains handlers to help with receiving mail.
+
+  InboundMailHandler: Has a helper method for easily setting up
+    email receivers.
+"""
+
+
+from google.appengine.api import mail
+from google.appengine.ext import webapp
+
+
+MAIL_HANDLER_URL_PATTERN = '/_ah/mail/.+'
+
+
+class InboundMailHandler(webapp.RequestHandler):
+  """Base class for inbound mail handlers.
+
+  Example:
+
+    # Sub-class overrides receive method.
+    class HelloReceiver(InboundMailHandler):
+
+      def receive(self, mail_message):
+        logging.info('Received greeting from %s: %s' % (mail_message.sender,
+                                                        mail_message.body))
+
+
+    # Map mail handler to application.
+    application = webapp.WSGIApplication([
+        HelloReceiver.mapping(),
+    ])
+  """
+
+  def post(self):
+    """Transforms body to email request."""
+    self.receive(mail.InboundEmailMessage(self.request.body))
+
+  def receive(self, mail_message):
+    """Receive an email message.
+
+    Override this method to implement an email receiver.
+
+    Args:
+      mail_message: InboundEmailMessage instance representing received
+        email.
+    """
+    pass
+
+  @classmethod
+  def mapping(cls):
+    """Convenience method to map handler class to application.
+
+    Returns:
+      Mapping from email URL to inbound mail handler class.
+    """
+    return MAIL_HANDLER_URL_PATTERN, cls
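
A sketch of a concrete receiver that logs the plain-text payload of each
message, assuming InboundEmailMessage exposes bodies() as in the mail API (the
handler name is illustrative):

  import logging
  from google.appengine.ext import webapp
  from google.appengine.ext.webapp import mail_handlers

  class LoggingReceiver(mail_handlers.InboundMailHandler):
    def receive(self, mail_message):
      for content_type, body in mail_message.bodies('text/plain'):
        logging.info('From %s: %s', mail_message.sender, body.decode())

  application = webapp.WSGIApplication([LoggingReceiver.mapping()])
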
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/ext/webapp/xmpp_handlers.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""XMPP webapp handler classes.
+
+This module provides handler classes for XMPP bots, including both basic
+messaging functionality and a command handler for commands such as "/foo bar".
+"""
+
+
+
+import logging
+from google.appengine.api import xmpp
+from google.appengine.ext import webapp
+
+
+class BaseHandler(webapp.RequestHandler):
+  """A webapp baseclass for XMPP handlers.
+
+  Implements a straightforward message delivery pattern. When a message is
+  received, message_received is called with a Message object that encapsulates
+  the relevant details. Users can reply using the standard XMPP API, or the
+  convenient .reply() method on the Message object.
+  """
+
+  def message_received(self, message):
+    """Called when a message is sent to the XMPP bot.
+
+    Args:
+      message: Message: The message that was sent by the user.
+    """
+    raise NotImplementedError()
+
+  def handle_exception(self, exception, debug_mode):
+    """Called if this handler throws an exception during execution.
+
+    Args:
+      exception: the exception that was thrown
+      debug_mode: True if the web application is running in debug mode
+    """
+    super(BaseHandler, self).handle_exception(exception, debug_mode)
+    if getattr(self, 'xmpp_message', None):
+      self.xmpp_message.reply('Oops. Something went wrong.')
+
+  def post(self):
+    try:
+      self.xmpp_message = xmpp.Message(self.request.POST)
+    except xmpp.InvalidMessageError, e:
+      logging.error("Invalid XMPP request: Missing required field %s", e[0])
+      return
+    self.message_received(self.xmpp_message)
+
+
+class CommandHandlerMixin(object):
+  """A command handler for XMPP bots.
+
+  Implements a command handler pattern. XMPP messages are processed by calling
+  message_received. Message objects handled by this class are annotated with
+  'command' and 'arg' fields. On receipt of a message starting with a forward
+  or backward slash, the handler calls a method named after the command; e.g.,
+  if the user sends "/foo bar", the handler will call foo_command(message).
+  If no handler method matches, unhandled_command is called. The default
+  behaviour of unhandled_command is to send the message "Unknown command"
+  back to the sender.
+
+  If the user sends a message not prefixed with a slash,
+  text_message(message) is called.
+  """
+
+  def unhandled_command(self, message):
+    """Called when an unknown command is sent to the XMPP bot.
+
+    Args:
+      message: Message: The message that was sent by the user.
+    """
+    message.reply('Unknown command')
+
+  def text_message(self, message):
+    """Called when a message not prefixed by a /command is sent to the XMPP bot.
+
+    Args:
+      message: Message: The message that was sent by the user.
+    """
+    pass
+
+  def message_received(self, message):
+    """Called when a message is sent to the XMPP bot.
+
+    Args:
+      message: Message: The message that was sent by the user.
+    """
+    if message.command:
+      handler_name = '%s_command' % (message.command,)
+      handler = getattr(self, handler_name, None)
+      if handler:
+        handler(message)
+      else:
+        self.unhandled_command(message)
+    else:
+      self.text_message(message)
+
+
+class CommandHandler(CommandHandlerMixin, BaseHandler):
+  """A webapp implementation of CommandHandlerMixin."""
+  pass
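
A sketch of a bot built on CommandHandler; sending "/roll" to the bot invokes
roll_command, while unprefixed text falls through to text_message (the URL
mapping follows the inbound XMPP convention; the command itself is
illustrative):

  import random
  from google.appengine.ext import webapp
  from google.appengine.ext.webapp import xmpp_handlers

  class DiceBot(xmpp_handlers.CommandHandler):
    def roll_command(self, message):
      message.reply('You rolled a %d' % random.randint(1, 6))

    def text_message(self, message):
      message.reply('Try /roll')

  application = webapp.WSGIApplication([('/_ah/xmpp/message/chat/', DiceBot)])
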
--- a/thirdparty/google_appengine/google/appengine/runtime/apiproxy.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/runtime/apiproxy.py	Sun Sep 06 23:31:53 2009 +0200
@@ -131,6 +131,7 @@
 
   def __MakeCallDone(self):
     self.__state = RPC.FINISHING
+    self.cpu_usage_mcycles = self.__result_dict['cpu_usage_mcycles']
     if self.__result_dict['error'] == APPLICATION_ERROR:
       self.__exception = apiproxy_errors.ApplicationError(
           self.__result_dict['application_error'],
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/adaptive_thread_pool.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,460 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Provides thread-pool-like functionality for workers accessing App Engine.
+
+The pool adapts to slow or timed-out requests by reducing the number of
+active workers, and increasing it when request latency drops.
+"""
+
+
+
+import logging
+import Queue
+import sys
+import threading
+import time
+import traceback
+
+from google.appengine.tools.requeue import ReQueue
+
+logger = logging.getLogger('google.appengine.tools.adaptive_thread_pool')
+
+_THREAD_SHOULD_EXIT = '_THREAD_SHOULD_EXIT'
+
+INITIAL_BACKOFF = 1.0
+
+BACKOFF_FACTOR = 2.0
+
+
+class Error(Exception):
+  """Base-class for exceptions in this module."""
+
+
+class WorkItemError(Error):
+  """Error while processing a WorkItem."""
+
+
+class RetryException(Error):
+  """A non-fatal exception that indicates that a work item should be retried."""
+
+
+def InterruptibleSleep(sleep_time):
+  """Puts thread to sleep, checking this threads exit_flag four times a second.
+
+  Args:
+    sleep_time: Time to sleep.
+  """
+  slept = 0.0
+  epsilon = .0001
+  thread = threading.currentThread()
+  while slept < sleep_time - epsilon:
+    remaining = sleep_time - slept
+    this_sleep_time = min(remaining, 0.25)
+    time.sleep(this_sleep_time)
+    slept += this_sleep_time
+    if thread.exit_flag:
+      return
+
+
+class WorkerThread(threading.Thread):
+  """A WorkerThread to execute WorkItems.
+
+  Attributes:
+    exit_flag: A boolean indicating whether this thread should stop
+      its work and exit.
+  """
+
+  def __init__(self, thread_pool, thread_gate, name=None):
+    """Initialize a WorkerThread instance.
+
+    Args:
+      thread_pool: An AdaptiveThreadPool instance.
+      thread_gate: A ThreadGate instance.
+      name: A name for this WorkerThread.
+    """
+    threading.Thread.__init__(self)
+
+    self.setDaemon(True)
+
+    self.exit_flag = False
+    self.__error = None
+    self.__traceback = None
+    self.__thread_pool = thread_pool
+    self.__work_queue = thread_pool.requeue
+    self.__thread_gate = thread_gate
+    if not name:
+      self.__name = 'Anonymous_' + self.__class__.__name__
+    else:
+      self.__name = name
+
+  def run(self):
+    """Perform the work of the thread."""
+    logger.debug('[%s] %s: started', self.getName(), self.__class__.__name__)
+
+    try:
+      self.WorkOnItems()
+    except:
+      self.SetError()
+
+    logger.debug('[%s] %s: exiting', self.getName(), self.__class__.__name__)
+
+  def SetError(self):
+    """Sets the error and traceback information for this thread.
+
+    This must be called from an exception handler.
+    """
+    if not self.__error:
+      exc_info = sys.exc_info()
+      self.__error = exc_info[1]
+      self.__traceback = exc_info[2]
+      logger.exception('[%s] %s:', self.getName(), self.__class__.__name__)
+
+  def WorkOnItems(self):
+    """Perform the work of a WorkerThread."""
+    while not self.exit_flag:
+      item = None
+      self.__thread_gate.StartWork()
+      try:
+        status, instruction = WorkItem.FAILURE, ThreadGate.DECREASE
+        try:
+          if self.exit_flag:
+            instruction = ThreadGate.HOLD
+            break
+
+          try:
+            item = self.__work_queue.get(block=True, timeout=1.0)
+          except Queue.Empty:
+            instruction = ThreadGate.HOLD
+            continue
+          if item == _THREAD_SHOULD_EXIT or self.exit_flag:
+            status, instruction = WorkItem.SUCCESS, ThreadGate.HOLD
+            break
+
+          logger.debug('[%s] Got work item %s', self.getName(), item)
+
+          status, instruction = item.PerformWork(self.__thread_pool)
+        except RetryException:
+          status, instruction = WorkItem.RETRY, ThreadGate.HOLD
+        except:
+          self.SetError()
+          raise
+
+      finally:
+        try:
+          if item:
+            if status == WorkItem.SUCCESS:
+              self.__work_queue.task_done()
+            elif status == WorkItem.RETRY:
+              try:
+                self.__work_queue.reput(item, block=False)
+              except Queue.Full:
+                logger.error('[%s] Failed to reput work item.', self.getName())
+                raise Error('Failed to reput work item')
+            else:
+              if not self.__error:
+                if item.error:
+                  self.__error = item.error
+                  self.__traceback = item.traceback
+                else:
+                  self.__error = WorkItemError(
+                      'Fatal error while processing %s' % item)
+                raise self.__error
+
+        finally:
+          self.__thread_gate.FinishWork(instruction=instruction)
+
+  def CheckError(self):
+    """If an error is present, then log it."""
+    if self.__error:
+      logger.error('Error in %s: %s', self.getName(), self.__error)
+      if self.__traceback:
+        logger.debug('%s', ''.join(traceback.format_exception(
+            self.__error.__class__,
+            self.__error,
+            self.__traceback)))
+
+  def __str__(self):
+    return self.__name
+
+
+class AdaptiveThreadPool(object):
+  """A thread pool which processes WorkItems from a queue.
+
+  Attributes:
+    requeue: The requeue instance which holds work items for this
+      thread pool.
+  """
+
+  def __init__(self,
+               num_threads,
+               queue_size=None,
+               base_thread_name=None,
+               worker_thread_factory=WorkerThread,
+               queue_factory=Queue.Queue):
+    """Initialize an AdaptiveThreadPool.
+
+    An adaptive thread pool executes WorkItems using a number of
+    WorkerThreads.  WorkItems represent items of work that may
+    succeed, soft fail, or hard fail. In addition, a completed work
+    item can signal this AdaptiveThreadPool to enable more or fewer
+    threads.  Initially one thread is active.  Soft failures are
+    requeued to be retried.  Hard failures cause this
+    AdaptiveThreadPool to shut down entirely.  See the WorkItem class
+    for more details.
+
+    Args:
+      num_threads: The number of threads to use.
+      queue_size: The size of the work item queue to use.
+      base_thread_name: A string from which worker thread names are derived.
+      worker_thread_factory: A factory which produces WorkerThreads.
+      queue_factory: Used for dependency injection.
+    """
+    if queue_size is None:
+      queue_size = num_threads
+    self.requeue = ReQueue(queue_size, queue_factory=queue_factory)
+    self.__thread_gate = ThreadGate(num_threads)
+    self.__num_threads = num_threads
+    self.__threads = []
+    for i in xrange(num_threads):
+      thread = worker_thread_factory(self, self.__thread_gate)
+      if base_thread_name:
+        base = base_thread_name
+      else:
+        base = thread.__class__.__name__
+      thread.name = '%s-%d' % (base, i)
+      self.__threads.append(thread)
+      thread.start()
+
+  def num_threads(self):
+    """Return the number of threads in this thread pool."""
+    return self.__num_threads
+
+  def Threads(self):
+    """Yields the registered threads."""
+    for thread in self.__threads:
+      yield thread
+
+  def SubmitItem(self, item, block=True, timeout=0.0):
+    """Submit a WorkItem to the AdaptiveThreadPool.
+
+    Args:
+      item: A WorkItem instance.
+      block: Whether to block on submitting if the submit queue is full.
+      timeout: Time to wait for room in the queue if block is True; 0.0 to
+        block indefinitely.
+
+    Raises:
+      Queue.Full if the submit queue is full.
+    """
+    self.requeue.put(item, block=block, timeout=timeout)
+
+  def QueuedItemCount(self):
+    """Returns the number of items currently in the queue."""
+    return self.requeue.qsize()
+
+  def Shutdown(self):
+    """Shutdown the thread pool.
+
+    Tasks may remain unexecuted in the submit queue.
+    """
+    while not self.requeue.empty():
+      try:
+        unused_item = self.requeue.get_nowait()
+        self.requeue.task_done()
+      except Queue.Empty:
+        pass
+    for thread in self.__threads:
+      thread.exit_flag = True
+      self.requeue.put(_THREAD_SHOULD_EXIT)
+    self.__thread_gate.EnableAllThreads()
+
+  def Wait(self):
+    """Wait until all work items have been completed."""
+    self.requeue.join()
+
+  def JoinThreads(self):
+    """Wait for all threads to exit."""
+    for thread in self.__threads:
+      logger.debug('Waiting for %s to exit' % str(thread))
+      thread.join()
+
+  def CheckErrors(self):
+    """Output logs for any errors that occurred in the worker threads."""
+    for thread in self.__threads:
+      thread.CheckError()
+
+
+class ThreadGate(object):
+  """Manage the number of active worker threads.
+
+  The ThreadGate limits the number of threads that are simultaneously
+  active in order to implement adaptive rate control.
+
+  Initially the ThreadGate allows only one thread to be active.  For
+  each successful work item, another thread is activated and for each
+  failed item, the number of active threads is reduced by one.  When only
+  one thread is active, failures will cause exponential backoff.
+
+  For example, a ThreadGate instance, thread_gate, can be used by a number
+  of threads like so:
+
+  # Block until this thread is enabled for work.
+  thread_gate.StartWork()
+  try:
+    status = DoSomeWorkInvolvingLimitedSharedResources()
+    succeeded = IsStatusGood(status)
+    badly_failed = IsStatusVeryBad(status)
+  finally:
+    if succeeded:
+      # Succeeded, add more simultaneously enabled threads to the task.
+      thread_gate.FinishWork(instruction=ThreadGate.INCREASE)
+    elif badly_failed:
+      # Failed, or succeeded but with high resource load, reduce number of
+      # workers.
+      thread_gate.FinishWork(instruction=ThreadGate.DECREASE)
+    else:
+      # We succeeded, but don't want to add more workers to the task.
+      thread_gate.FinishWork(instruction=ThreadGate.HOLD)
+
+  The thread_gate will enable, disable, or back off threads in response to
+  resource load conditions.
+
+  StartWork can block indefinitely. FinishWork, while not
+  lock-free, should never block absent a demonic scheduler.
+  """
+
+  INCREASE = 'increase'
+  HOLD = 'hold'
+  DECREASE = 'decrease'
+
+  def __init__(self,
+               num_threads,
+               sleep=InterruptibleSleep):
+    """Constructor for ThreadGate instances.
+
+    Args:
+      num_threads: The total number of threads using this gate.
+      sleep: Used for dependency injection.
+    """
+    self.__enabled_count = 1
+    self.__lock = threading.Lock()
+    self.__thread_semaphore = threading.Semaphore(self.__enabled_count)
+    self.__num_threads = num_threads
+    self.__backoff_time = 0
+    self.__sleep = sleep
+
+  def num_threads(self):
+    return self.__num_threads
+
+  def EnableThread(self):
+    """Enable one more worker thread."""
+    self.__lock.acquire()
+    try:
+      self.__enabled_count += 1
+    finally:
+      self.__lock.release()
+    self.__thread_semaphore.release()
+
+  def EnableAllThreads(self):
+    """Enable all worker threads."""
+    for unused_idx in xrange(self.__num_threads - self.__enabled_count):
+      self.EnableThread()
+
+  def StartWork(self):
+    """Starts a critical section in which the number of workers is limited.
+
+    Starts a critical section which allows self.__enabled_count
+    simultaneously operating threads. The critical section is ended by
+    calling self.FinishWork().
+    """
+    self.__thread_semaphore.acquire()
+    if self.__backoff_time > 0.0:
+      if not threading.currentThread().exit_flag:
+        logger.info('Backing off due to errors: %.1f seconds',
+                    self.__backoff_time)
+        self.__sleep(self.__backoff_time)
+
+  def FinishWork(self, instruction=None):
+    """Ends a critical section started with self.StartWork()."""
+    if not instruction or instruction == ThreadGate.HOLD:
+      self.__thread_semaphore.release()
+
+    elif instruction == ThreadGate.INCREASE:
+      if self.__backoff_time > 0.0:
+        logger.info('Resetting backoff to 0.0')
+        self.__backoff_time = 0.0
+      do_enable = False
+      self.__lock.acquire()
+      try:
+        if self.__num_threads > self.__enabled_count:
+          do_enable = True
+          self.__enabled_count += 1
+      finally:
+        self.__lock.release()
+      if do_enable:
+        logger.debug('Increasing active thread count to %d',
+                     self.__enabled_count)
+        self.__thread_semaphore.release()
+      self.__thread_semaphore.release()
+
+    elif instruction == ThreadGate.DECREASE:
+      do_disable = False
+      self.__lock.acquire()
+      try:
+        if self.__enabled_count > 1:
+          do_disable = True
+          self.__enabled_count -= 1
+        else:
+          if self.__backoff_time == 0.0:
+            self.__backoff_time = INITIAL_BACKOFF
+          else:
+            self.__backoff_time *= BACKOFF_FACTOR
+      finally:
+        self.__lock.release()
+        if do_disable:
+          logger.debug('Decreasing the number of active threads to %d',
+                       self.__enabled_count)
+        else:
+          self.__thread_semaphore.release()
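+
+  # Illustrative backoff progression once only one thread remains enabled,
+  # assuming for example INITIAL_BACKOFF = 1.0 and BACKOFF_FACTOR = 2.0
+  # (both defined earlier in this module): successive failures wait 1.0,
+  # 2.0, 4.0, ... seconds, and the next INCREASE resets the wait to zero.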
+
+
+class WorkItem(object):
+  """Holds a unit of work."""
+
+  SUCCESS = 'success'
+  RETRY = 'retry'
+  FAILURE = 'failure'
+
+  def __init__(self, name):
+    self.__name = name
+
+  def PerformWork(self, thread_pool):
+    """Perform the work of this work item and report the results.
+
+    Args:
+      thread_pool: The AdaptiveThreadPool instance associated with this
+        thread.
+
+    Returns:
+      A tuple (status, instruction) of the work status and an instruction
+      for the ThreadGate.
+    """
+    raise NotImplementedError
+
+  def __str__(self):
+    return self.__name
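+
+
+# A minimal sketch of a concrete work item (hypothetical, for illustration
+# only): it shows the (status, instruction) contract that PerformWork
+# implementations must honor.
+class _EchoWorkItem(WorkItem):
+  """Trivially succeeds and votes to enable another worker thread."""
+
+  def PerformWork(self, thread_pool):
+    return (WorkItem.SUCCESS, ThreadGate.INCREASE)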
--- a/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/appcfg.py	Sun Sep 06 23:31:53 2009 +0200
@@ -36,11 +36,13 @@
 import mimetypes
 import optparse
 import os
+import random
 import re
 import sha
 import sys
 import tempfile
 import time
+import urllib
 import urllib2
 
 import google
@@ -69,6 +71,11 @@
 
 MAX_LOG_LEVEL = 4
 
+MAX_BATCH_SIZE = 1000000
+MAX_BATCH_COUNT = 100
+MAX_BATCH_FILE_SIZE = 200000
+BATCH_OVERHEAD = 500
+
 verbosity = 1
 
 
@@ -220,6 +227,25 @@
   return max_tries > 0
 
 
+def _VersionList(release):
+  """Parse a version string into a list of ints.
+
+  Args:
+    release: The 'release' version, e.g. '1.2.4'.
+        (Due to YAML parsing this may also be an int or float.)
+
+  Returns:
+    A list of ints corresponding to the parts of the version string
+    between periods.  Example:
+      '1.2.4' -> [1, 2, 4]
+      '1.2.3.4' -> [1, 2, 3, 4]
+
+  Raises:
+    ValueError if not all the parts are valid integers.
+  """
+  return [int(part) for part in str(release).split('.')]
+
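+# A quick illustration (not part of the SDK): list comparison orders
+# versions numerically, where raw string comparison would not.
+#
+#   _VersionList('1.2.10') > _VersionList('1.2.9')   # True
+#   '1.2.10' > '1.2.9'                               # False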
+
 class UpdateCheck(object):
   """Determines if the local SDK is the latest version.
 
@@ -332,10 +358,26 @@
       return
 
     latest = yaml.safe_load(response)
-    if latest['release'] == version['release']:
+    if version['release'] == latest['release']:
       logging.info('The SDK is up to date.')
       return
 
+    try:
+      this_release = _VersionList(version['release'])
+    except ValueError:
+      logging.warn('Could not parse this release version (%r)',
+                   version['release'])
+    else:
+      try:
+        advertised_release = _VersionList(latest['release'])
+      except ValueError:
+        logging.warn('Could not parse advertised release version (%r)',
+                     latest['release'])
+      else:
+        if this_release > advertised_release:
+          logging.info('This SDK release is newer than the advertised release.')
+          return
+
     api_versions = latest['api_versions']
     if self.config.api_version not in api_versions:
       self._Nag(
@@ -964,6 +1006,149 @@
     fp.close()
 
 
+class UploadBatcher(object):
+  """Helper to batch file uploads."""
+
+  def __init__(self, what, app_id, version, server):
+    """Constructor.
+
+    Args:
+      what: Either 'file' or 'blob' indicating what kind of objects
+        this batcher uploads.  Used in messages and URLs.
+      app_id: The application ID.
+      version: The application version string.
+      server: The RPC server.
+    """
+    assert what in ('file', 'blob'), repr(what)
+    self.what = what
+    self.app_id = app_id
+    self.version = version
+    self.server = server
+    self.single_url = '/api/appversion/add' + what
+    self.batch_url = self.single_url + 's'
+    self.batching = True
+    self.batch = []
+    self.batch_size = 0
+
+  def SendBatch(self):
+    """Send the current batch on its way.
+
+    If successful, resets self.batch and self.batch_size.
+
+    Raises:
+      HTTPError with code=404 if the server doesn't support batching.
+    """
+    boundary = 'boundary'
+    parts = []
+    for path, payload, mime_type in self.batch:
+      while boundary in payload:
+        boundary += '%04x' % random.randint(0, 0xffff)
+        assert len(boundary) < 80, 'Unexpected error, please try again.'
+      part = '\n'.join(['',
+                        'X-Appcfg-File: %s' % urllib.quote(path),
+                        'X-Appcfg-Hash: %s' % _Hash(payload),
+                        'Content-Type: %s' % mime_type,
+                        'Content-Length: %d' % len(payload),
+                        'Content-Transfer-Encoding: 8bit',
+                        '',
+                        payload,
+                        ])
+      parts.append(part)
+    parts.insert(0,
+                 'MIME-Version: 1.0\n'
+                 'Content-Type: multipart/mixed; boundary="%s"\n'
+                 '\n'
+                 'This is a message with multiple parts in MIME format.' %
+                 boundary)
+    parts.append('--\n')
+    delimiter = '\n--%s' % boundary
+    payload = delimiter.join(parts)
+    logging.info('Uploading batch of %d %ss to %s with boundary="%s".',
+                 len(self.batch), self.what, self.batch_url, boundary)
+    self.server.Send(self.batch_url,
+                     payload=payload,
+                     content_type='message/rfc822',
+                     app_id=self.app_id,
+                     version=self.version)
+    self.batch = []
+    self.batch_size = 0
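+
+  # The assembled payload looks roughly like this on the wire (values
+  # illustrative; compare the construction above):
+  #
+  #   MIME-Version: 1.0
+  #   Content-Type: multipart/mixed; boundary="boundary"
+  #
+  #   This is a message with multiple parts in MIME format.
+  #   --boundary
+  #   X-Appcfg-File: app.yaml
+  #   X-Appcfg-Hash: <_Hash(payload)>
+  #   Content-Type: application/octet-stream
+  #   Content-Length: 11
+  #   Content-Transfer-Encoding: 8bit
+  #
+  #   hello world
+  #   --boundary--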
+
+  def SendSingleFile(self, path, payload, mime_type):
+    """Send a single file on its way."""
+    logging.info('Uploading %s %s (%s bytes, type=%s) to %s.',
+                 self.what, path, len(payload), mime_type, self.single_url)
+    self.server.Send(self.single_url,
+                     payload=payload,
+                     content_type=mime_type,
+                     path=path,
+                     app_id=self.app_id,
+                     version=self.version)
+
+  def Flush(self):
+    """Flush the current batch.
+
+    This first attempts to send the batch as a single request; if that
+    fails because the server doesn't support batching, the files are
+    sent one by one, and self.batching is reset to False.
+
+    At the end, self.batch and self.batch_size are reset.
+    """
+    if not self.batch:
+      return
+    try:
+      self.SendBatch()
+    except urllib2.HTTPError, err:
+      if err.code != 404:
+        raise
+
+      logging.info('Old server detected; turning off %s batching.', self.what)
+      self.batching = False
+
+      for path, payload, mime_type in self.batch:
+        self.SendSingleFile(path, payload, mime_type)
+
+      self.batch = []
+      self.batch_size = 0
+
+  def AddToBatch(self, path, payload, mime_type):
+    """Batch a file, possibly flushing first, or perhaps upload it directly.
+
+    Args:
+      path: The name of the file.
+      payload: The contents of the file.
+      mime_type: The MIME Content-type of the file, or None.
+
+    If mime_type is None, application/octet-stream is substituted.
+    """
+    if not mime_type:
+      mime_type = 'application/octet-stream'
+    size = len(payload)
+    if size <= MAX_BATCH_FILE_SIZE:
+      if (len(self.batch) >= MAX_BATCH_COUNT or
+          self.batch_size + size > MAX_BATCH_SIZE):
+        self.Flush()
+      if self.batching:
+        logging.info('Adding %s %s (%s bytes, type=%s) to batch.',
+                     self.what, path, size, mime_type)
+        self.batch.append((path, payload, mime_type))
+        self.batch_size += size + BATCH_OVERHEAD
+        return
+    self.SendSingleFile(path, payload, mime_type)
+
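+# A hypothetical driver for UploadBatcher (rpc_server, paths, and
+# get_payload are illustrative stand-ins, not SDK fixtures):
+#
+#   batcher = UploadBatcher('file', 'my-app', '1', rpc_server)
+#   for path in paths:
+#     batcher.AddToBatch(path, get_payload(path),
+#                        mimetypes.guess_type(path)[0])
+#   batcher.Flush()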
+
+def _Hash(content):
+  """Compute the hash of the content.
+
+  Args:
+    content: The data to hash as a string.
+
+  Returns:
+    The string representation of the hash.
+  """
+  h = sha.new(content).hexdigest()
+  return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
+
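+# For example, hashing the SHA-1 test vector 'abc' (shown for
+# illustration):
+#   _Hash('abc') == 'a9993e36_4706816a_ba3e2571_7850c26c_9cd0d89d'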
+
 class AppVersionUpload(object):
   """Provides facilities to upload a new appversion to the hosting service.
 
@@ -995,18 +1180,11 @@
     self.files = {}
     self.in_transaction = False
     self.deployed = False
-
-  def _Hash(self, content):
-    """Compute the hash of the content.
-
-    Args:
-      content: The data to hash as a string.
-
-    Returns:
-      The string representation of the hash.
-    """
-    h = sha.new(content).hexdigest()
-    return '%s_%s_%s_%s_%s' % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
+    self.batching = True
+    self.file_batcher = UploadBatcher('file', self.app_id, self.version,
+                                      self.server)
+    self.blob_batcher = UploadBatcher('blob', self.app_id, self.version,
+                                      self.server)
 
   def AddFile(self, path, file_handle):
     """Adds the provided file to the list to be pushed to the server.
@@ -1024,7 +1202,7 @@
       return
 
     pos = file_handle.tell()
-    content_hash = self._Hash(file_handle.read())
+    content_hash = _Hash(file_handle.read())
     file_handle.seek(pos, 0)
 
     self.files[path] = content_hash
@@ -1084,7 +1262,7 @@
     CloneFiles('/api/appversion/cloneblobs', blobs_to_clone, 'static')
     CloneFiles('/api/appversion/clonefiles', files_to_clone, 'application')
 
-    logging.info('Files to upload: ' + str(files_to_upload))
+    logging.debug('Files to upload: %s', files_to_upload)
 
     self.files = files_to_upload
     return sorted(files_to_upload.iterkeys())
@@ -1109,14 +1287,11 @@
 
     del self.files[path]
     mime_type = GetMimeTypeIfStaticFile(self.config, path)
-    if mime_type is not None:
-      self.server.Send('/api/appversion/addblob', app_id=self.app_id,
-                       version=self.version, path=path, content_type=mime_type,
-                       payload=file_handle.read())
+    payload = file_handle.read()
+    if mime_type is None:
+      self.file_batcher.AddToBatch(path, payload, mime_type)
     else:
-      self.server.Send('/api/appversion/addfile', app_id=self.app_id,
-                       version=self.version, path=path,
-                       payload=file_handle.read())
+      self.blob_batcher.AddToBatch(path, payload, mime_type)
 
   def Commit(self):
     """Commits the transaction, making the new app version available.
@@ -1249,10 +1424,9 @@
     try:
       missing_files = self.Begin()
       if missing_files:
-        StatusUpdate('Uploading %d files.' % len(missing_files))
+        StatusUpdate('Uploading %d files and blobs.' % len(missing_files))
         num_files = 0
         for missing_file in missing_files:
-          logging.info('Uploading file \'%s\'' % missing_file)
           file_handle = openfunc(missing_file)
           try:
             self.UploadFile(missing_file, file_handle)
@@ -1260,7 +1434,11 @@
             file_handle.close()
           num_files += 1
           if num_files % 500 == 0:
-            StatusUpdate('Uploaded %d files.' % num_files)
+            StatusUpdate('Processed %d out of %d.' %
+                         (num_files, len(missing_files)))
+        self.file_batcher.Flush()
+        self.blob_batcher.Flush()
+        StatusUpdate('Uploaded %d files and blobs.' % num_files)
 
       self.Commit()
 
@@ -1268,6 +1446,10 @@
       logging.info('User interrupted. Aborting.')
       self.Rollback()
       raise
+    except urllib2.HTTPError, err:
+      logging.info('HTTP Error (%s)', err)
+      self.Rollback()
+      raise
     except:
       logging.exception('An unexpected error occurred. Aborting.')
       self.Rollback()
@@ -1856,6 +2038,12 @@
 
     if self.options.num_days is None:
       self.options.num_days = int(not self.options.append)
+
+    try:
+      end_date = self._ParseEndDate(self.options.end_date)
+    except ValueError:
+      self.parser.error('End date must be in the format YYYY-MM-DD.')
+
     basepath = self.args[0]
     appyaml = self._ParseAppYaml(basepath)
     rpc_server = self._GetRpcServer()
@@ -1863,11 +2051,27 @@
                                    self.options.num_days,
                                    self.options.append,
                                    self.options.severity,
-                                   time.time(),
+                                   end_date,
                                    self.options.vhost,
                                    self.options.include_vhost)
     logs_requester.DownloadLogs()
 
+  def _ParseEndDate(self, date, time_func=time.time):
+    """Translates a user-readable end date to a POSIX timestamp.
+
+    Args:
+      date: A UTC date string in YYYY-MM-DD format.
+      time_func: A time.time()-style function, used for testing.
+
+    Returns:
+      A POSIX timestamp for the end of that day: midnight UTC at the
+      start of the following day. If no date is given, returns now.
+    """
+    if not date:
+      return time_func()
+    struct_time = time.strptime('%s' % date, '%Y-%m-%d')
+    return calendar.timegm(struct_time) + 86400
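+
+  # Illustratively, _ParseEndDate('2009-09-06') returns 1252281600, the
+  # first moment of 2009-09-07 UTC, so the whole of the requested day
+  # falls before the returned timestamp.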
+
   def _RequestLogsOptions(self, parser):
     """Adds request_logs-specific options to 'parser'.
 
@@ -1896,6 +2100,10 @@
     parser.add_option('--include_vhost', dest='include_vhost',
                       action='store_true', default=False,
                       help='Include virtual host in log messages.')
+    parser.add_option('--end_date', dest='end_date',
+                      action='store', default='',
+                      help='End date (as YYYY-MM-DD) of period for log data. '
+                      'Defaults to today.')
 
   def CronInfo(self, now=None, output=sys.stdout):
     """Displays information about cron definitions.
@@ -2032,7 +2240,11 @@
                      'email',
                      'debug',
                      'exporter_opts',
+                     'mapper_opts',
                      'result_db_filename',
+                     'dry_run',
+                     'dump',
+                     'restore',
                      )])
 
   def PerformDownload(self, run_fn=None):
@@ -2050,6 +2263,9 @@
     args = self._MakeLoaderArgs()
     args['download'] = True
     args['has_header'] = False
+    args['map'] = False
+    args['dump'] = False
+    args['restore'] = False
 
     run_fn(args)
 
@@ -2067,6 +2283,9 @@
 
     args = self._MakeLoaderArgs()
     args['download'] = False
+    args['map'] = False
+    args['dump'] = False
+    args['restore'] = False
 
     run_fn(args)
 
@@ -2114,6 +2333,9 @@
                       help='File to write bulkloader logs.  If not supplied '
                       'then a new log file will be created, named: '
                       'bulkloader-log-TIMESTAMP.')
+    parser.add_option('--dry_run', action='store_true',
+                      dest='dry_run', default=False,
+                      help='Do not execute any remote_api calls.')
 
   def _PerformUploadOptions(self, parser):
     """Adds 'upload_data' specific options to the 'parser' passed in.
--- a/thirdparty/google_appengine/google/appengine/tools/bulkloader.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/bulkloader.py	Sun Sep 06 23:31:53 2009 +0200
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 """Imports data over HTTP.
 
 Usage:
@@ -33,7 +32,7 @@
                             the URL endpoint. The more data per row/Entity, the
                             smaller the batch size should be. (Default 10)
     --config_file=<path>    File containing Model and Loader definitions.
-                            (Required)
+                            (Required unless --dump or --restore are used)
     --db_filename=<path>    Specific progress database to write to, or to
                             resume from. If not supplied, then a new database
                             will be started, named:
@@ -41,6 +40,8 @@
                             The special filename "skip" may be used to simply
                             skip reading/writing any progress information.
     --download              Export entities to a file.
+    --dry_run               Do not execute any remote_api calls.
+    --dump                  Use zero-configuration dump format.
     --email=<string>        The username to use. Will prompt if omitted.
     --exporter_opts=<string>
                             A string to pass to the Exporter.initialize method.
@@ -54,9 +55,12 @@
     --log_file=<path>       File to write bulkloader logs.  If not supplied
                             then a new log file will be created, named:
                             bulkloader-log-TIMESTAMP.
+    --map                   Map an action across datastore entities.
+    --mapper_opts=<string>  A string to pass to the Mapper.Initialize method.
     --num_threads=<int>     Number of threads to use for uploading entities
                             (Default 10)
     --passin                Read the login password from stdin.
+    --restore               Restore from zero-configuration dump format.
     --result_db_filename=<path>
                             Result database to write to for downloads.
     --rps_limit=<int>       The maximum number of records per second to
@@ -78,7 +82,6 @@
 
 
 
-import cPickle
 import csv
 import errno
 import getopt
@@ -88,20 +91,31 @@
 import os
 import Queue
 import re
+import shutil
 import signal
 import StringIO
 import sys
 import threading
 import time
+import traceback
 import urllib2
 import urlparse
 
+from google.appengine.datastore import entity_pb
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore
 from google.appengine.api import datastore_errors
+from google.appengine.datastore import datastore_pb
 from google.appengine.ext import db
+from google.appengine.ext import key_range as key_range_module
 from google.appengine.ext.db import polymodel
 from google.appengine.ext.remote_api import remote_api_stub
+from google.appengine.ext.remote_api import throttle as remote_api_throttle
 from google.appengine.runtime import apiproxy_errors
+from google.appengine.tools import adaptive_thread_pool
 from google.appengine.tools import appengine_rpc
+from google.appengine.tools.requeue import ReQueue
 
 try:
   import sqlite3
@@ -110,10 +124,14 @@
 
 logger = logging.getLogger('google.appengine.tools.bulkloader')
 
+KeyRange = key_range_module.KeyRange
+
 DEFAULT_THREAD_COUNT = 10
 
 DEFAULT_BATCH_SIZE = 10
 
+DEFAULT_DOWNLOAD_BATCH_SIZE = 100
+
 DEFAULT_QUEUE_SIZE = DEFAULT_THREAD_COUNT * 10
 
 _THREAD_SHOULD_EXIT = '_THREAD_SHOULD_EXIT'
@@ -125,9 +143,7 @@
 
 STATE_GETTING = 1
 STATE_GOT = 2
-STATE_NOT_GOT = 3
-
-MINIMUM_THROTTLE_SLEEP_DURATION = 0.001
+STATE_ERROR = 3
 
 DATA_CONSUMED_TO_HERE = 'DATA_CONSUMED_TO_HERE'
 
@@ -142,16 +158,8 @@
 
 DEFAULT_REQUEST_LIMIT = 8
 
-BANDWIDTH_UP = 'http-bandwidth-up'
-BANDWIDTH_DOWN = 'http-bandwidth-down'
-REQUESTS = 'http-requests'
-HTTPS_BANDWIDTH_UP = 'https-bandwidth-up'
-HTTPS_BANDWIDTH_DOWN = 'https-bandwidth-down'
-HTTPS_REQUESTS = 'https-requests'
-RECORDS = 'records'
-
-MAXIMUM_INCREASE_DURATION = 8.0
-MAXIMUM_HOLD_DURATION = 10.0
+MAXIMUM_INCREASE_DURATION = 5.0
+MAXIMUM_HOLD_DURATION = 12.0
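+
+# Illustratively, given these thresholds: a work item that completes within
+# 5.0 seconds votes to enable another worker thread, one within 12.0
+# seconds votes to hold steady, and slower or failing items generally vote
+# to back off (see _WorkItem.PerformWork below).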
 
 
 def ImportStateMessage(state):
@@ -170,7 +178,17 @@
       STATE_READ: 'Batch read from file.',
       STATE_GETTING: 'Fetching batch from server',
       STATE_GOT: 'Batch successfully fetched.',
-      STATE_NOT_GOT: 'Error while fetching batch'
+      STATE_ERROR: 'Error while fetching batch'
+  }[state])
+
+
+def MapStateMessage(state):
+  """Converts a numeric state identifier to a status message."""
+  return ({
+      STATE_READ: 'Batch read from file.',
+      STATE_GETTING: 'Querying for batch from server',
+      STATE_GOT: 'Batch successfully fetched.',
+      STATE_ERROR: 'Error while fetching or mapping.'
   }[state])
 
 
@@ -180,7 +198,7 @@
       STATE_READ: 'READ',
       STATE_GETTING: 'GETTING',
       STATE_GOT: 'GOT',
-      STATE_NOT_GOT: 'NOT_GOT'
+      STATE_ERROR: 'NOT_GOT'
   }[state])
 
 
@@ -190,7 +208,7 @@
       STATE_READ: 'READ',
       STATE_GETTING: 'SENDING',
       STATE_GOT: 'SENT',
-      STATE_NOT_GOT: 'NOT_SENT'
+      STATE_NOT_SENT: 'NOT_SENT'
   }[state])
 
 
@@ -234,16 +252,35 @@
   """A filename passed in by the user refers to a non-writable output file."""
 
 
-class KeyRangeError(Error):
-  """Error while trying to generate a KeyRange."""
-
-
 class BadStateError(Error):
   """A work item in an unexpected state was encountered."""
 
 
+class KeyRangeError(Error):
+  """An error during construction of a KeyRangeItem."""
+
+
+class FieldSizeLimitError(Error):
+  """The csv module tried to read a field larger than the size limit."""
+
+  def __init__(self, limit):
+    self.message = """
+A field in your CSV input file has exceeded the current limit of %d.
+
+You can raise this limit by adding the following lines to your config file:
+
+import csv
+csv.field_size_limit(new_limit)
+
+where new_limit is a number larger than the size in bytes of the largest
+field in your CSV.
+""" % limit
+    Error.__init__(self, self.message)
+
+
 class NameClashError(Error):
   """A name clash occurred while trying to alias old method names."""
+
   def __init__(self, old_name, new_name, klass):
     Error.__init__(self, old_name, new_name, klass)
     self.old_name = old_name
@@ -253,48 +290,51 @@
 
 def GetCSVGeneratorFactory(kind, csv_filename, batch_size, csv_has_header,
                            openfile=open, create_csv_reader=csv.reader):
-  """Return a factory that creates a CSV-based WorkItem generator.
+  """Return a factory that creates a CSV-based UploadWorkItem generator.
 
   Args:
     kind: The kind of the entities being uploaded.
     csv_filename: File on disk containing CSV data.
-    batch_size: Maximum number of CSV rows to stash into a WorkItem.
+    batch_size: Maximum number of CSV rows to stash into an UploadWorkItem.
     csv_has_header: Whether to skip the first row of the CSV.
     openfile: Used for dependency injection.
     create_csv_reader: Used for dependency injection.
 
   Returns:
     A callable (accepting the Progress Queue and Progress Generators
-    as input) which creates the WorkItem generator.
+    as input) which creates the UploadWorkItem generator.
   """
   loader = Loader.RegisteredLoader(kind)
   loader._Loader__openfile = openfile
   loader._Loader__create_csv_reader = create_csv_reader
   record_generator = loader.generate_records(csv_filename)
 
-  def CreateGenerator(progress_queue, progress_generator):
-    """Initialize a WorkItem generator linked to a progress generator and queue.
+  def CreateGenerator(request_manager, progress_queue, progress_generator):
+    """Initialize a UploadWorkItem generator.
 
     Args:
+      request_manager: A RequestManager instance.
       progress_queue: A ProgressQueue instance to send progress information.
       progress_generator: A generator of progress information or None.
 
     Returns:
-      A WorkItemGenerator instance.
+      An UploadWorkItemGenerator instance.
     """
-    return WorkItemGenerator(progress_queue,
-                             progress_generator,
-                             record_generator,
-                             csv_has_header,
-                             batch_size)
+    return UploadWorkItemGenerator(request_manager,
+                                   progress_queue,
+                                   progress_generator,
+                                   record_generator,
+                                   csv_has_header,
+                                   batch_size)
 
   return CreateGenerator
 
 
-class WorkItemGenerator(object):
-  """Reads rows from a row generator and generates WorkItems of batches."""
+class UploadWorkItemGenerator(object):
+  """Reads rows from a row generator and generates UploadWorkItems."""
 
   def __init__(self,
+               request_manager,
                progress_queue,
                progress_generator,
                record_generator,
@@ -303,12 +343,15 @@
     """Initialize a WorkItemGenerator.
 
     Args:
+      request_manager: A RequestManager instance with which to associate
+        WorkItems.
       progress_queue: A progress queue with which to associate WorkItems.
       progress_generator: A generator of progress information.
       record_generator: A generator of data records.
       skip_first: Whether to skip the first data record.
       batch_size: The number of data records per WorkItem.
     """
+    self.request_manager = request_manager
     self.progress_queue = progress_queue
     self.progress_generator = progress_generator
     self.reader = record_generator
@@ -360,30 +403,29 @@
       self.line_number += 1
 
   def _MakeItem(self, key_start, key_end, rows, progress_key=None):
-    """Makes a WorkItem containing the given rows, with the given keys.
+    """Makes a UploadWorkItem containing the given rows, with the given keys.
 
     Args:
-      key_start: The start key for the WorkItem.
-      key_end: The end key for the WorkItem.
-      rows: A list of the rows for the WorkItem.
-      progress_key: The progress key for the WorkItem
+      key_start: The start key for the UploadWorkItem.
+      key_end: The end key for the UploadWorkItem.
+      rows: A list of the rows for the UploadWorkItem.
+      progress_key: The progress key for the UploadWorkItem.
 
     Returns:
-      A WorkItem instance for the given batch.
+      An UploadWorkItem instance for the given batch.
     """
     assert rows
 
-    item = WorkItem(self.progress_queue, rows,
-                    key_start, key_end,
-                    progress_key=progress_key)
+    item = UploadWorkItem(self.request_manager, self.progress_queue, rows,
+                          key_start, key_end, progress_key=progress_key)
 
     return item
 
   def Batches(self):
-    """Reads from the record_generator and generates WorkItems.
+    """Reads from the record_generator and generates UploadWorkItems.
 
     Yields:
-      Instances of class WorkItem
+      Instances of class UploadWorkItem
 
     Raises:
       ResumeError: If the progress database and data file indicate a different
@@ -468,37 +510,50 @@
     """
     csv_file = self.openfile(self.csv_filename, 'rb')
     reader = self.create_csv_reader(csv_file, skipinitialspace=True)
-    return reader
-
-
-class KeyRangeGenerator(object):
+    try:
+      for record in reader:
+        yield record
+    except csv.Error, e:
+      if e.args and e.args[0].startswith('field larger than field limit'):
+        limit = e.args[1]
+        raise FieldSizeLimitError(limit)
+      else:
+        raise
+
+
+class KeyRangeItemGenerator(object):
   """Generates ranges of keys to download.
 
   Reads progress information from the progress database and creates
-  KeyRange objects corresponding to incompletely downloaded parts of an
+  KeyRangeItem objects corresponding to incompletely downloaded parts of an
   export.
   """
 
-  def __init__(self, kind, progress_queue, progress_generator):
-    """Initialize the KeyRangeGenerator.
+  def __init__(self, request_manager, kind, progress_queue, progress_generator,
+               key_range_item_factory):
+    """Initialize the KeyRangeItemGenerator.
 
     Args:
+      request_manager: A RequestManager instance.
       kind: The kind of entities being transferred.
       progress_queue: A queue used for tracking progress information.
       progress_generator: A generator of prior progress information, or None
         if there is no prior status.
+      key_range_item_factory: A factory to produce KeyRangeItems.
     """
+    self.request_manager = request_manager
     self.kind = kind
     self.row_count = 0
     self.xfer_count = 0
     self.progress_queue = progress_queue
     self.progress_generator = progress_generator
+    self.key_range_item_factory = key_range_item_factory
 
   def Batches(self):
     """Iterate through saved progress information.
 
     Yields:
-      KeyRange instances corresponding to undownloaded key ranges.
+      KeyRangeItem instances corresponding to undownloaded key ranges.
     """
     if self.progress_generator is not None:
       for progress_key, state, key_start, key_end in self.progress_generator:
@@ -506,397 +561,27 @@
           key_start = ParseKey(key_start)
           key_end = ParseKey(key_end)
 
-          result = KeyRange(self.progress_queue,
-                            self.kind,
-                            key_start=key_start,
-                            key_end=key_end,
-                            progress_key=progress_key,
-                            direction=KeyRange.ASC,
-                            state=STATE_READ)
+          key_range = KeyRange(key_start=key_start,
+                               key_end=key_end)
+
+          result = self.key_range_item_factory(self.request_manager,
+                                               self.progress_queue,
+                                               self.kind,
+                                               key_range,
+                                               progress_key=progress_key,
+                                               state=STATE_READ)
           yield result
     else:
-
-      yield KeyRange(
-          self.progress_queue, self.kind,
-          key_start=None,
-          key_end=None,
-          direction=KeyRange.DESC)
-
-
-class ReQueue(object):
-  """A special thread-safe queue.
-
-  A ReQueue allows unfinished work items to be returned with a call to
-  reput().  When an item is reput, task_done() should *not* be called
-  in addition, getting an item that has been reput does not increase
-  the number of outstanding tasks.
-
-  This class shares an interface with Queue.Queue and provides the
-  additional reput method.
-  """
-
-  def __init__(self,
-               queue_capacity,
-               requeue_capacity=None,
-               queue_factory=Queue.Queue,
-               get_time=time.time):
-    """Initialize a ReQueue instance.
-
-    Args:
-      queue_capacity: The number of items that can be put in the ReQueue.
-      requeue_capacity: The numer of items that can be reput in the ReQueue.
-      queue_factory: Used for dependency injection.
-      get_time: Used for dependency injection.
-    """
-    if requeue_capacity is None:
-      requeue_capacity = queue_capacity
-
-    self.get_time = get_time
-    self.queue = queue_factory(queue_capacity)
-    self.requeue = queue_factory(requeue_capacity)
-    self.lock = threading.Lock()
-    self.put_cond = threading.Condition(self.lock)
-    self.get_cond = threading.Condition(self.lock)
-
-  def _DoWithTimeout(self,
-                     action,
-                     exc,
-                     wait_cond,
-                     done_cond,
-                     lock,
-                     timeout=None,
-                     block=True):
-    """Performs the given action with a timeout.
-
-    The action must be non-blocking, and raise an instance of exc on a
-    recoverable failure.  If the action fails with an instance of exc,
-    we wait on wait_cond before trying again.  Failure after the
-    timeout is reached is propagated as an exception.  Success is
-    signalled by notifying on done_cond and returning the result of
-    the action.  If action raises any exception besides an instance of
-    exc, it is immediately propagated.
-
-    Args:
-      action: A callable that performs a non-blocking action.
-      exc: An exception type that is thrown by the action to indicate
-        a recoverable error.
-      wait_cond: A condition variable which should be waited on when
-        action throws exc.
-      done_cond: A condition variable to signal if the action returns.
-      lock: The lock used by wait_cond and done_cond.
-      timeout: A non-negative float indicating the maximum time to wait.
-      block: Whether to block if the action cannot complete immediately.
-
-    Returns:
-      The result of the action, if it is successful.
-
-    Raises:
-      ValueError: If the timeout argument is negative.
-    """
-    if timeout is not None and timeout < 0.0:
-      raise ValueError('\'timeout\' must not be a negative  number')
-    if not block:
-      timeout = 0.0
-    result = None
-    success = False
-    start_time = self.get_time()
-    lock.acquire()
-    try:
-      while not success:
-        try:
-          result = action()
-          success = True
-        except Exception, e:
-          if not isinstance(e, exc):
-            raise e
-          if timeout is not None:
-            elapsed_time = self.get_time() - start_time
-            timeout -= elapsed_time
-            if timeout <= 0.0:
-              raise e
-          wait_cond.wait(timeout)
-    finally:
-      if success:
-        done_cond.notify()
-      lock.release()
-    return result
-
-  def put(self, item, block=True, timeout=None):
-    """Put an item into the requeue.
-
-    Args:
-      item: An item to add to the requeue.
-      block: Whether to block if the requeue is full.
-      timeout: Maximum on how long to wait until the queue is non-full.
-
-    Raises:
-      Queue.Full if the queue is full and the timeout expires.
-    """
-    def PutAction():
-      self.queue.put(item, block=False)
-    self._DoWithTimeout(PutAction,
-                        Queue.Full,
-                        self.get_cond,
-                        self.put_cond,
-                        self.lock,
-                        timeout=timeout,
-                        block=block)
-
-  def reput(self, item, block=True, timeout=None):
-    """Re-put an item back into the requeue.
-
-    Re-putting an item does not increase the number of outstanding
-    tasks, so the reput item should be uniquely associated with an
-    item that was previously removed from the requeue and for which
-    TaskDone has not been called.
-
-    Args:
-      item: An item to add to the requeue.
-      block: Whether to block if the requeue is full.
-      timeout: Maximum on how long to wait until the queue is non-full.
-
-    Raises:
-      Queue.Full is the queue is full and the timeout expires.
-    """
-    def ReputAction():
-      self.requeue.put(item, block=False)
-    self._DoWithTimeout(ReputAction,
-                        Queue.Full,
-                        self.get_cond,
-                        self.put_cond,
-                        self.lock,
-                        timeout=timeout,
-                        block=block)
-
-  def get(self, block=True, timeout=None):
-    """Get an item from the requeue.
-
-    Args:
-      block: Whether to block if the requeue is empty.
-      timeout: Maximum on how long to wait until the requeue is non-empty.
-
-    Returns:
-      An item from the requeue.
-
-    Raises:
-      Queue.Empty if the queue is empty and the timeout expires.
-    """
-    def GetAction():
-      try:
-        result = self.requeue.get(block=False)
-        self.requeue.task_done()
-      except Queue.Empty:
-        result = self.queue.get(block=False)
-      return result
-    return self._DoWithTimeout(GetAction,
-                               Queue.Empty,
-                               self.put_cond,
-                               self.get_cond,
-                               self.lock,
-                               timeout=timeout,
-                               block=block)
-
-  def join(self):
-    """Blocks until all of the items in the requeue have been processed."""
-    self.queue.join()
-
-  def task_done(self):
-    """Indicate that a previously enqueued item has been fully processed."""
-    self.queue.task_done()
-
-  def empty(self):
-    """Returns true if the requeue is empty."""
-    return self.queue.empty() and self.requeue.empty()
-
-  def get_nowait(self):
-    """Try to get an item from the queue without blocking."""
-    return self.get(block=False)
-
-  def qsize(self):
-    return self.queue.qsize() + self.requeue.qsize()
-
-
-class ThrottleHandler(urllib2.BaseHandler):
-  """A urllib2 handler for http and https requests that adds to a throttle."""
-
-  def __init__(self, throttle):
-    """Initialize a ThrottleHandler.
-
-    Args:
-      throttle: A Throttle instance to call for bandwidth and http/https request
-        throttling.
-    """
-    self.throttle = throttle
-
-  def AddRequest(self, throttle_name, req):
-    """Add to bandwidth throttle for given request.
-
-    Args:
-      throttle_name: The name of the bandwidth throttle to add to.
-      req: The request whose size will be added to the throttle.
-    """
-    size = 0
-    for key, value in req.headers.iteritems():
-      size += len('%s: %s\n' % (key, value))
-    for key, value in req.unredirected_hdrs.iteritems():
-      size += len('%s: %s\n' % (key, value))
-    (unused_scheme,
-     unused_host_port, url_path,
-     unused_query, unused_fragment) = urlparse.urlsplit(req.get_full_url())
-    size += len('%s %s HTTP/1.1\n' % (req.get_method(), url_path))
-    data = req.get_data()
-    if data:
-      size += len(data)
-    self.throttle.AddTransfer(throttle_name, size)
-
-  def AddResponse(self, throttle_name, res):
-    """Add to bandwidth throttle for given response.
-
-    Args:
-      throttle_name: The name of the bandwidth throttle to add to.
-      res: The response whose size will be added to the throttle.
-    """
-    content = res.read()
-    def ReturnContent():
-      return content
-    res.read = ReturnContent
-    size = len(content)
-    headers = res.info()
-    for key, value in headers.items():
-      size += len('%s: %s\n' % (key, value))
-    self.throttle.AddTransfer(throttle_name, size)
-
-  def http_request(self, req):
-    """Process an HTTP request.
-
-    If the throttle is over quota, sleep first.  Then add request size to
-    throttle before returning it to be sent.
-
-    Args:
-      req: A urllib2.Request object.
-
-    Returns:
-      The request passed in.
-    """
-    self.throttle.Sleep()
-    self.AddRequest(BANDWIDTH_UP, req)
-    return req
-
-  def https_request(self, req):
-    """Process an HTTPS request.
-
-    If the throttle is over quota, sleep first.  Then add request size to
-    throttle before returning it to be sent.
-
-    Args:
-      req: A urllib2.Request object.
-
-    Returns:
-      The request passed in.
-    """
-    self.throttle.Sleep()
-    self.AddRequest(HTTPS_BANDWIDTH_UP, req)
-    return req
-
-  def http_response(self, unused_req, res):
-    """Process an HTTP response.
-
-    The size of the response is added to the bandwidth throttle and the request
-    throttle is incremented by one.
-
-    Args:
-      unused_req: The urllib2 request for this response.
-      res: A urllib2 response object.
-
-    Returns:
-      The response passed in.
-    """
-    self.AddResponse(BANDWIDTH_DOWN, res)
-    self.throttle.AddTransfer(REQUESTS, 1)
-    return res
-
-  def https_response(self, unused_req, res):
-    """Process an HTTPS response.
-
-    The size of the response is added to the bandwidth throttle and the request
-    throttle is incremented by one.
-
-    Args:
-      unused_req: The urllib2 request for this response.
-      res: A urllib2 response object.
-
-    Returns:
-      The response passed in.
-    """
-    self.AddResponse(HTTPS_BANDWIDTH_DOWN, res)
-    self.throttle.AddTransfer(HTTPS_REQUESTS, 1)
-    return res
-
-
-class ThrottledHttpRpcServer(appengine_rpc.HttpRpcServer):
-  """Provides a simplified RPC-style interface for HTTP requests.
-
-  This RPC server uses a Throttle to prevent exceeding quotas.
-  """
-
-  def __init__(self, throttle, request_manager, *args, **kwargs):
-    """Initialize a ThrottledHttpRpcServer.
-
-    Also sets request_manager.rpc_server to the ThrottledHttpRpcServer instance.
-
-    Args:
-      throttle: A Throttles instance.
-      request_manager: A RequestManager instance.
-      args: Positional arguments to pass through to
-        appengine_rpc.HttpRpcServer.__init__
-      kwargs: Keyword arguments to pass through to
-        appengine_rpc.HttpRpcServer.__init__
-    """
-    self.throttle = throttle
-    appengine_rpc.HttpRpcServer.__init__(self, *args, **kwargs)
-    request_manager.rpc_server = self
-
-  def _GetOpener(self):
-    """Returns an OpenerDirector that supports cookies and ignores redirects.
-
-    Returns:
-      A urllib2.OpenerDirector object.
-    """
-    opener = appengine_rpc.HttpRpcServer._GetOpener(self)
-    opener.add_handler(ThrottleHandler(self.throttle))
-
-    return opener
-
-
-def ThrottledHttpRpcServerFactory(throttle, request_manager):
-  """Create a factory to produce ThrottledHttpRpcServer for a given throttle.
-
-  Args:
-    throttle: A Throttle instance to use for the ThrottledHttpRpcServer.
-    request_manager: A RequestManager instance.
-
-  Returns:
-    A factory to produce a ThrottledHttpRpcServer.
-  """
-
-  def MakeRpcServer(*args, **kwargs):
-    """Factory to produce a ThrottledHttpRpcServer.
-
-    Args:
-      args: Positional args to pass to ThrottledHttpRpcServer.
-      kwargs: Keyword args to pass to ThrottledHttpRpcServer.
-
-    Returns:
-      A ThrottledHttpRpcServer instance.
-    """
-    kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
-    kwargs['save_cookies'] = True
-    return ThrottledHttpRpcServer(throttle, request_manager, *args, **kwargs)
-  return MakeRpcServer
-
-
-class ExportResult(object):
-  """Holds the decoded content for the result of an export requests."""
+      key_range = KeyRange()
+
+      yield self.key_range_item_factory(self.request_manager,
+                                        self.progress_queue,
+                                        self.kind,
+                                        key_range)
+
+
+class DownloadResult(object):
+  """Holds the result of an entity download."""
 
   def __init__(self, continued, direction, keys, entities):
     self.continued = continued
@@ -905,21 +590,31 @@
     self.entities = entities
     self.count = len(keys)
     assert self.count == len(entities)
-    assert direction in (KeyRange.ASC, KeyRange.DESC)
+    assert direction in (key_range_module.KeyRange.ASC,
+                         key_range_module.KeyRange.DESC)
     if self.count > 0:
-      if direction == KeyRange.ASC:
+      if direction == key_range_module.KeyRange.ASC:
         self.key_start = keys[0]
         self.key_end = keys[-1]
       else:
         self.key_start = keys[-1]
         self.key_end = keys[0]
 
+  def Entities(self):
+    """Returns the list of entities for this result in key order."""
+    if self.direction == key_range_module.KeyRange.ASC:
+      return list(self.entities)
+    else:
+      result = list(self.entities)
+      result.reverse()
+      return result
+
   def __str__(self):
     return 'continued = %s\n%s' % (
-        str(self.continued), '\n'.join(self.entities))
-
-
-class _WorkItem(object):
+        str(self.continued),
+        '\n'.join(str(entity) for entity in self.entities))
+
+
+class _WorkItem(adaptive_thread_pool.WorkItem):
   """Holds a description of a unit of upload or download work."""
 
   def __init__(self, progress_queue, key_start, key_end, state_namer,
@@ -928,20 +623,101 @@
 
     Args:
       progress_queue: A queue used for tracking progress information.
-      key_start: The starting key, inclusive.
-      key_end: The ending key, inclusive.
+      key_start: The start key of the work item.
+      key_end: The end key of the work item.
       state_namer: Function to describe work item states.
       state: The initial state of the work item.
       progress_key: If this WorkItem represents state from a prior run,
         then this will be the key within the progress database.
     """
+    adaptive_thread_pool.WorkItem.__init__(self,
+                                           '[%s-%s]' % (key_start, key_end))
     self.progress_queue = progress_queue
-    self.key_start = key_start
-    self.key_end = key_end
     self.state_namer = state_namer
     self.state = state
     self.progress_key = progress_key
     self.progress_event = threading.Event()
+    self.key_start = key_start
+    self.key_end = key_end
+    self.error = None
+    self.traceback = None
+
+  def _TransferItem(self, thread_pool):
+    raise NotImplementedError()
+
+  def SetError(self):
+    """Sets the error and traceback information for this thread.
+
+    This must be called from an exception handler.
+    """
+    if not self.error:
+      exc_info = sys.exc_info()
+      self.error = exc_info[1]
+      self.traceback = exc_info[2]
+
+  def PerformWork(self, thread_pool):
+    """Perform the work of this work item and report the results.
+
+    Args:
+      thread_pool: An AdaptiveThreadPool instance.
+
+    Returns:
+      A tuple (status, instruction) of the work status and an instruction
+      for the ThreadGate.
+    """
+    status = adaptive_thread_pool.WorkItem.FAILURE
+    instruction = adaptive_thread_pool.ThreadGate.DECREASE
+
+    try:
+      self.MarkAsTransferring()
+
+      try:
+        transfer_time = self._TransferItem(thread_pool)
+        if transfer_time is None:
+          status = adaptive_thread_pool.WorkItem.RETRY
+          instruction = adaptive_thread_pool.ThreadGate.HOLD
+        else:
+          logger.debug('[%s] %s Transferred %d entities in %0.1f seconds',
+                       threading.currentThread().getName(), self, self.count,
+                       transfer_time)
+          sys.stdout.write('.')
+          sys.stdout.flush()
+          status = adaptive_thread_pool.WorkItem.SUCCESS
+          if transfer_time <= MAXIMUM_INCREASE_DURATION:
+            instruction = adaptive_thread_pool.ThreadGate.INCREASE
+          elif transfer_time <= MAXIMUM_HOLD_DURATION:
+            instruction = adaptive_thread_pool.ThreadGate.HOLD
+      except (db.InternalError, db.NotSavedError, db.Timeout,
+              db.TransactionFailedError,
+              apiproxy_errors.OverQuotaError,
+              apiproxy_errors.DeadlineExceededError,
+              apiproxy_errors.ApplicationError), e:
+        status = adaptive_thread_pool.WorkItem.RETRY
+        logger.exception('Retrying on non-fatal datastore error: %s', e)
+      except urllib2.HTTPError, e:
+        http_status = e.code
+        if http_status == 403 or (http_status >= 500 and http_status < 600):
+          status = adaptive_thread_pool.WorkItem.RETRY
+          logger.exception('Retrying on non-fatal HTTP error: %d %s',
+                           http_status, e.msg)
+        else:
+          self.SetError()
+          status = adaptive_thread_pool.WorkItem.FAILURE
+      except urllib2.URLError, e:
+        if IsURLErrorFatal(e):
+          self.SetError()
+          status = adaptive_thread_pool.WorkItem.FAILURE
+        else:
+          status = adaptive_thread_pool.WorkItem.RETRY
+          logger.exception('Retrying on non-fatal URL error: %s', e.reason)
+
+    finally:
+      if status == adaptive_thread_pool.WorkItem.SUCCESS:
+        self.MarkAsTransferred()
+      else:
+        self.MarkAsError()
+
+    return (status, instruction)
 
   def _AssertInState(self, *states):
     """Raises an Error if the state of this range is not in states."""
@@ -963,7 +739,7 @@
 
   def MarkAsTransferring(self):
     """Mark this _WorkItem as transferring, updating the progress database."""
-    self._AssertInState(STATE_READ, STATE_NOT_GOT)
+    self._AssertInState(STATE_READ, STATE_ERROR)
     self._AssertProgressKey()
     self._StateTransition(STATE_GETTING, blocking=True)
 
@@ -975,7 +751,7 @@
     """Mark this _WorkItem as failed, updating the progress database."""
     self._AssertInState(STATE_GETTING)
     self._AssertProgressKey()
-    self._StateTransition(STATE_NOT_GOT, blocking=True)
+    self._StateTransition(STATE_ERROR, blocking=True)
 
   def _StateTransition(self, new_state, blocking=False):
     """Transition the work item to a new state, storing progress information.
@@ -998,12 +774,12 @@
 
 
 
-class WorkItem(_WorkItem):
+class UploadWorkItem(_WorkItem):
   """Holds a unit of uploading work.
 
-  A WorkItem represents a number of entities that need to be uploaded to
+  An UploadWorkItem represents a number of entities that need to be uploaded to
   Google App Engine. These entities are encoded in the "content" field of
-  the WorkItem, and will be POST'd as-is to the server.
+  the UploadWorkItem, and will be POST'd as-is to the server.
 
   The entities are identified by a range of numeric keys, inclusively. In
   the case of a resumption of an upload, or a replay to correct errors,
@@ -1013,16 +789,17 @@
   fill the entire range, they must simply bound a range of valid keys.
   """
 
-  def __init__(self, progress_queue, rows, key_start, key_end,
+  def __init__(self, request_manager, progress_queue, rows, key_start, key_end,
                progress_key=None):
-    """Initialize the WorkItem instance.
+    """Initialize the UploadWorkItem instance.
 
     Args:
+      request_manager: A RequestManager instance.
       progress_queue: A queue used for tracking progress information.
       rows: A list of pairs of a line number and a list of column values
       key_start: The (numeric) starting key, inclusive.
       key_end: The (numeric) ending key, inclusive.
-      progress_key: If this WorkItem represents state from a prior run,
+      progress_key: If this UploadWorkItem represents state from a prior run,
         then this will be the key within the progress database.
     """
     _WorkItem.__init__(self, progress_queue, key_start, key_end,
@@ -1033,6 +810,7 @@
     assert isinstance(key_end, (int, long))
     assert key_start <= key_end
 
+    self.request_manager = request_manager
     self.rows = rows
     self.content = None
     self.count = len(rows)
@@ -1040,8 +818,24 @@
   def __str__(self):
     return '[%s-%s]' % (self.key_start, self.key_end)
 
+  def _TransferItem(self, thread_pool, get_time=time.time):
+    """Transfers the entities associated with an item.
+
+    Args:
+      thread_pool: An AdaptiveThreadPool instance.
+      get_time: Used for dependency injection.
+
+    Returns:
+      The duration of the transfer in seconds.
+    """
+    t = get_time()
+    if not self.content:
+      self.content = self.request_manager.EncodeContent(self.rows)
+    self.request_manager.PostEntities(self.content)
+    return get_time() - t
+
   def MarkAsTransferred(self):
-    """Mark this WorkItem as sucessfully-sent to the server."""
+    """Mark this UploadWorkItem as sucessfully-sent to the server."""
 
     self._AssertInState(STATE_SENDING)
     self._AssertProgressKey()
@@ -1068,45 +862,31 @@
     implementation_class = db.class_for_kind(kind_or_class_key)
   return implementation_class
 
-class EmptyQuery(db.Query):
-  def get(self):
-    return None
-
-  def fetch(self, limit=1000, offset=0):
-    return []
-
-  def count(self, limit=1000):
-    return 0
-
 
 def KeyLEQ(key1, key2):
   """Compare two keys for less-than-or-equal-to.
 
-  All keys with numeric ids come before all keys with names.
+  All keys with numeric ids come before all keys with names. None represents
+  an unbounded end-point, so it is both greater and less than any other key.
 
   Args:
-    key1: An int or db.Key instance.
-    key2: An int or db.Key instance.
+    key1: An int or datastore.Key instance.
+    key2: An int or datastore.Key instance.
 
   Returns:
     True if key1 <= key2
   """
-  if isinstance(key1, int) and isinstance(key2, int):
-    return key1 <= key2
   if key1 is None or key2 is None:
     return True
-  if key1.id() and not key2.id():
-    return True
-  return key1.id_or_name() <= key2.id_or_name()
-
-
-class KeyRange(_WorkItem):
-  """Represents an item of download work.
-
-  A KeyRange object represents a key range (key_start, key_end) and a
-  scan direction (KeyRange.DESC or KeyRange.ASC).  The KeyRange object
-  has an associated state: STATE_READ, STATE_GETTING, STATE_GOT, and
-  STATE_ERROR.
+  return key1 <= key2
+
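+# Consequently (an illustrative corollary): KeyLEQ(None, k) and
+# KeyLEQ(k, None) are both True for any key k, so an unbounded end-point
+# never excludes a key from a range check.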
+
+class KeyRangeItem(_WorkItem):
+  """Represents an item of work that scans over a key range.
+
+  A KeyRangeItem object holds a KeyRange and has an associated state:
+  STATE_READ, STATE_GETTING, STATE_GOT, and STATE_ERROR.
 
   - STATE_READ indicates the range ready to be downloaded by a worker thread.
   - STATE_GETTING indicates the range is currently being downloaded.
@@ -1114,280 +894,143 @@
   - STATE_ERROR indicates that an error occurred during the last download
     attempt
 
-  KeyRanges not in the STATE_GOT state are stored in the progress database.
-  When a piece of KeyRange work is downloaded, the download may cover only
-  a portion of the range.  In this case, the old KeyRange is removed from
+  KeyRangeItems not in the STATE_GOT state are stored in the progress database.
+  When a piece of KeyRangeItem work is downloaded, the download may cover only
+  a portion of the range.  In this case, the old KeyRangeItem is removed from
   the progress database and ranges covering the undownloaded range are
   generated and stored as STATE_READ in the export progress database.
   """
 
-  DESC = 0
-  ASC = 1
-
-  MAX_KEY_LEN = 500
-
   def __init__(self,
+               request_manager,
                progress_queue,
                kind,
-               direction,
-               key_start=None,
-               key_end=None,
-               include_start=True,
-               include_end=True,
+               key_range,
                progress_key=None,
                state=STATE_READ):
-    """Initialize a KeyRange object.
+    """Initialize a KeyRangeItem object.
 
     Args:
+      request_manager: A RequestManager instance.
       progress_queue: A queue used for tracking progress information.
       kind: The kind of entities for this range.
-      direction: The direction of the query for this range.
-      key_start: The starting key for this range.
-      key_end: The ending key for this range.
-      include_start: Whether the start key should be included in the range.
-      include_end: Whether the end key should be included in the range.
+      key_range: A KeyRange instance for this work item.
       progress_key: The key for this range within the progress database.
       state: The initial state of this range.
-
-    Raises:
-      KeyRangeError: if key_start is None.
     """
-    assert direction in (KeyRange.ASC, KeyRange.DESC)
-    _WorkItem.__init__(self, progress_queue, key_start, key_end,
-                       ExportStateName, state=state, progress_key=progress_key)
+    _WorkItem.__init__(self, progress_queue, key_range.key_start,
+                       key_range.key_end, ExportStateName, state=state,
+                       progress_key=progress_key)
+    self.request_manager = request_manager
     self.kind = kind
-    self.direction = direction
-    self.export_result = None
+    self.key_range = key_range
+    self.download_result = None
     self.count = 0
-    self.include_start = include_start
-    self.include_end = include_end
-    self.SPLIT_KEY = db.Key.from_path(self.kind, unichr(0))
+    self.key_start = key_range.key_start
+    self.key_end = key_range.key_end
 
   def __str__(self):
-    return '[%s-%s]' % (PrettyKey(self.key_start), PrettyKey(self.key_end))
+    return str(self.key_range)
 
   def __repr__(self):
     return self.__str__()
 
   def MarkAsTransferred(self):
-    """Mark this KeyRange as transferred, updating the progress database."""
+    """Mark this KeyRangeItem as transferred, updating the progress database."""
     pass
 
-  def Process(self, export_result, num_threads, batch_size, work_queue):
-    """Mark this KeyRange as success, updating the progress database.
-
-    Process will split this KeyRange based on the content of export_result and
-    adds the unfinished ranges to the work queue.
+  def Process(self, download_result, thread_pool, batch_size,
+              new_state=STATE_GOT):
+    """Mark this KeyRangeItem as success, updating the progress database.
+
+    Process will split this KeyRangeItem based on the content of
+    download_result and adds the unfinished ranges to the work queue.
 
     Args:
-      export_result: An ExportResult instance.
-      num_threads: The number of threads for parallel transfers.
+      download_result: A DownloadResult instance.
+      thread_pool: An AdaptiveThreadPool instance.
       batch_size: The number of entities to transfer per request.
-      work_queue: The work queue to add unfinished ranges to.
-
-    Returns:
-      A list of KeyRanges representing undownloaded datastore key ranges.
+      new_state: The state to transition the completed range to.
     """
     self._AssertInState(STATE_GETTING)
     self._AssertProgressKey()
 
-    self.export_result = export_result
-    self.count = len(export_result.keys)
-    if export_result.continued:
-      self._FinishedRange()._StateTransition(STATE_GOT, blocking=True)
-      self._AddUnfinishedRanges(num_threads, batch_size, work_queue)
+    self.download_result = download_result
+    self.count = len(download_result.keys)
+    if download_result.continued:
+      self._FinishedRange()._StateTransition(new_state, blocking=True)
+      self._AddUnfinishedRanges(thread_pool, batch_size)
     else:
-      self._StateTransition(STATE_GOT, blocking=True)
+      self._StateTransition(new_state, blocking=True)
 
   def _FinishedRange(self):
-    """Returns the range completed by the export_result.
-
-    Returns:
-      A KeyRange representing a completed range.
-    """
-    assert self.export_result is not None
-
-    if self.direction == KeyRange.ASC:
-      key_start = self.key_start
-      if self.export_result.continued:
-        key_end = self.export_result.key_end
-      else:
-        key_end = self.key_end
-    else:
-      key_end = self.key_end
-      if self.export_result.continued:
-        key_start = self.export_result.key_start
-      else:
-        key_start = self.key_start
-
-    result = KeyRange(self.progress_queue,
-                      self.kind,
-                      key_start=key_start,
-                      key_end=key_end,
-                      direction=self.direction)
-
-    result.progress_key = self.progress_key
-    result.export_result = self.export_result
-    result.state = self.state
-    result.count = self.count
-    return result
-
-  def FilterQuery(self, query):
-    """Add query filter to restrict to this key range.
-
-    Args:
-      query: A db.Query instance.
-    """
-    if self.key_start == self.key_end and not (
-        self.include_start or self.include_end):
-      return EmptyQuery()
-    if self.include_start:
-      start_comparator = '>='
-    else:
-      start_comparator = '>'
-    if self.include_end:
-      end_comparator = '<='
-    else:
-      end_comparator = '<'
-    if self.key_start and self.key_end:
-      query.filter('__key__ %s' % start_comparator, self.key_start)
-      query.filter('__key__ %s' % end_comparator, self.key_end)
-    elif self.key_start:
-      query.filter('__key__ %s' % start_comparator, self.key_start)
-    elif self.key_end:
-      query.filter('__key__ %s' % end_comparator, self.key_end)
-
-    return query
-
-  def MakeParallelQuery(self):
-    """Construct a query for this key range, for parallel downloading.
-
-    Returns:
-      A db.Query instance.
-
-    Raises:
-      KeyRangeError: if self.direction is not one of
-        KeyRange.ASC, KeyRange.DESC
-    """
-    if self.direction == KeyRange.ASC:
-      direction = ''
-    elif self.direction == KeyRange.DESC:
-      direction = '-'
-    else:
-      raise KeyRangeError('KeyRange direction unexpected: %s', self.direction)
-    query = db.Query(GetImplementationClass(self.kind))
-    query.order('%s__key__' % direction)
-
-    return self.FilterQuery(query)
-
-  def MakeSerialQuery(self):
-    """Construct a query for this key range without descending __key__ scan.
+    """Returns the range completed by the download_result.
 
     Returns:
-      A db.Query instance.
+      A KeyRangeItem representing a completed range.
     """
-    query = db.Query(GetImplementationClass(self.kind))
-    query.order('__key__')
-
-    return self.FilterQuery(query)
-
-  def _BisectStringRange(self, start, end):
-    if start == end:
-      return (start, start, end)
-    start += '\0'
-    end += '\0'
-    midpoint = []
-    expected_max = 127
-    for i in xrange(min(len(start), len(end))):
-      if start[i] == end[i]:
-        midpoint.append(start[i])
+    assert self.download_result is not None
+
+    if self.key_range.direction == key_range_module.KeyRange.ASC:
+      key_start = self.key_range.key_start
+      if self.download_result.continued:
+        key_end = self.download_result.key_end
       else:
-        ord_sum = ord(start[i]) + ord(end[i])
-        midpoint.append(unichr(ord_sum / 2))
-        if ord_sum % 2:
-          if len(start) > i + 1:
-            ord_start = ord(start[i+1])
-          else:
-            ord_start = 0
-          if ord_start < expected_max:
-            ord_split = (expected_max + ord_start) / 2
-          else:
-            ord_split = (0xFFFF + ord_start) / 2
-          midpoint.append(unichr(ord_split))
-        break
-    return (start[:-1], ''.join(midpoint), end[:-1])
-
-  def SplitRange(self, key_start, include_start, key_end, include_end,
-                 export_result, num_threads, batch_size, work_queue):
-    """Split the key range [key_start, key_end] into a list of ranges."""
-    if export_result.direction == KeyRange.ASC:
-      key_start = export_result.key_end
-      include_start = False
+        key_end = self.key_range.key_end
     else:
-      key_end = export_result.key_start
-      include_end = False
-    key_pairs = []
-    if not key_start:
-      key_pairs.append((key_start, include_start, key_end, include_end,
-                        KeyRange.ASC))
-    elif not key_end:
-      key_pairs.append((key_start, include_start, key_end, include_end,
-                        KeyRange.DESC))
-    elif work_queue.qsize() > 2 * num_threads:
-      key_pairs.append((key_start, include_start, key_end, include_end,
-                        KeyRange.ASC))
-    elif key_start.id() and key_end.id():
-      if key_end.id() - key_start.id() > batch_size:
-        key_half = db.Key.from_path(self.kind,
-                                    (key_start.id() + key_end.id()) / 2)
-        key_pairs.append((key_start, include_start,
-                          key_half, True,
-                          KeyRange.DESC))
-        key_pairs.append((key_half, False,
-                          key_end, include_end,
-                          KeyRange.ASC))
+      key_end = self.key_range.key_end
+      if self.download_result.continued:
+        key_start = self.download_result.key_start
       else:
-        key_pairs.append((key_start, include_start, key_end, include_end,
-                          KeyRange.ASC))
-    elif key_start.name() and key_end.name():
-      (start, middle, end) = self._BisectStringRange(key_start.name(),
-                                                     key_end.name())
-      key_pairs.append((key_start, include_start,
-                        db.Key.from_path(self.kind, middle), True,
-                        KeyRange.DESC))
-      key_pairs.append((db.Key.from_path(self.kind, middle), False,
-                        key_end, include_end,
-                        KeyRange.ASC))
+        key_start = self.key_range.key_start
+
+    key_range = KeyRange(key_start=key_start,
+                         key_end=key_end,
+                         direction=self.key_range.direction)
+
+    result = self.__class__(self.request_manager,
+                            self.progress_queue,
+                            self.kind,
+                            key_range,
+                            progress_key=self.progress_key,
+                            state=self.state)
+
+    result.download_result = self.download_result
+    result.count = self.count
+    return result
+
+  def _SplitAndAddRanges(self, thread_pool, batch_size):
+    """Split the key range [key_start, key_end] into a list of ranges."""
+    if self.download_result.direction == key_range_module.KeyRange.ASC:
+      key_range = KeyRange(
+          key_start=self.download_result.key_end,
+          key_end=self.key_range.key_end,
+          include_start=False)
     else:
-      assert key_start.id() and key_end.name()
-      key_pairs.append((key_start, include_start,
-                        self.SPLIT_KEY, False,
-                        KeyRange.DESC))
-      key_pairs.append((self.SPLIT_KEY, True,
-                        key_end, include_end,
-                        KeyRange.ASC))
-
-    ranges = [KeyRange(self.progress_queue,
-                       self.kind,
-                       key_start=start,
-                       include_start=include_start,
-                       key_end=end,
-                       include_end=include_end,
-                       direction=direction)
-              for (start, include_start, end, include_end, direction)
-              in key_pairs]
+      key_range = KeyRange(
+          key_start=self.key_range.key_start,
+          key_end=self.download_result.key_start,
+          include_end=False)
+
+    if thread_pool.QueuedItemCount() > 2 * thread_pool.num_threads():
+      ranges = [key_range]
+    else:
+      ranges = key_range.split_range(batch_size=batch_size)
 
     for key_range in ranges:
-      key_range.MarkAsRead()
-      work_queue.put(key_range, block=True)
-
-  def _AddUnfinishedRanges(self, num_threads, batch_size, work_queue):
-    """Adds incomplete KeyRanges to the work_queue.
+      key_range_item = self.__class__(self.request_manager,
+                                      self.progress_queue,
+                                      self.kind,
+                                      key_range)
+      key_range_item.MarkAsRead()
+      thread_pool.SubmitItem(key_range_item, block=True)
+
+  def _AddUnfinishedRanges(self, thread_pool, batch_size):
+    """Adds incomplete KeyRanges to the thread_pool.
 
     Args:
-      num_threads: The number of threads for parallel transfers.
+      thread_pool: An AdaptiveThreadPool instance.
       batch_size: The number of entities to transfer per request.
-      work_queue: The work queue to add unfinished ranges to.
 
-    Returns:
-      A list of KeyRanges representing incomplete datastore key ranges.
@@ -1395,15 +1038,43 @@
     Raises:
       KeyRangeError: if this key range has already been completely transferred.
     """
-    assert self.export_result is not None
-    if self.export_result.continued:
-      self.SplitRange(self.key_start, self.include_start, self.key_end,
-                      self.include_end, self.export_result,
-                      num_threads, batch_size, work_queue)
+    assert self.download_result is not None
+    if self.download_result.continued:
+      self._SplitAndAddRanges(thread_pool, batch_size)
     else:
       raise KeyRangeError('No unfinished part of key range.')
 
 
+class DownloadItem(KeyRangeItem):
+  """A KeyRangeItem for downloading key ranges."""
+
+  def _TransferItem(self, thread_pool, get_time=time.time):
+    """Transfers the entities associated with an item."""
+    t = get_time()
+    download_result = self.request_manager.GetEntities(self)
+    transfer_time = get_time() - t
+    self.Process(download_result, thread_pool,
+                 self.request_manager.batch_size)
+    return transfer_time
+
+
+class MapperItem(KeyRangeItem):
+  """A KeyRangeItem for mapping over key ranges."""
+
+  def _TransferItem(self, thread_pool, get_time=time.time):
+    t = get_time()
+    download_result = self.request_manager.GetEntities(self)
+    transfer_time = get_time() - t
+    mapper = self.request_manager.GetMapper()
+    try:
+      mapper.batch_apply(download_result.Entities())
+    except MapperRetry:
+      return None
+    self.Process(download_result, thread_pool,
+                 self.request_manager.batch_size)
+    return transfer_time
+
+
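The AdaptiveThreadPool referenced throughout is what actually drives these
items. As a rough, hedged sketch of that contract (not the pool's real
code), a single worker step amounts to:

  def WorkOnItem(item, pool):
    item.MarkAsTransferring()
    transfer_time = item._TransferItem(pool)
    if transfer_time is None:
      # A MapperItem returns None on MapperRetry; the item is expected to
      # be retried rather than marked as done.
      item.MarkAsError()
    else:
      logger.debug('Transferred %d entities in %0.1f seconds',
                   item.count, transfer_time)
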
 class RequestManager(object):
   """A class which wraps a connection to the server."""
 
@@ -1416,7 +1087,8 @@
                batch_size,
                secure,
                email,
-               passin):
+               passin,
+               dry_run=False):
     """Initialize a RequestManager object.
 
     Args:
@@ -1445,23 +1117,39 @@
     self.parallel_download = True
     self.email = email
     self.passin = passin
-    throttled_rpc_server_factory = ThrottledHttpRpcServerFactory(
-        self.throttle, self)
+    self.mapper = None
+    self.dry_run = dry_run
+
+    if self.dry_run:
+      logger.info('Running in dry run mode, skipping remote_api setup')
+      return
+
     logger.debug('Configuring remote_api. url_path = %s, '
                  'servername = %s' % (url_path, host_port))
+
+    def CookieHttpRpcServer(*args, **kwargs):
+      kwargs['save_cookies'] = True
+      kwargs['account_type'] = 'HOSTED_OR_GOOGLE'
+      return appengine_rpc.HttpRpcServer(*args, **kwargs)
+
     remote_api_stub.ConfigureRemoteDatastore(
         app_id,
         url_path,
         self.AuthFunction,
         servername=host_port,
-        rpc_server_factory=throttled_rpc_server_factory,
+        rpc_server_factory=CookieHttpRpcServer,
         secure=self.secure)
+    remote_api_throttle.ThrottleRemoteDatastore(self.throttle)
     logger.debug('Bulkloader using app_id: %s', os.environ['APPLICATION_ID'])
 
   def Authenticate(self):
     """Invoke authentication if necessary."""
-    logger.info('Connecting to %s', self.url_path)
-    self.rpc_server.Send(self.url_path, payload=None)
+    logger.info('Connecting to %s%s', self.host_port, self.url_path)
+    if self.dry_run:
+      self.authenticated = True
+      return
+
+    remote_api_stub.MaybeInvokeAuthentication()
     self.authenticated = True
 
   def AuthFunction(self,
@@ -1506,7 +1194,7 @@
       loader: Used for dependency injection.
 
     Returns:
-      A list of db.Model instances.
+      A list of datastore.Entity instances.
 
     Raises:
       ConfigurationError: if no loader is defined for self.kind
@@ -1520,77 +1208,112 @@
     entities = []
     for line_number, values in rows:
       key = loader.generate_key(line_number, values)
-      if isinstance(key, db.Key):
+      if isinstance(key, datastore.Key):
         parent = key.parent()
         key = key.name()
       else:
         parent = None
       entity = loader.create_entity(values, key_name=key, parent=parent)
+
+      def ToEntity(entity):
+        if isinstance(entity, db.Model):
+          return entity._populate_entity()
+        else:
+          return entity
+
       if isinstance(entity, list):
-        entities.extend(entity)
+        entities.extend(map(ToEntity, entity))
       elif entity:
-        entities.append(entity)
+        entities.append(ToEntity(entity))
 
     return entities
 
-  def PostEntities(self, item):
+  def PostEntities(self, entities):
     """Posts Entity records to a remote endpoint over HTTP.
 
     Args:
-      item: A workitem containing the entities to post.
-
-    Returns:
-      A pair of the estimated size of the request in bytes and the response
-        from the server as a str.
+      entities: A list of datastore entities.
     """
-    entities = item.content
-    db.put(entities)
-
-  def GetEntities(self, key_range):
+    if self.dry_run:
+      return
+    datastore.Put(entities)
+
+  def _QueryForPbs(self, query):
+    """Perform the given query and return a list of entity_pb's."""
+    try:
+      query_pb = query._ToPb(limit=self.batch_size)
+      result_pb = datastore_pb.QueryResult()
+      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', query_pb,
+                                     result_pb)
+      next_pb = datastore_pb.NextRequest()
+      next_pb.set_count(self.batch_size)
+      next_pb.mutable_cursor().CopyFrom(result_pb.cursor())
+      result_pb = datastore_pb.QueryResult()
+      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', next_pb, result_pb)
+      return result_pb.result_list()
+    except apiproxy_errors.ApplicationError, e:
+      raise datastore._ToDatastoreError(e)
+
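For orientation, the two MakeSyncCall round trips above (RunQuery to open a
cursor, then Next to drain one batch) are roughly the wire-level form of the
model-layer fetch this method replaces; a hedged sketch of that equivalent:

  query = db.Query(GetImplementationClass(self.kind))
  models = query.fetch(self.batch_size)  # decodes every pb into a db.Model

_QueryForPbs keeps the raw entity protobufs instead, avoiding the decode and
re-encode cost on every batch.
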
+  def GetEntities(self, key_range_item, key_factory=datastore.Key):
     """Gets Entity records from a remote endpoint over HTTP.
 
     Args:
-     key_range: Range of keys to get.
+     key_range_item: Range of keys to get.
+     key_factory: Used for dependency injection.
 
     Returns:
-      An ExportResult instance.
+      A DownloadResult instance.
 
-    Raises:
-      ConfigurationError: if no Exporter is defined for self.kind
     """
-    try:
-      Exporter.RegisteredExporter(self.kind)
-    except KeyError:
-      raise ConfigurationError('No Exporter defined for kind %s.' % self.kind)
-
     keys = []
     entities = []
 
     if self.parallel_download:
-      query = key_range.MakeParallelQuery()
+      query = key_range_item.key_range.make_directed_datastore_query(self.kind)
       try:
-        results = query.fetch(self.batch_size)
+        results = self._QueryForPbs(query)
       except datastore_errors.NeedIndexError:
         logger.info('%s: No descending index on __key__, '
                     'performing serial download', self.kind)
         self.parallel_download = False
 
     if not self.parallel_download:
-      key_range.direction = KeyRange.ASC
-      query = key_range.MakeSerialQuery()
-      results = query.fetch(self.batch_size)
+      key_range_item.key_range.direction = key_range_module.KeyRange.ASC
+      query = key_range_item.key_range.make_ascending_datastore_query(self.kind)
+      results = self._QueryForPbs(query)
 
     size = len(results)
 
-    for model in results:
-      key = model.key()
-      entities.append(cPickle.dumps(model))
+    for entity in results:
+      key = key_factory()
+      key._Key__reference = entity.key()
+      entities.append(entity)
       keys.append(key)
 
     continued = (size == self.batch_size)
-    key_range.count = size
-
-    return ExportResult(continued, key_range.direction, keys, entities)
+    key_range_item.count = size
+
+    return DownloadResult(continued, key_range_item.key_range.direction,
+                          keys, entities)
+
+  def GetMapper(self):
+    """Returns a mapper for the registered kind.
+
+    Returns:
+      A Mapper instance.
+
+    Raises:
+      ConfigurationError: if no Mapper is defined for self.kind
+    """
+    if not self.mapper:
+      try:
+        self.mapper = Mapper.RegisteredMapper(self.kind)
+      except KeyError:
+        logger.error('No Mapper defined for kind %s.' % self.kind)
+        raise ConfigurationError('No Mapper defined for kind %s.' % self.kind)
+    return self.mapper
 
 
 def InterruptibleSleep(sleep_time):
@@ -1611,357 +1334,6 @@
       return
 
 
-class ThreadGate(object):
-  """Manage the number of active worker threads.
-
-  The ThreadGate limits the number of threads that are simultaneously
-  uploading batches of records in order to implement adaptive rate
-  control.  The number of simultaneous upload threads that it takes to
-  start causing timeout varies widely over the course of the day, so
-  adaptive rate control allows the uploader to do many uploads while
-  reducing the error rate and thus increasing the throughput.
-
-  Initially the ThreadGate allows only one uploader thread to be active.
-  For each successful upload, another thread is activated and for each
-  failed upload, the number of active threads is reduced by one.
-  """
-
-  def __init__(self, enabled,
-               threshhold1=MAXIMUM_INCREASE_DURATION,
-               threshhold2=MAXIMUM_HOLD_DURATION,
-               sleep=InterruptibleSleep):
-    """Constructor for ThreadGate instances.
-
-    Args:
-      enabled: Whether the thread gate is enabled
-      threshhold1: Maximum duration (in seconds) for a transfer to increase
-        the number of active threads.
-      threshhold2: Maximum duration (in seconds) for a transfer to not decrease
-        the number of active threads.
-    """
-    self.enabled = enabled
-    self.enabled_count = 1
-    self.lock = threading.Lock()
-    self.thread_semaphore = threading.Semaphore(self.enabled_count)
-    self._threads = []
-    self.backoff_time = 0
-    self.sleep = sleep
-    self.threshhold1 = threshhold1
-    self.threshhold2 = threshhold2
-
-  def Register(self, thread):
-    """Register a thread with the thread gate."""
-    self._threads.append(thread)
-
-  def Threads(self):
-    """Yields the registered threads."""
-    for thread in self._threads:
-      yield thread
-
-  def EnableThread(self):
-    """Enable one more worker thread."""
-    self.lock.acquire()
-    try:
-      self.enabled_count += 1
-    finally:
-      self.lock.release()
-    self.thread_semaphore.release()
-
-  def EnableAllThreads(self):
-    """Enable all worker threads."""
-    for unused_idx in xrange(len(self._threads) - self.enabled_count):
-      self.EnableThread()
-
-  def StartWork(self):
-    """Starts a critical section in which the number of workers is limited.
-
-    If thread throttling is enabled then this method starts a critical
-    section which allows self.enabled_count simultaneously operating
-    threads. The critical section is ended by calling self.FinishWork().
-    """
-    if self.enabled:
-      self.thread_semaphore.acquire()
-      if self.backoff_time > 0.0:
-        if not threading.currentThread().exit_flag:
-          logger.info('Backing off: %.1f seconds',
-                      self.backoff_time)
-        self.sleep(self.backoff_time)
-
-  def FinishWork(self):
-    """Ends a critical section started with self.StartWork()."""
-    if self.enabled:
-      self.thread_semaphore.release()
-
-  def TransferSuccess(self, duration):
-    """Informs the throttler that an item was successfully sent.
-
-    If thread throttling is enabled and the duration is low enough, this
-    method will cause an additional thread to run in the critical section.
-
-    Args:
-      duration: The duration of the transfer in seconds.
-    """
-    if duration > self.threshhold2:
-      logger.debug('Transfer took %s, decreasing workers.', duration)
-      self.DecreaseWorkers(backoff=False)
-      return
-    elif duration > self.threshhold1:
-      logger.debug('Transfer took %s, not increasing workers.', duration)
-      return
-    elif self.enabled:
-      if self.backoff_time > 0.0:
-        logger.info('Resetting backoff to 0.0')
-        self.backoff_time = 0.0
-      do_enable = False
-      self.lock.acquire()
-      try:
-        if self.enabled and len(self._threads) > self.enabled_count:
-          do_enable = True
-          self.enabled_count += 1
-      finally:
-        self.lock.release()
-      if do_enable:
-        logger.debug('Increasing active thread count to %d',
-                     self.enabled_count)
-        self.thread_semaphore.release()
-
-  def DecreaseWorkers(self, backoff=True):
-    """Informs the thread_gate that an item failed to send.
-
-    If thread throttling is enabled, this method will cause the
-    throttler to allow one fewer thread in the critical section. If
-    there is only one thread remaining, failures will result in
-    exponential backoff until there is a success.
-
-    Args:
-      backoff: Whether to increase exponential backoff if there is only
-        one thread enabled.
-    """
-    if self.enabled:
-      do_disable = False
-      self.lock.acquire()
-      try:
-        if self.enabled:
-          if self.enabled_count > 1:
-            do_disable = True
-            self.enabled_count -= 1
-          elif backoff:
-            if self.backoff_time == 0.0:
-              self.backoff_time = INITIAL_BACKOFF
-            else:
-              self.backoff_time *= BACKOFF_FACTOR
-      finally:
-        self.lock.release()
-      if do_disable:
-        logger.debug('Decreasing the number of active threads to %d',
-                     self.enabled_count)
-        self.thread_semaphore.acquire()
-
-
-class Throttle(object):
-  """A base class for upload rate throttling.
-
-  Transferring large number of records, too quickly, to an application
-  could trigger quota limits and cause the transfer process to halt.
-  In order to stay within the application's quota, we throttle the
-  data transfer to a specified limit (across all transfer threads).
-  This limit defaults to about half of the Google App Engine default
-  for an application, but can be manually adjusted faster/slower as
-  appropriate.
-
-  This class tracks a moving average of some aspect of the transfer
-  rate (bandwidth, records per second, http connections per
-  second). It keeps two windows of counts of bytes transferred, on a
-  per-thread basis. One block is the "current" block, and the other is
-  the "prior" block. It will rotate the counts from current to prior
-  when ROTATE_PERIOD has passed.  Thus, the current block will
-  represent from 0 seconds to ROTATE_PERIOD seconds of activity
-  (determined by: time.time() - self.last_rotate).  The prior block
-  will always represent a full ROTATE_PERIOD.
-
-  Sleeping is performed just before a transfer of another block, and is
-  based on the counts transferred *before* the next transfer. It really
-  does not matter how much will be transferred, but only that for all the
-  data transferred SO FAR that we have interspersed enough pauses to
-  ensure the aggregate transfer rate is within the specified limit.
-
-  These counts are maintained on a per-thread basis, so we do not require
-  any interlocks around incrementing the counts. There IS an interlock on
-  the rotation of the counts because we do not want multiple threads to
-  multiply-rotate the counts.
-
-  There are various race conditions in the computation and collection
-  of these counts. We do not require precise values, but simply to
-  keep the overall transfer within the bandwidth limits. If a given
-  pause is a little short, or a little long, then the aggregate delays
-  will be correct.
-  """
-
-  ROTATE_PERIOD = 600
-
-  def __init__(self,
-               get_time=time.time,
-               thread_sleep=InterruptibleSleep,
-               layout=None):
-    self.get_time = get_time
-    self.thread_sleep = thread_sleep
-
-    self.start_time = get_time()
-    self.transferred = {}
-    self.prior_block = {}
-    self.totals = {}
-    self.throttles = {}
-
-    self.last_rotate = {}
-    self.rotate_mutex = {}
-    if layout:
-      self.AddThrottles(layout)
-
-  def AddThrottle(self, name, limit):
-    self.throttles[name] = limit
-    self.transferred[name] = {}
-    self.prior_block[name] = {}
-    self.totals[name] = {}
-    self.last_rotate[name] = self.get_time()
-    self.rotate_mutex[name] = threading.Lock()
-
-  def AddThrottles(self, layout):
-    for key, value in layout.iteritems():
-      self.AddThrottle(key, value)
-
-  def Register(self, thread):
-    """Register this thread with the throttler."""
-    thread_name = thread.getName()
-    for throttle_name in self.throttles.iterkeys():
-      self.transferred[throttle_name][thread_name] = 0
-      self.prior_block[throttle_name][thread_name] = 0
-      self.totals[throttle_name][thread_name] = 0
-
-  def VerifyName(self, throttle_name):
-    if throttle_name not in self.throttles:
-      raise AssertionError('%s is not a registered throttle' % throttle_name)
-
-  def AddTransfer(self, throttle_name, token_count):
-    """Add a count to the amount this thread has transferred.
-
-    Each time a thread transfers some data, it should call this method to
-    note the amount sent. The counts may be rotated if sufficient time
-    has passed since the last rotation.
-
-    Note: this method should only be called by the BulkLoaderThread
-    instances. The token count is allocated towards the
-    "current thread".
-
-    Args:
-      throttle_name: The name of the throttle to add to.
-      token_count: The number to add to the throttle counter.
-    """
-    self.VerifyName(throttle_name)
-    transferred = self.transferred[throttle_name]
-    transferred[threading.currentThread().getName()] += token_count
-
-    if self.last_rotate[throttle_name] + self.ROTATE_PERIOD < self.get_time():
-      self._RotateCounts(throttle_name)
-
-  def Sleep(self, throttle_name=None):
-    """Possibly sleep in order to limit the transfer rate.
-
-    Note that we sleep based on *prior* transfers rather than what we
-    may be about to transfer. The next transfer could put us under/over
-    and that will be rectified *after* that transfer. Net result is that
-    the average transfer rate will remain within bounds. Spiky behavior
-    or uneven rates among the threads could possibly bring the transfer
-    rate above the requested limit for short durations.
-
-    Args:
-      throttle_name: The name of the throttle to sleep on.  If None or
-        omitted, then sleep on all throttles.
-    """
-    if throttle_name is None:
-      for throttle_name in self.throttles:
-        self.Sleep(throttle_name=throttle_name)
-      return
-
-    self.VerifyName(throttle_name)
-
-    thread = threading.currentThread()
-
-    while True:
-      duration = self.get_time() - self.last_rotate[throttle_name]
-
-      total = 0
-      for count in self.prior_block[throttle_name].values():
-        total += count
-
-      if total:
-        duration += self.ROTATE_PERIOD
-
-      for count in self.transferred[throttle_name].values():
-        total += count
-
-      sleep_time = (float(total) / self.throttles[throttle_name]) - duration
-
-      if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION:
-        break
-
-      logger.debug('[%s] Throttling on %s. Sleeping for %.1f ms '
-                   '(duration=%.1f ms, total=%d)',
-                   thread.getName(), throttle_name,
-                   sleep_time * 1000, duration * 1000, total)
-      self.thread_sleep(sleep_time)
-      if thread.exit_flag:
-        break
-      self._RotateCounts(throttle_name)
-
-  def _RotateCounts(self, throttle_name):
-    """Rotate the transfer counters.
-
-    If sufficient time has passed, then rotate the counters from active to
-    the prior-block of counts.
-
-    This rotation is interlocked to ensure that multiple threads do not
-    over-rotate the counts.
-
-    Args:
-      throttle_name: The name of the throttle to rotate.
-    """
-    self.VerifyName(throttle_name)
-    self.rotate_mutex[throttle_name].acquire()
-    try:
-      next_rotate_time = self.last_rotate[throttle_name] + self.ROTATE_PERIOD
-      if next_rotate_time >= self.get_time():
-        return
-
-      for name, count in self.transferred[throttle_name].items():
-
-
-        self.prior_block[throttle_name][name] = count
-        self.transferred[throttle_name][name] = 0
-
-        self.totals[throttle_name][name] += count
-
-      self.last_rotate[throttle_name] = self.get_time()
-
-    finally:
-      self.rotate_mutex[throttle_name].release()
-
-  def TotalTransferred(self, throttle_name):
-    """Return the total transferred, and over what period.
-
-    Args:
-      throttle_name: The name of the throttle to total.
-
-    Returns:
-      A tuple of the total count and running time for the given throttle name.
-    """
-    total = 0
-    for count in self.totals[throttle_name].values():
-      total += count
-    for count in self.transferred[throttle_name].values():
-      total += count
-    return total, self.get_time() - self.start_time
-
-
 class _ThreadBase(threading.Thread):
   """Provide some basic features for the threads used in the uploader.
 
@@ -1993,18 +1365,29 @@
 
     self.exit_flag = False
     self.error = None
+    self.traceback = None
 
   def run(self):
     """Perform the work of the thread."""
-    logger.info('[%s] %s: started', self.getName(), self.__class__.__name__)
+    logger.debug('[%s] %s: started', self.getName(), self.__class__.__name__)
 
     try:
       self.PerformWork()
     except:
-      self.error = sys.exc_info()[1]
+      self.SetError()
       logger.exception('[%s] %s:', self.getName(), self.__class__.__name__)
 
-    logger.info('[%s] %s: exiting', self.getName(), self.__class__.__name__)
+    logger.debug('[%s] %s: exiting', self.getName(), self.__class__.__name__)
+
+  def SetError(self):
+    """Sets the error and traceback information for this thread.
+
+    This must be called from an exception handler.
+    """
+    if not self.error:
+      exc_info = sys.exc_info()
+      self.error = exc_info[1]
+      self.traceback = exc_info[2]
 
   def PerformWork(self):
     """Perform the thread-specific work."""
@@ -2014,6 +1397,10 @@
     """If an error is present, then log it."""
     if self.error:
       logger.error('Error in %s: %s', self.GetFriendlyName(), self.error)
+      if self.traceback:
+        logger.debug(''.join(traceback.format_exception(self.error.__class__,
+                                                        self.error,
+                                                        self.traceback)))
 
   def GetFriendlyName(self):
     """Returns a human-friendly description of the thread."""
@@ -2044,292 +1431,12 @@
   return error.reason[0] not in non_fatal_error_codes
 
 
-def PrettyKey(key):
-  """Returns a nice string representation of the given key."""
-  if key is None:
-    return None
-  elif isinstance(key, db.Key):
-    return repr(key.id_or_name())
-  return str(key)
-
-
-class _BulkWorkerThread(_ThreadBase):
-  """A base class for worker threads.
-
-  This thread will read WorkItem instances from the work_queue and upload
-  the entities to the server application. Progress information will be
-  pushed into the progress_queue as the work is being performed.
-
-  If a _BulkWorkerThread encounters a transient error, the entities will be
-  resent, if a fatal error is encoutered the BulkWorkerThread exits.
-
-  Subclasses must provide implementations for PreProcessItem, TransferItem,
-  and ProcessResponse.
-  """
-
-  def __init__(self,
-               work_queue,
-               throttle,
-               thread_gate,
-               request_manager,
-               num_threads,
-               batch_size,
-               state_message,
-               get_time):
-    """Initialize the BulkLoaderThread instance.
-
-    Args:
-      work_queue: A queue containing WorkItems for processing.
-      throttle: A Throttles to control upload bandwidth.
-      thread_gate: A ThreadGate to control number of simultaneous uploads.
-      request_manager: A RequestManager instance.
-      num_threads: The number of threads for parallel transfers.
-      batch_size: The number of entities to transfer per request.
-      state_message: Used for dependency injection.
-      get_time: Used for dependency injection.
-    """
-    _ThreadBase.__init__(self)
-
-    self.work_queue = work_queue
-    self.throttle = throttle
-    self.thread_gate = thread_gate
-    self.request_manager = request_manager
-    self.num_threads = num_threads
-    self.batch_size = batch_size
-    self.state_message = state_message
-    self.get_time = get_time
-
-  def PreProcessItem(self, item):
-    """Performs pre transfer processing on a work item."""
-    raise NotImplementedError()
-
-  def TransferItem(self, item):
-    """Transfers the entities associated with an item.
-
-    Args:
-      item: An item of upload (WorkItem) or download (KeyRange) work.
-
-    Returns:
-      A tuple of (estimated transfer size, response)
-    """
-    raise NotImplementedError()
-
-  def ProcessResponse(self, item, result):
-    """Processes the response from the server application."""
-    raise NotImplementedError()
-
-  def PerformWork(self):
-    """Perform the work of a _BulkWorkerThread."""
-    while not self.exit_flag:
-      transferred = False
-      self.thread_gate.StartWork()
-      try:
-        try:
-          item = self.work_queue.get(block=True, timeout=1.0)
-        except Queue.Empty:
-          continue
-        if item == _THREAD_SHOULD_EXIT:
-          break
-
-        logger.debug('[%s] Got work item %s', self.getName(), item)
-
-        try:
-
-          item.MarkAsTransferring()
-          self.PreProcessItem(item)
-          response = None
-          try:
-            try:
-              t = self.get_time()
-              response = self.TransferItem(item)
-              status = 200
-              transferred = True
-              transfer_time = self.get_time() - t
-              logger.debug('[%s] %s Transferred %d entities in %0.1f seconds',
-                           self.getName(), item, item.count, transfer_time)
-              self.throttle.AddTransfer(RECORDS, item.count)
-            except (db.InternalError, db.NotSavedError, db.Timeout,
-                    apiproxy_errors.OverQuotaError,
-                    apiproxy_errors.DeadlineExceededError), e:
-              logger.exception('Caught non-fatal datastore error: %s', e)
-            except urllib2.HTTPError, e:
-              status = e.code
-              if status == 403 or (status >= 500 and status < 600):
-                logger.exception('Caught non-fatal HTTP error: %d %s',
-                                 status, e.msg)
-              else:
-                raise e
-            except urllib2.URLError, e:
-              if IsURLErrorFatal(e):
-                raise e
-              else:
-                logger.exception('Caught non-fatal URL error: %s', e.reason)
-
-            self.ProcessResponse(item, response)
-
-          except:
-            self.error = sys.exc_info()[1]
-            logger.exception('[%s] %s: caught exception %s', self.getName(),
-                             self.__class__.__name__, str(sys.exc_info()))
-            raise
-
-        finally:
-          if transferred:
-            item.MarkAsTransferred()
-            self.work_queue.task_done()
-            self.thread_gate.TransferSuccess(transfer_time)
-          else:
-            item.MarkAsError()
-            try:
-              self.work_queue.reput(item, block=False)
-            except Queue.Full:
-              logger.error('[%s] Failed to reput work item.', self.getName())
-              raise Error('Failed to reput work item')
-            self.thread_gate.DecreaseWorkers()
-          logger.info('%s %s',
-                      item,
-                      self.state_message(item.state))
-
-      finally:
-        self.thread_gate.FinishWork()
-
-
-  def GetFriendlyName(self):
-    """Returns a human-friendly name for this thread."""
-    return 'worker [%s]' % self.getName()
-
-
-class BulkLoaderThread(_BulkWorkerThread):
-  """A thread which transmits entities to the server application.
-
-  This thread will read WorkItem instances from the work_queue and upload
-  the entities to the server application. Progress information will be
-  pushed into the progress_queue as the work is being performed.
-
-  If a BulkLoaderThread encounters a transient error, the entities will be
-  resent, if a fatal error is encoutered the BulkLoaderThread exits.
-  """
-
-  def __init__(self,
-               work_queue,
-               throttle,
-               thread_gate,
-               request_manager,
-               num_threads,
-               batch_size,
-               get_time=time.time):
-    """Initialize the BulkLoaderThread instance.
-
-    Args:
-      work_queue: A queue containing WorkItems for processing.
-      throttle: A Throttles to control upload bandwidth.
-      thread_gate: A ThreadGate to control number of simultaneous uploads.
-      request_manager: A RequestManager instance.
-      num_threads: The number of threads for parallel transfers.
-      batch_size: The number of entities to transfer per request.
-      get_time: Used for dependency injection.
-    """
-    _BulkWorkerThread.__init__(self,
-                               work_queue,
-                               throttle,
-                               thread_gate,
-                               request_manager,
-                               num_threads,
-                               batch_size,
-                               ImportStateMessage,
-                               get_time)
-
-  def PreProcessItem(self, item):
-    """Performs pre transfer processing on a work item."""
-    if item and not item.content:
-      item.content = self.request_manager.EncodeContent(item.rows)
-
-  def TransferItem(self, item):
-    """Transfers the entities associated with an item.
-
-    Args:
-      item: An item of upload (WorkItem) work.
-
-    Returns:
-      A tuple of (estimated transfer size, response)
-    """
-    return self.request_manager.PostEntities(item)
-
-  def ProcessResponse(self, item, response):
-    """Processes the response from the server application."""
-    pass
-
-
-class BulkExporterThread(_BulkWorkerThread):
-  """A thread which recieved entities to the server application.
-
-  This thread will read KeyRange instances from the work_queue and export
-  the entities from the server application. Progress information will be
-  pushed into the progress_queue as the work is being performed.
-
-  If a BulkExporterThread encounters an error when trying to post data,
-  the thread will exit and cause the application to terminate.
-  """
-
-  def __init__(self,
-               work_queue,
-               throttle,
-               thread_gate,
-               request_manager,
-               num_threads,
-               batch_size,
-               get_time=time.time):
-
-    """Initialize the BulkExporterThread instance.
-
-    Args:
-      work_queue: A queue containing KeyRanges for processing.
-      throttle: A Throttles to control upload bandwidth.
-      thread_gate: A ThreadGate to control number of simultaneous uploads.
-      request_manager: A RequestManager instance.
-      num_threads: The number of threads for parallel transfers.
-      batch_size: The number of entities to transfer per request.
-      get_time: Used for dependency injection.
-    """
-    _BulkWorkerThread.__init__(self,
-                               work_queue,
-                               throttle,
-                               thread_gate,
-                               request_manager,
-                               num_threads,
-                               batch_size,
-                               ExportStateMessage,
-                               get_time)
-
-  def PreProcessItem(self, unused_item):
-    """Performs pre transfer processing on a work item."""
-    pass
-
-  def TransferItem(self, item):
-    """Transfers the entities associated with an item.
-
-    Args:
-      item: An item of download (KeyRange) work.
-
-    Returns:
-      A tuple of (estimated transfer size, response)
-    """
-    return self.request_manager.GetEntities(item)
-
-  def ProcessResponse(self, item, export_result):
-    """Processes the response from the server application."""
-    if export_result:
-      item.Process(export_result, self.num_threads, self.batch_size,
-                   self.work_queue)
-    item.state = STATE_GOT
-
-
 class DataSourceThread(_ThreadBase):
   """A thread which reads WorkItems and pushes them into queue.
 
   This thread will read/consume WorkItems from a generator (produced by
   the generator factory). These WorkItems will then be pushed into the
-  work_queue. Note that reading will block if/when the work_queue becomes
+  thread_pool. Note that reading will block if/when the thread_pool becomes
   full. Information on content consumed from the generator will be pushed
   into the progress_queue.
   """
@@ -2337,14 +1444,16 @@
   NAME = 'data source thread'
 
   def __init__(self,
-               work_queue,
+               request_manager,
+               thread_pool,
                progress_queue,
                workitem_generator_factory,
                progress_generator_factory):
     """Initialize the DataSourceThread instance.
 
     Args:
-      work_queue: A queue containing WorkItems for processing.
+      request_manager: A RequestManager instance.
+      thread_pool: An AdaptiveThreadPool instance.
       progress_queue: A queue used for tracking progress information.
       workitem_generator_factory: A factory that creates a WorkItem generator
       progress_generator_factory: A factory that creates a generator which
@@ -2353,7 +1462,8 @@
     """
     _ThreadBase.__init__(self)
 
-    self.work_queue = work_queue
+    self.request_manager = request_manager
+    self.thread_pool = thread_pool
     self.progress_queue = progress_queue
     self.workitem_generator_factory = workitem_generator_factory
     self.progress_generator_factory = progress_generator_factory
@@ -2366,7 +1476,8 @@
     else:
       progress_gen = None
 
-    content_gen = self.workitem_generator_factory(self.progress_queue,
+    content_gen = self.workitem_generator_factory(self.request_manager,
+                                                  self.progress_queue,
                                                   progress_gen)
 
     self.xfer_count = 0
@@ -2378,7 +1489,7 @@
 
       while not self.exit_flag:
         try:
-          self.work_queue.put(item, block=True, timeout=1.0)
+          self.thread_pool.SubmitItem(item, block=True, timeout=1.0)
           self.entity_count += item.count
           break
         except Queue.Full:
@@ -2526,6 +1637,70 @@
     self.update_cursor = self.secondary_conn.cursor()
 
 
+zero_matcher = re.compile(r'\x00')
+
+zero_one_matcher = re.compile(r'\x00\x01')
+
+
+def KeyStr(key):
+  """Returns a string to represent a key, preserving ordering.
+
+  Unlike datastore.Key.__str__(), we have the property:
+
+    key1 < key2 ==> KeyStr(key1) < KeyStr(key2)
+
+  The key string is constructed from the key path as follows:
+    (1) Strings are prepended with ':' and numeric ids are padded to
+        20 digits.
+    (2) Any null characters (u'\0') present are replaced with u'\0\1'.
+    (3) The sequence u'\0\0' is used to separate each component of the path.
+
+  (1) ensures that names and ids compare properly, while (2) and (3) enforce
+  the part-by-part comparison of pieces of the path.
+
+  Args:
+    key: A datastore.Key instance.
+
+  Returns:
+    A string representation of the key, which preserves ordering.
+  """
+  assert isinstance(key, datastore.Key)
+  path = key.to_path()
+
+  out_path = []
+  for part in path:
+    if isinstance(part, (int, long)):
+      part = '%020d' % part
+    else:
+      part = ':%s' % part
+
+    out_path.append(zero_matcher.sub(u'\0\1', part))
+
+  out_str = u'\0\0'.join(out_path)
+
+  return out_str
+
+
+def StrKey(key_str):
+  """The inverse of the KeyStr function.
+
+  Args:
+    key_str: A string in the range of KeyStr.
+
+  Returns:
+    A datastore.Key instance k, such that KeyStr(k) == key_str.
+  """
+  parts = key_str.split(u'\0\0')
+  for i in xrange(len(parts)):
+    if parts[i][0] == ':':
+      part = parts[i][1:]
+      part = zero_one_matcher.sub(u'\0', part)
+      parts[i] = part
+    else:
+      parts[i] = int(parts[i])
+  return datastore.Key.from_path(*parts)
+
+
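As a quick illustration of the ordering property (the kind and ids here are
hypothetical, and constructing keys requires an application id in the
environment):

  k_id = datastore.Key.from_path('Person', 7)
  k_name = datastore.Key.from_path('Person', 'bob')
  # '%020d' % 7 begins with '0' (0x30) while ':bob' begins with ':' (0x3a),
  # so numeric ids order before names, matching datastore key ordering.
  assert KeyStr(k_id) < KeyStr(k_name)
  assert StrKey(KeyStr(k_name)) == k_name
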
 class ResultDatabase(_Database):
   """Persistently record all the entities downloaded during an export.
 
@@ -2544,7 +1719,7 @@
     """
     self.complete = False
     create_table = ('create table result (\n'
-                    'id TEXT primary key,\n'
+                    'id BLOB primary key,\n'
                     'value BLOB not null)')
 
     _Database.__init__(self,
@@ -2560,34 +1735,37 @@
       self.existing_count = 0
     self.count = self.existing_count
 
-  def _StoreEntity(self, entity_id, value):
+  def _StoreEntity(self, entity_id, entity):
     """Store an entity in the result database.
 
     Args:
-      entity_id: A db.Key for the entity.
-      value: A string of the contents of the entity.
+      entity_id: A datastore.Key for the entity.
+      entity: The entity to store.
 
     Returns:
      True if this entity is not already present in the result database.
     """
 
     assert _RunningInThread(self.secondary_thread)
-    assert isinstance(entity_id, db.Key)
-
-    entity_id = entity_id.id_or_name()
+    assert isinstance(entity_id, datastore.Key), (
+        'expected a datastore.Key, got a %s' % entity_id.__class__.__name__)
+
+    key_str = buffer(KeyStr(entity_id).encode('utf-8'))
     self.insert_cursor.execute(
-        'select count(*) from result where id = ?', (unicode(entity_id),))
+        'select count(*) from result where id = ?', (key_str,))
+
     already_present = self.insert_cursor.fetchone()[0]
     result = True
     if already_present:
       result = False
       self.insert_cursor.execute('delete from result where id = ?',
-                                 (unicode(entity_id),))
+                                 (key_str,))
     else:
       self.count += 1
+    value = entity.Encode()
     self.insert_cursor.execute(
         'insert into result (id, value) values (?, ?)',
-        (unicode(entity_id), buffer(value)))
+        (key_str, buffer(value)))
     return result
 
   def StoreEntities(self, keys, entities):
@@ -2603,9 +1781,9 @@
     self._OpenSecondaryConnection()
     t = time.time()
     count = 0
-    for entity_id, value in zip(keys,
-                                entities):
-      if self._StoreEntity(entity_id, value):
+    for entity_id, entity in zip(keys,
+                                 entities):
+      if self._StoreEntity(entity_id, entity):
         count += 1
     logger.debug('%s insert: delta=%.3f',
                  self.db_filename,
@@ -2627,7 +1805,8 @@
         'select id, value from result order by id')
 
     for unused_entity_id, entity in cursor:
-      yield cPickle.loads(str(entity))
+      entity_proto = entity_pb.EntityProto(contents=entity)
+      yield datastore.Entity._FromPb(entity_proto)
 
 
 class _ProgressDatabase(_Database):
@@ -2723,9 +1902,16 @@
     self._OpenSecondaryConnection()
 
     assert _RunningInThread(self.secondary_thread)
-    assert not key_start or isinstance(key_start, self.py_type)
-    assert not key_end or isinstance(key_end, self.py_type), '%s is a %s' % (
-        key_end, key_end.__class__)
+    assert (not key_start) or isinstance(key_start, self.py_type), (
+        '%s is a %s, %s expected %s' % (key_start,
+                                        key_start.__class__,
+                                        self.__class__.__name__,
+                                        self.py_type))
+    assert (not key_end) or isinstance(key_end, self.py_type), (
+        '%s is a %s, %s expected %s' % (key_end,
+                                        key_end.__class__,
+                                        self.__class__.__name__,
+                                        self.py_type))
     assert KeyLEQ(key_start, key_end), '%s not less than %s' % (
         repr(key_start), repr(key_end))
 
@@ -2843,7 +2029,7 @@
     _ProgressDatabase.__init__(self,
                                db_filename,
                                'TEXT',
-                               db.Key,
+                               datastore.Key,
                                signature,
                                commit_periodicity=1)
 
@@ -3011,34 +2197,72 @@
     exporter.output_entities(self.result_db.AllEntities())
 
   def UpdateProgress(self, item):
-    """Update the state of the given KeyRange.
+    """Update the state of the given KeyRangeItem.
 
     Args:
      item: A KeyRangeItem instance.
     """
     if item.state == STATE_GOT:
-      count = self.result_db.StoreEntities(item.export_result.keys,
-                                           item.export_result.entities)
+      count = self.result_db.StoreEntities(item.download_result.keys,
+                                           item.download_result.entities)
       self.db.DeleteKey(item.progress_key)
       self.entities_transferred += count
     else:
       self.db.UpdateState(item.progress_key, item.state)
 
 
+class MapperProgressThread(_ProgressThreadBase):
+  """A thread to record progress information for maps over the datastore."""
+
+  def __init__(self, kind, progress_queue, progress_db):
+    """Initialize the MapperProgressThread instance.
+
+    Args:
+      kind: The kind of entities being stored in the database.
+      progress_queue: A Queue used for tracking progress information.
+      progress_db: The database for tracking progress information; should
+        be an instance of ProgressDatabase.
+    """
+    _ProgressThreadBase.__init__(self, progress_queue, progress_db)
+
+    self.kind = kind
+    self.mapper = Mapper.RegisteredMapper(self.kind)
+
+  def EntitiesTransferred(self):
+    """Return the total number of unique entities transferred."""
+    return self.entities_transferred
+
+  def WorkFinished(self):
+    """Perform actions after map is complete."""
+    pass
+
+  def UpdateProgress(self, item):
+    """Update the state of the given KeyRangeItem.
+
+    Args:
+      item: A KeyRange instance.
+    """
+    if item.state == STATE_GOT:
+      self.entities_transferred += item.count
+      self.db.DeleteKey(item.progress_key)
+    else:
+      self.db.UpdateState(item.progress_key, item.state)
+
+
 def ParseKey(key_string):
-  """Turn a key stored in the database into a db.Key or None.
+  """Turn a key stored in the database into a Key or None.
 
   Args:
-    key_string: The string representation of a db.Key.
+    key_string: The string representation of a Key.
 
   Returns:
-    A db.Key instance or None
+    A datastore.Key instance or None
   """
   if not key_string:
     return None
   if key_string == 'None':
     return None
-  return db.Key(encoded=key_string)
+  return datastore.Key(encoded=key_string)
 
 
 def Validate(value, typ):
@@ -3097,9 +2321,7 @@
   def __init__(self, kind, properties):
     """Constructor.
 
-    Populates this Loader's kind and properties map. Also registers it with
-    the bulk loader, so that all you need to do is instantiate your Loader,
-    and the bulkload handler will automatically use it.
+    Populates this Loader's kind and properties map.
 
     Args:
       kind: a string containing the entity kind that this loader handles
@@ -3139,7 +2361,11 @@
 
   @staticmethod
   def RegisterLoader(loader):
-
+    """Register loader and the Loader instance for its kind.
+
+    Args:
+      loader: A Loader instance.
+    """
     Loader.__loaders[loader.kind] = loader
 
   def alias_old_names(self):
@@ -3166,7 +2392,7 @@
     Args:
       values: list/tuple of str
       key_name: if provided, the name for the (single) resulting entity
-      parent: A db.Key instance for the parent, or None
+      parent: A datastore.Key instance for the parent, or None
 
     Returns:
       list of db.Model
@@ -3222,7 +2448,7 @@
     server generated numeric key), or a string which neither starts
     with a digit nor has the form __*__ (see
     http://code.google.com/appengine/docs/python/datastore/keysandentitygroups.html),
-    or a db.Key instance.
+    or a datastore.Key instance.
 
     If you generate your own string keys, keep in mind:
 
@@ -3305,6 +2531,51 @@
     return Loader.__loaders[kind]
 
 
+class RestoreThread(_ThreadBase):
+  """A thread to read saved entity_pbs from sqlite3."""
+  NAME = 'RestoreThread'
+  _ENTITIES_DONE = 'Entities Done'
+
+  def __init__(self, queue, filename):
+    _ThreadBase.__init__(self)
+    self.queue = queue
+    self.filename = filename
+
+  def PerformWork(self):
+    db_conn = sqlite3.connect(self.filename)
+    cursor = db_conn.cursor()
+    cursor.execute('select id, value from result')
+    for entity_id, value in cursor:
+      self.queue.put([entity_id, value], block=True)
+    self.queue.put(RestoreThread._ENTITIES_DONE, block=True)
+
+
+class RestoreLoader(Loader):
+  """A Loader which imports protobuffers from a file."""
+
+  def __init__(self, kind):
+    self.kind = kind
+
+  def initialize(self, filename, loader_opts):
+    CheckFile(filename)
+    self.queue = Queue.Queue(1000)
+    restore_thread = RestoreThread(self.queue, filename)
+    restore_thread.start()
+
+  def generate_records(self, filename):
+    while True:
+      record = self.queue.get(block=True)
+      if id(record) == id(RestoreThread._ENTITIES_DONE):
+        break
+      yield record
+
+  def create_entity(self, values, key_name=None, parent=None):
+    """Rebuild a datastore.Entity from a saved [key string, data] record."""
+    key = StrKey(unicode(values[0], 'utf-8'))
+    entity_proto = entity_pb.EntityProto(contents=str(values[1]))
+    entity_proto.mutable_key().CopyFrom(key._Key__reference)
+    return datastore.Entity._FromPb(entity_proto)
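 
RestoreThread reads a sqlite3 table named result with id and value columns,
as produced by a prior --dump run; create_entity above treats id as the
utf-8 key string and value as the encoded EntityProto. A sketch of
inspecting such a file (the backup.sql3 name is hypothetical):

    import sqlite3

    conn = sqlite3.connect('backup.sql3')
    for entity_id, value in conn.execute('select id, value from result'):
      # id holds the entity's key string; value its serialized EntityProto.
      print '%s: %d bytes' % (entity_id, len(value))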
+
+
 class Exporter(object):
   """A base class for serializing datastore entities.
 
@@ -3326,9 +2597,7 @@
   def __init__(self, kind, properties):
     """Constructor.
 
-    Populates this Exporters's kind and properties map. Also registers
-    it so that all you need to do is instantiate your Exporter, and
-    the bulkload handler will automatically use it.
+    Populates this Exporter's kind and properties map.
 
     Args:
       kind: a string containing the entity kind that this exporter handles
@@ -3370,7 +2639,11 @@
 
   @staticmethod
   def RegisterExporter(exporter):
-
+    """Register exporter and the Exporter instance for its kind.
+
+    Args:
+      exporter: A Exporter instance.
+    """
     Exporter.__exporters[exporter.kind] = exporter
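 
As with loaders, registration is now explicit. A minimal sketch for the same
hypothetical 'Album' kind; per __ExtractProperties below, the third element
of each property tuple is the default used when an entity lacks that
property (None means the property is required):

    class AlbumExporter(Exporter):
      """Writes Album entities out as CSV rows (hypothetical kind)."""

      def __init__(self):
        Exporter.__init__(self, 'Album',
                          [('title', str, None),
                           ('artist', str, 'unknown')])

    Exporter.RegisterExporter(AlbumExporter())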
 
   def __ExtractProperties(self, entity):
@@ -3388,7 +2661,7 @@
     encoding = []
     for name, fn, default in self.__properties:
       try:
-        encoding.append(fn(getattr(entity, name)))
+        encoding.append(fn(entity[name]))
       except AttributeError:
         if default is None:
           raise MissingPropertyError(name)
@@ -3468,6 +2741,87 @@
     return Exporter.__exporters[kind]
 
 
+class DumpExporter(Exporter):
+  """An exporter which dumps protobuffers to a file."""
+
+  def __init__(self, kind, result_db_filename):
+    self.kind = kind
+    self.result_db_filename = result_db_filename
+
+  def output_entities(self, entity_generator):
+    """Copy the result database to the output file; ignores the generator."""
+    shutil.copyfile(self.result_db_filename, self.output_filename)
+
+
+class MapperRetry(Error):
+  """An exception that indicates a non-fatal error during mapping."""
+
+
+class Mapper(object):
+  """A base class for serializing datastore entities.
+
+  To add a handler for exporting an entity kind from your datastore,
+  write a subclass of this class that calls Mapper.__init__ from your
+  class's __init__.
+
+  You need to implement to batch_apply or apply method on your subclass
+  for the map to do anything.
+  """
+
+  __mappers = {}
+  kind = None
+
+  def __init__(self, kind):
+    """Constructor.
+
+    Populates this Mapper's kind.
+
+    Args:
+      kind: a string containing the entity kind that this mapper handles
+    """
+    Validate(kind, basestring)
+    self.kind = kind
+
+    GetImplementationClass(kind)
+
+  @staticmethod
+  def RegisterMapper(mapper):
+    """Register mapper and the Mapper instance for its kind.
+
+    Args:
+      mapper: A Mapper instance.
+    """
+    Mapper.__mappers[mapper.kind] = mapper
+
+  def initialize(self, mapper_opts):
+    """Performs initialization.
+
+    Args:
+      mapper_opts: The string given as the --mapper_opts flag argument.
+    """
+    pass
+
+  def finalize(self):
+    """Performs finalization actions after the download completes."""
+    pass
+
+  def apply(self, entity):
+    """Apply the map to a single entity; subclasses should override this."""
+    print 'Default map function doing nothing to %s' % entity
+
+  def batch_apply(self, entities):
+    """Apply the map to a batch of entities by calling apply on each one."""
+    for entity in entities:
+      self.apply(entity)
+
+  @staticmethod
+  def RegisteredMappers():
+    """Returns a dictionary of the mapper instances that have been created."""
+    return dict(Mapper.__mappers)
+
+  @staticmethod
+  def RegisteredMapper(kind):
+    """Returns an mapper instance for the given kind if it exists."""
+    return Mapper.__mappers[kind]
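 
A minimal Mapper subclass only needs a kind and an apply override; a sketch
for the hypothetical 'Album' kind:

    class AlbumMapper(Mapper):
      """Prints the key of each Album entity it visits."""

      def __init__(self):
        Mapper.__init__(self, 'Album')

      def apply(self, entity):
        print 'Visiting %s' % entity.key()

    Mapper.RegisterMapper(AlbumMapper())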
+
+
 class QueueJoinThread(threading.Thread):
   """A thread that joins a queue and exits.
 
@@ -3492,7 +2846,7 @@
 
 def InterruptibleQueueJoin(queue,
                            thread_local,
-                           thread_gate,
+                           thread_pool,
                            queue_join_thread_factory=QueueJoinThread,
                            check_workers=True):
   """Repeatedly joins the given ReQueue or Queue.Queue with short timeout.
@@ -3502,7 +2856,7 @@
   Args:
     queue: A Queue.Queue or ReQueue instance.
     thread_local: A threading.local instance which indicates interrupts.
-    thread_gate: A ThreadGate instance.
+    thread_pool: An AdaptiveThreadPool instance.
     queue_join_thread_factory: Used for dependency injection.
     check_workers: Whether to interrupt the join on worker death.
 
@@ -3519,41 +2873,29 @@
       logger.debug('Queue join interrupted')
       return False
     if check_workers:
-      for worker_thread in thread_gate.Threads():
+      for worker_thread in thread_pool.Threads():
         if not worker_thread.isAlive():
           return False
 
 
-def ShutdownThreads(data_source_thread, work_queue, thread_gate):
+def ShutdownThreads(data_source_thread, thread_pool):
   """Shuts down the worker and data source threads.
 
   Args:
     data_source_thread: A running DataSourceThread instance.
-    work_queue: The work queue.
-    thread_gate: A ThreadGate instance with workers registered.
+    thread_pool: An AdaptiveThreadPool instance with workers registered.
   """
   logger.info('An error occurred. Shutting down...')
 
   data_source_thread.exit_flag = True
 
-  for thread in thread_gate.Threads():
-    thread.exit_flag = True
-
-  for unused_thread in thread_gate.Threads():
-    thread_gate.EnableThread()
+  thread_pool.Shutdown()
 
   data_source_thread.join(timeout=3.0)
   if data_source_thread.isAlive():
     logger.warn('%s hung while trying to exit',
                 data_source_thread.GetFriendlyName())
 
-  while not work_queue.empty():
-    try:
-      unused_item = work_queue.get_nowait()
-      work_queue.task_done()
-    except Queue.Empty:
-      pass
-
 
 class BulkTransporterApp(object):
   """Class to wrap bulk transport application functionality."""
@@ -3563,13 +2905,12 @@
                input_generator_factory,
                throttle,
                progress_db,
-               workerthread_factory,
                progresstrackerthread_factory,
                max_queue_size=DEFAULT_QUEUE_SIZE,
                request_manager_factory=RequestManager,
                datasourcethread_factory=DataSourceThread,
-               work_queue_factory=ReQueue,
-               progress_queue_factory=Queue.Queue):
+               progress_queue_factory=Queue.Queue,
+               thread_pool_factory=adaptive_thread_pool.AdaptiveThreadPool):
     """Instantiate a BulkTransporterApp.
 
     Uploads or downloads data to or from application using HTTP requests.
@@ -3584,13 +2925,12 @@
       input_generator_factory: A factory that creates a WorkItem generator.
       throttle: A Throttle instance.
       progress_db: The database to use for replaying/recording progress.
-      workerthread_factory: A factory for worker threads.
       progresstrackerthread_factory: Used for dependency injection.
       max_queue_size: Maximum size of the queues before they should block.
       request_manager_factory: Used for dependency injection.
       datasourcethread_factory: Used for dependency injection.
-      work_queue_factory: Used for dependency injection.
       progress_queue_factory: Used for dependency injection.
+      thread_pool_factory: Used for dependency injection.
     """
     self.app_id = arg_dict['app_id']
     self.post_url = arg_dict['url']
@@ -3600,15 +2940,15 @@
     self.num_threads = arg_dict['num_threads']
     self.email = arg_dict['email']
     self.passin = arg_dict['passin']
+    self.dry_run = arg_dict['dry_run']
     self.throttle = throttle
     self.progress_db = progress_db
-    self.workerthread_factory = workerthread_factory
     self.progresstrackerthread_factory = progresstrackerthread_factory
     self.max_queue_size = max_queue_size
     self.request_manager_factory = request_manager_factory
     self.datasourcethread_factory = datasourcethread_factory
-    self.work_queue_factory = work_queue_factory
     self.progress_queue_factory = progress_queue_factory
+    self.thread_pool_factory = thread_pool_factory
     (scheme,
      self.host_port, self.url_path,
      unused_query, unused_fragment) = urlparse.urlsplit(self.post_url)
@@ -3623,13 +2963,13 @@
     Returns:
       Error code suitable for sys.exit, e.g. 0 on success, 1 on failure.
     """
-    thread_gate = ThreadGate(True)
+    self.error = False
+    thread_pool = self.thread_pool_factory(
+        self.num_threads, queue_size=self.max_queue_size)
 
     self.throttle.Register(threading.currentThread())
     threading.currentThread().exit_flag = False
 
-    work_queue = self.work_queue_factory(self.max_queue_size)
-
     progress_queue = self.progress_queue_factory(self.max_queue_size)
     request_manager = self.request_manager_factory(self.app_id,
                                                    self.host_port,
@@ -3639,27 +2979,23 @@
                                                    self.batch_size,
                                                    self.secure,
                                                    self.email,
-                                                   self.passin)
+                                                   self.passin,
+                                                   self.dry_run)
     try:
       request_manager.Authenticate()
     except Exception, e:
+      self.error = True
       if not isinstance(e, urllib2.HTTPError) or (
           e.code != 302 and e.code != 401):
         logger.exception('Exception during authentication')
       raise AuthenticationError()
     if (request_manager.auth_called and
         not request_manager.authenticated):
+      self.error = True
       raise AuthenticationError('Authentication failed')
 
-    for unused_idx in xrange(self.num_threads):
-      thread = self.workerthread_factory(work_queue,
-                                         self.throttle,
-                                         thread_gate,
-                                         request_manager,
-                                         self.num_threads,
-                                         self.batch_size)
+    for thread in thread_pool.Threads():
       self.throttle.Register(thread)
-      thread_gate.Register(thread)
 
     self.progress_thread = self.progresstrackerthread_factory(
         progress_queue, self.progress_db)
@@ -3671,7 +3007,8 @@
       progress_generator_factory = None
 
     self.data_source_thread = (
-        self.datasourcethread_factory(work_queue,
+        self.datasourcethread_factory(request_manager,
+                                      thread_pool,
                                       progress_queue,
                                       self.input_generator_factory,
                                       progress_generator_factory))
@@ -3682,60 +3019,54 @@
     def Interrupt(unused_signum, unused_frame):
       """Shutdown gracefully in response to a signal."""
       thread_local.shut_down = True
+      self.error = True
 
     signal.signal(signal.SIGINT, Interrupt)
 
     self.progress_thread.start()
     self.data_source_thread.start()
-    for thread in thread_gate.Threads():
-      thread.start()
 
 
     while not thread_local.shut_down:
       self.data_source_thread.join(timeout=0.25)
 
       if self.data_source_thread.isAlive():
-        for thread in list(thread_gate.Threads()) + [self.progress_thread]:
+        for thread in list(thread_pool.Threads()) + [self.progress_thread]:
           if not thread.isAlive():
             logger.info('Unexpected thread death: %s', thread.getName())
             thread_local.shut_down = True
+            self.error = True
             break
       else:
         break
 
-    if thread_local.shut_down:
-      ShutdownThreads(self.data_source_thread, work_queue, thread_gate)
-
     def _Join(ob, msg):
       logger.debug('Waiting for %s...', msg)
       if isinstance(ob, threading.Thread):
         ob.join(timeout=3.0)
         if ob.isAlive():
-          logger.debug('Joining %s failed', ob.GetFriendlyName())
+          logger.debug('Joining %s failed', ob)
         else:
           logger.debug('... done.')
       elif isinstance(ob, (Queue.Queue, ReQueue)):
-        if not InterruptibleQueueJoin(ob, thread_local, thread_gate):
-          ShutdownThreads(self.data_source_thread, work_queue, thread_gate)
+        if not InterruptibleQueueJoin(ob, thread_local, thread_pool):
+          ShutdownThreads(self.data_source_thread, thread_pool)
       else:
         ob.join()
         logger.debug('... done.')
 
-    _Join(work_queue, 'work_queue to flush')
-
-    for unused_thread in thread_gate.Threads():
-      work_queue.put(_THREAD_SHOULD_EXIT)
-
-    for unused_thread in thread_gate.Threads():
-      thread_gate.EnableThread()
-
-    for thread in thread_gate.Threads():
-      _Join(thread, 'thread [%s] to terminate' % thread.getName())
-
-      thread.CheckError()
+    if self.data_source_thread.error or thread_local.shut_down:
+      ShutdownThreads(self.data_source_thread, thread_pool)
+    else:
+      _Join(thread_pool.requeue, 'worker threads to finish')
+
+    thread_pool.Shutdown()
+    thread_pool.JoinThreads()
+    thread_pool.CheckErrors()
+    print ''
 
     if self.progress_thread.isAlive():
-      InterruptibleQueueJoin(progress_queue, thread_local, thread_gate,
+      InterruptibleQueueJoin(progress_queue, thread_local, thread_pool,
                              check_workers=False)
     else:
       logger.warn('Progress thread exited prematurely')
@@ -3763,9 +3094,10 @@
 
   def ReportStatus(self):
     """Display a message reporting the final status of the transfer."""
-    total_up, duration = self.throttle.TotalTransferred(BANDWIDTH_UP)
+    total_up, duration = self.throttle.TotalTransferred(
+        remote_api_throttle.BANDWIDTH_UP)
     s_total_up, unused_duration = self.throttle.TotalTransferred(
-        HTTPS_BANDWIDTH_UP)
+        remote_api_throttle.HTTPS_BANDWIDTH_UP)
     total_up += s_total_up
     total = total_up
    logger.info('%d entities total, %d previously transferred',
@@ -3793,18 +3125,49 @@
 
   def ReportStatus(self):
     """Display a message reporting the final status of the transfer."""
-    total_down, duration = self.throttle.TotalTransferred(BANDWIDTH_DOWN)
+    total_down, duration = self.throttle.TotalTransferred(
+        remote_api_throttle.BANDWIDTH_DOWN)
     s_total_down, unused_duration = self.throttle.TotalTransferred(
-        HTTPS_BANDWIDTH_DOWN)
+        remote_api_throttle.HTTPS_BANDWIDTH_DOWN)
     total_down += s_total_down
     total = total_down
     existing_count = self.progress_thread.existing_count
     xfer_count = self.progress_thread.EntitiesTransferred()
     logger.info('Have %d entities, %d previously transferred',
-                xfer_count + existing_count, existing_count)
+                xfer_count, existing_count)
     logger.info('%d entities (%d bytes) transferred in %.1f seconds',
                 xfer_count, total, duration)
-    return 0
+    if self.error:
+      return 1
+    else:
+      return 0
+
+
+class BulkMapperApp(BulkTransporterApp):
+  """Class to encapsulate bulk map functionality."""
+
+  def __init__(self, *args, **kwargs):
+    BulkTransporterApp.__init__(self, *args, **kwargs)
+
+  def ReportStatus(self):
+    """Display a message reporting the final status of the transfer."""
+    total_down, duration = self.throttle.TotalTransferred(
+        remote_api_throttle.BANDWIDTH_DOWN)
+    s_total_down, unused_duration = self.throttle.TotalTransferred(
+        remote_api_throttle.HTTPS_BANDWIDTH_DOWN)
+    total_down += s_total_down
+    total = total_down
+    xfer_count = self.progress_thread.EntitiesTransferred()
+    logger.info('The following may be inaccurate if any mapper tasks '
+                'encountered errors and had to be retried.')
+    logger.info('Applied mapper to %s entities.',
+                xfer_count)
+    logger.info('%s entities (%s bytes) transferred in %.1f seconds',
+                xfer_count, total, duration)
+    if self.error:
+      return 1
+    else:
+      return 0
 
 
 def PrintUsageExit(code):
@@ -3843,18 +3206,24 @@
              'loader_opts=',
              'exporter_opts=',
              'log_file=',
+             'mapper_opts=',
              'email=',
              'passin',
+             'map',
+             'dry_run',
+             'dump',
+             'restore',
              ]
 
 
-def ParseArguments(argv):
+def ParseArguments(argv, die_fn=lambda: PrintUsageExit(1)):
   """Parses command-line arguments.
 
   Prints out a help message if -h or --help is supplied.
 
   Args:
     argv: List of command-line arguments.
+    die_fn: Function to invoke to end the program.
 
   Returns:
     A dictionary containing the value of command-line options.
@@ -3867,11 +3236,11 @@
   arg_dict = {}
 
   arg_dict['url'] = REQUIRED_OPTION
-  arg_dict['filename'] = REQUIRED_OPTION
-  arg_dict['config_file'] = REQUIRED_OPTION
-  arg_dict['kind'] = REQUIRED_OPTION
-
-  arg_dict['batch_size'] = DEFAULT_BATCH_SIZE
+  arg_dict['filename'] = None
+  arg_dict['config_file'] = None
+  arg_dict['kind'] = None
+
+  arg_dict['batch_size'] = None
   arg_dict['num_threads'] = DEFAULT_THREAD_COUNT
   arg_dict['bandwidth_limit'] = DEFAULT_BANDWIDTH_LIMIT
   arg_dict['rps_limit'] = DEFAULT_RPS_LIMIT
@@ -3889,6 +3258,11 @@
   arg_dict['log_file'] = None
   arg_dict['email'] = None
   arg_dict['passin'] = False
+  arg_dict['mapper_opts'] = None
+  arg_dict['map'] = False
+  arg_dict['dry_run'] = False
+  arg_dict['dump'] = False
+  arg_dict['restore'] = False
 
   def ExpandFilename(filename):
     """Expand shell variables and ~usernames in filename."""
@@ -3938,26 +3312,39 @@
     elif option == '--exporter_opts':
       arg_dict['exporter_opts'] = value
     elif option == '--log_file':
-      arg_dict['log_file'] = value
+      arg_dict['log_file'] = ExpandFilename(value)
     elif option == '--email':
       arg_dict['email'] = value
     elif option == '--passin':
       arg_dict['passin'] = True
-
-  return ProcessArguments(arg_dict, die_fn=lambda: PrintUsageExit(1))
+    elif option == '--map':
+      arg_dict['map'] = True
+    elif option == '--mapper_opts':
+      arg_dict['mapper_opts'] = value
+    elif option == '--dry_run':
+      arg_dict['dry_run'] = True
+    elif option == '--dump':
+      arg_dict['dump'] = True
+    elif option == '--restore':
+      arg_dict['restore'] = True
+
+  return ProcessArguments(arg_dict, die_fn=die_fn)
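 
The new flags combine with the relaxed required-argument handling in
ProcessArguments below; for example, --dump needs neither --config_file nor
--kind. A sketch (the URL and filename are hypothetical):

    argv = ['bulkloader.py',
            '--url=http://myapp.appspot.com/remote_api',
            '--filename=backup.sql3',
            '--dump']
    arg_dict = ParseArguments(argv)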
 
 
 def ThrottleLayout(bandwidth_limit, http_limit, rps_limit):
   """Return a dictionary indicating the throttle options."""
-  return {
-      BANDWIDTH_UP: bandwidth_limit,
-      BANDWIDTH_DOWN: bandwidth_limit,
-      REQUESTS: http_limit,
-      HTTPS_BANDWIDTH_UP: bandwidth_limit / 5,
-      HTTPS_BANDWIDTH_DOWN: bandwidth_limit / 5,
-      HTTPS_REQUESTS: http_limit / 5,
-      RECORDS: rps_limit,
-  }
+  bulkloader_limits = dict(remote_api_throttle.NO_LIMITS)
+  bulkloader_limits.update({
+      remote_api_throttle.BANDWIDTH_UP: bandwidth_limit,
+      remote_api_throttle.BANDWIDTH_DOWN: bandwidth_limit,
+      remote_api_throttle.REQUESTS: http_limit,
+      remote_api_throttle.HTTPS_BANDWIDTH_UP: bandwidth_limit,
+      remote_api_throttle.HTTPS_BANDWIDTH_DOWN: bandwidth_limit,
+      remote_api_throttle.HTTPS_REQUESTS: http_limit,
+      remote_api_throttle.ENTITIES_FETCHED: rps_limit,
+      remote_api_throttle.ENTITIES_MODIFIED: rps_limit,
+  })
+  return bulkloader_limits
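 
The returned layout feeds remote_api_throttle.Throttle, mirroring the Run
code further below (the numeric limits here are illustrative):

    layout = ThrottleLayout(250000, 8, 20)
    throttle = remote_api_throttle.Throttle(layout=layout)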
 
 
 def CheckOutputFile(filename):
@@ -3969,12 +3356,13 @@
   Raises:
     FileExistsError: if the given filename already exists.
     FileNotWritableError: if the given filename's directory is not writable.
-  """
-  if os.path.exists(filename):
+    """
+  full_path = os.path.abspath(filename)
+  if os.path.exists(full_path):
     raise FileExistsError('%s: output file exists' % filename)
-  elif not os.access(os.path.dirname(filename), os.W_OK):
+  elif not os.access(os.path.dirname(full_path), os.W_OK):
     raise FileNotWritableError(
-        '%s: not writable' % os.path.dirname(filename))
+        '%s: not writable' % os.path.dirname(full_path))
 
 
 def LoadConfig(config_file_name, exit_fn=sys.exit):
@@ -3999,6 +3387,11 @@
       if hasattr(bulkloader_config, 'exporters'):
         for cls in bulkloader_config.exporters:
           Exporter.RegisterExporter(cls())
+
+      if hasattr(bulkloader_config, 'mappers'):
+        for cls in bulkloader_config.mappers:
+          Mapper.RegisterMapper(cls())
+
     except NameError, e:
       m = re.search(r"[^']*'([^']*)'.*", str(e))
       if m.groups() and m.group(1) == 'Loader':
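 
A config module matching the hasattr checks above defines plain lists of
classes, each instantiated with no arguments; a hypothetical sketch reusing
the Album classes from earlier:

    # bulkloader_config.py
    loaders = [AlbumLoader]
    exporters = [AlbumExporter]
    mappers = [AlbumMapper]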
@@ -4058,9 +3451,12 @@
                    url=None,
                    kind=None,
                    db_filename=None,
+                   perform_map=None,
                    download=None,
                    has_header=None,
-                   result_db_filename=None):
+                   result_db_filename=None,
+                   dump=None,
+                   restore=None):
   """Returns a string that identifies the important options for the database."""
   if download:
     result_db_line = 'result_db: %s' % result_db_filename
@@ -4071,10 +3467,14 @@
   url: %s
   kind: %s
   download: %s
+  map: %s
+  dump: %s
+  restore: %s
   progress_db: %s
   has_header: %s
   %s
-  """ % (app_id, url, kind, download, db_filename, has_header, result_db_line)
+  """ % (app_id, url, kind, download, perform_map, dump, restore, db_filename,
+         has_header, result_db_line)
 
 
 def ProcessArguments(arg_dict,
@@ -4090,6 +3490,8 @@
   """
   app_id = GetArgument(arg_dict, 'app_id', die_fn)
   url = GetArgument(arg_dict, 'url', die_fn)
+  dump = GetArgument(arg_dict, 'dump', die_fn)
+  restore = GetArgument(arg_dict, 'restore', die_fn)
   filename = GetArgument(arg_dict, 'filename', die_fn)
   batch_size = GetArgument(arg_dict, 'batch_size', die_fn)
   kind = GetArgument(arg_dict, 'kind', die_fn)
@@ -4098,21 +3500,18 @@
   result_db_filename = GetArgument(arg_dict, 'result_db_filename', die_fn)
   download = GetArgument(arg_dict, 'download', die_fn)
   log_file = GetArgument(arg_dict, 'log_file', die_fn)
-
-  unused_passin = GetArgument(arg_dict, 'passin', die_fn)
-  unused_email = GetArgument(arg_dict, 'email', die_fn)
-  unused_debug = GetArgument(arg_dict, 'debug', die_fn)
-  unused_num_threads = GetArgument(arg_dict, 'num_threads', die_fn)
-  unused_bandwidth_limit = GetArgument(arg_dict, 'bandwidth_limit', die_fn)
-  unused_rps_limit = GetArgument(arg_dict, 'rps_limit', die_fn)
-  unused_http_limit = GetArgument(arg_dict, 'http_limit', die_fn)
-  unused_auth_domain = GetArgument(arg_dict, 'auth_domain', die_fn)
-  unused_has_headers = GetArgument(arg_dict, 'has_header', die_fn)
-  unused_loader_opts = GetArgument(arg_dict, 'loader_opts', die_fn)
-  unused_exporter_opts = GetArgument(arg_dict, 'exporter_opts', die_fn)
+  perform_map = GetArgument(arg_dict, 'map', die_fn)
 
   errors = []
 
+  if batch_size is None:
+    if download or perform_map:
+      arg_dict['batch_size'] = DEFAULT_DOWNLOAD_BATCH_SIZE
+    else:
+      arg_dict['batch_size'] = DEFAULT_BATCH_SIZE
+  elif batch_size <= 0:
+    errors.append('batch_size must be at least 1')
+
   if db_filename is None:
     arg_dict['db_filename'] = time.strftime(
         'bulkloader-progress-%Y%m%d.%H%M%S.sql3')
@@ -4124,37 +3523,35 @@
   if log_file is None:
     arg_dict['log_file'] = time.strftime('bulkloader-log-%Y%m%d.%H%M%S')
 
-  if batch_size <= 0:
-    errors.append('batch_size must be at least 1')
-
   required = '%s argument required'
 
+  if config_file is None and not dump and not restore:
+    errors.append('One of --config_file, --dump, or --restore is required')
+
   if url is REQUIRED_OPTION:
     errors.append(required % 'url')
 
-  if filename is REQUIRED_OPTION:
+  if not filename and not perform_map:
     errors.append(required % 'filename')
 
-  if kind is REQUIRED_OPTION:
-    errors.append(required % 'kind')
-
-  if config_file is REQUIRED_OPTION:
-    errors.append(required % 'config_file')
-
-  if download:
-    if result_db_filename is REQUIRED_OPTION:
-      errors.append(required % 'result_db_filename')
+  if kind is None:
+    if download or perform_map:
+      errors.append('kind argument required for this operation')
+    elif not dump and not restore:
+      errors.append(
+          'kind argument required unless --dump or --restore is specified')
 
   if not app_id:
-    (unused_scheme, host_port, unused_url_path,
-     unused_query, unused_fragment) = urlparse.urlsplit(url)
-    suffix_idx = host_port.find('.appspot.com')
-    if suffix_idx > -1:
-      arg_dict['app_id'] = host_port[:suffix_idx]
-    elif host_port.split(':')[0].endswith('google.com'):
-      arg_dict['app_id'] = host_port.split('.')[0]
-    else:
-      errors.append('app_id argument required for non appspot.com domains')
+    if url and url is not REQUIRED_OPTION:
+      (unused_scheme, host_port, unused_url_path,
+       unused_query, unused_fragment) = urlparse.urlsplit(url)
+      suffix_idx = host_port.find('.appspot.com')
+      if suffix_idx > -1:
+        arg_dict['app_id'] = host_port[:suffix_idx]
+      elif host_port.split(':')[0].endswith('google.com'):
+        arg_dict['app_id'] = host_port.split('.')[0]
+      else:
+        errors.append('app_id argument required for non appspot.com domains')
 
   if errors:
     print >>sys.stderr, '\n'.join(errors)
@@ -4203,50 +3600,68 @@
   result_db_filename = arg_dict['result_db_filename']
   loader_opts = arg_dict['loader_opts']
   exporter_opts = arg_dict['exporter_opts']
+  mapper_opts = arg_dict['mapper_opts']
   email = arg_dict['email']
   passin = arg_dict['passin']
+  perform_map = arg_dict['map']
+  dump = arg_dict['dump']
+  restore = arg_dict['restore']
 
   os.environ['AUTH_DOMAIN'] = auth_domain
 
   kind = ParseKind(kind)
 
-  check_file(config_file)
-  if not download:
+  if not dump and not restore:
+    check_file(config_file)
+
+  if download and perform_map:
+    logger.error('--download and --map are mutually exclusive.')
+
+  if download or dump:
+    check_output_file(filename)
+  elif not perform_map:
     check_file(filename)
+
+  if dump:
+    Exporter.RegisterExporter(DumpExporter(kind, result_db_filename))
+  elif restore:
+    Loader.RegisterLoader(RestoreLoader(kind))
   else:
-    check_output_file(filename)
-
-  LoadConfig(config_file)
+    LoadConfig(config_file)
 
   os.environ['APPLICATION_ID'] = app_id
 
   throttle_layout = ThrottleLayout(bandwidth_limit, http_limit, rps_limit)
-
-  throttle = Throttle(layout=throttle_layout)
+  logger.info('Throttling transfers:')
+  logger.info('Bandwidth: %s bytes/second', bandwidth_limit)
+  logger.info('HTTP connections: %s/second', http_limit)
+  logger.info('Entities inserted/fetched/modified: %s/second', rps_limit)
+
+  throttle = remote_api_throttle.Throttle(layout=throttle_layout)
   signature = _MakeSignature(app_id=app_id,
                              url=url,
                              kind=kind,
                              db_filename=db_filename,
                              download=download,
+                             perform_map=perform_map,
                              has_header=has_header,
-                             result_db_filename=result_db_filename)
+                             result_db_filename=result_db_filename,
+                             dump=dump,
+                             restore=restore)
 
 
   max_queue_size = max(DEFAULT_QUEUE_SIZE, 3 * num_threads + 5)
 
   if db_filename == 'skip':
     progress_db = StubProgressDatabase()
-  elif not download:
+  elif not download and not perform_map and not dump:
     progress_db = ProgressDatabase(db_filename, signature)
   else:
     progress_db = ExportProgressDatabase(db_filename, signature)
 
-  if download:
-    result_db = ResultDatabase(result_db_filename, signature)
-
   return_code = 1
 
-  if not download:
+  if not download and not perform_map and not dump:
     loader = Loader.RegisteredLoader(kind)
     try:
       loader.initialize(filename, loader_opts)
@@ -4257,12 +3672,10 @@
                             workitem_generator_factory,
                             throttle,
                             progress_db,
-                            BulkLoaderThread,
                             ProgressTrackerThread,
                             max_queue_size,
                             RequestManager,
                             DataSourceThread,
-                            ReQueue,
                             Queue.Queue)
       try:
         return_code = app.Run()
@@ -4270,29 +3683,31 @@
         logger.info('Authentication Failed')
     finally:
       loader.finalize()
-  else:
+  elif not perform_map:
+    result_db = ResultDatabase(result_db_filename, signature)
     exporter = Exporter.RegisteredExporter(kind)
     try:
       exporter.initialize(filename, exporter_opts)
 
-      def KeyRangeGeneratorFactory(progress_queue, progress_gen):
-        return KeyRangeGenerator(kind, progress_queue, progress_gen)
+      def KeyRangeGeneratorFactory(request_manager, progress_queue,
+                                   progress_gen):
+        return KeyRangeItemGenerator(request_manager, kind, progress_queue,
+                                     progress_gen, DownloadItem)
 
       def ExportProgressThreadFactory(progress_queue, progress_db):
         return ExportProgressThread(kind,
                                     progress_queue,
                                     progress_db,
                                     result_db)
+
       app = BulkDownloaderApp(arg_dict,
                               KeyRangeGeneratorFactory,
                               throttle,
                               progress_db,
-                              BulkExporterThread,
                               ExportProgressThreadFactory,
                               0,
                               RequestManager,
                               DataSourceThread,
-                              ReQueue,
                               Queue.Queue)
       try:
         return_code = app.Run()
@@ -4300,6 +3715,35 @@
         logger.info('Authentication Failed')
     finally:
       exporter.finalize()
+  elif not download:
+    mapper = Mapper.RegisteredMapper(kind)
+    try:
+      mapper.initialize(mapper_opts)
+      def KeyRangeGeneratorFactory(request_manager, progress_queue,
+                                   progress_gen):
+        return KeyRangeItemGenerator(request_manager, kind, progress_queue,
+                                     progress_gen, MapperItem)
+
+      def MapperProgressThreadFactory(progress_queue, progress_db):
+        return MapperProgressThread(kind,
+                                    progress_queue,
+                                    progress_db)
+
+      app = BulkMapperApp(arg_dict,
+                          KeyRangeGeneratorFactory,
+                          throttle,
+                          progress_db,
+                          MapperProgressThreadFactory,
+                          0,
+                          RequestManager,
+                          DataSourceThread,
+                          Queue.Queue)
+      try:
+        return_code = app.Run()
+      except AuthenticationError:
+        logger.info('Authentication Failed')
+    finally:
+      mapper.finalize()
   return return_code
 
 
@@ -4335,8 +3779,17 @@
 
   logger.info('Logging to %s', log_file)
 
+  remote_api_throttle.logger.setLevel(level)
+  remote_api_throttle.logger.addHandler(file_handler)
+  remote_api_throttle.logger.addHandler(console)
+
   appengine_rpc.logger.setLevel(logging.WARN)
 
+  adaptive_thread_pool.logger.setLevel(logging.DEBUG)
+  adaptive_thread_pool.logger.addHandler(console)
+  adaptive_thread_pool.logger.addHandler(file_handler)
+  adaptive_thread_pool.logger.propagate = False
+
 
 def Run(arg_dict):
   """Sets up and runs the bulkloader, given the options as keyword arguments.
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver.py	Sun Sep 06 23:31:53 2009 +0200
@@ -49,6 +49,7 @@
 import dummy_thread
 import email.Utils
 import errno
+import heapq
 import httplib
 import imp
 import inspect
@@ -61,13 +62,13 @@
 import pickle
 import pprint
 import random
+import select
 
 import re
 import sre_compile
 import sre_constants
 import sre_parse
 
-import mimetypes
 import socket
 import sys
 import time
@@ -91,6 +92,7 @@
 from google.appengine.api.capabilities import capability_stub
 from google.appengine.api.labs.taskqueue import taskqueue_stub
 from google.appengine.api.memcache import memcache_stub
+from google.appengine.api.xmpp import xmpp_service_stub
 
 from google.appengine import dist
 
@@ -111,11 +113,13 @@
 FOOTER_TEMPLATE = 'logging_console_footer.html'
 
 DEFAULT_ENV = {
-  'GATEWAY_INTERFACE': 'CGI/1.1',
-  'AUTH_DOMAIN': 'gmail.com',
-  'TZ': 'UTC',
+    'GATEWAY_INTERFACE': 'CGI/1.1',
+    'AUTH_DOMAIN': 'gmail.com',
+    'TZ': 'UTC',
 }
 
+DEFAULT_SELECT_DELAY = 30.0
+
 for ext, mime_type in (('.asc', 'text/plain'),
                        ('.diff', 'text/plain'),
                        ('.csv', 'text/comma-separated-values'),
@@ -134,19 +138,24 @@
                                               'site-packages'))
 
 
+
 class Error(Exception):
   """Base-class for exceptions in this module."""
 
+
 class InvalidAppConfigError(Error):
   """The supplied application configuration file is invalid."""
 
+
 class AppConfigNotFoundError(Error):
   """Application configuration file not found."""
 
+
 class TemplatesNotLoadedError(Error):
   """Templates for the debugging console were not loaded."""
 
 
+
 def SplitURL(relative_url):
   """Splits a relative URL into its path and query-string components.
 
@@ -159,7 +168,8 @@
       script_name: Relative URL of the script that was accessed.
       query_string: String containing everything after the '?' character.
   """
-  scheme, netloc, path, query, fragment = urlparse.urlsplit(relative_url)
+  (unused_scheme, unused_netloc, path, query,
+   unused_fragment) = urlparse.urlsplit(relative_url)
   return path, query
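 
For example (the scheme, netloc, and fragment are discarded):

    path, query = SplitURL('/admin/console?debug=1#top')
    # path == '/admin/console', query == 'debug=1'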
 
 
@@ -182,6 +192,7 @@
   return 'http://%s%s' % (netloc, relative_url)
 
 
+
 class URLDispatcher(object):
   """Base-class for handling HTTP requests."""
 
@@ -231,6 +242,7 @@
     Args:
       dispatched_output: StringIO buffer containing the results from the
        dispatched handler.
+      original_output: The original output file.
     """
     original_output.write(dispatched_output.read())
 
@@ -267,6 +279,10 @@
         URL; False if anyone can access this URL.
       admin_only: True if the user must be a logged-in administrator to
         access the URL; False if anyone can access the URL.
+
+    Raises:
+      TypeError: if dispatcher is not a URLDispatcher sub-class instance.
+      InvalidAppConfigError: if regex isn't valid.
     """
     if not isinstance(dispatcher, URLDispatcher):
       raise TypeError('dispatcher must be a URLDispatcher sub-class')
@@ -294,6 +310,7 @@
 
     Args:
       relative_url: Relative URL being accessed in a request.
+      split_url: Used for dependency injection.
 
     Returns:
       Tuple (dispatcher, matched_path, requires_login, admin_only), which are
@@ -302,7 +319,7 @@
       replaced using values matched by the URL pattern. If no match was found,
       dispatcher will be None.
     """
-    adjusted_url, query_string = split_url(relative_url)
+    adjusted_url, unused_query_string = split_url(relative_url)
 
     for url_tuple in self._url_patterns:
       url_re, dispatcher, path, requires_login, admin_only = url_tuple
@@ -325,6 +342,7 @@
     return set([url_tuple[1] for url_tuple in self._url_patterns])
 
 
+
 class MatcherDispatcher(URLDispatcher):
   """Dispatcher across multiple URLMatcher instances."""
 
@@ -338,7 +356,8 @@
     Args:
       login_url: Relative URL which should be used for handling user logins.
       url_matchers: Sequence of URLMatcher objects.
-      get_user_info, login_redirect: Used for dependency injection.
+      get_user_info: Used for dependency injection.
+      login_redirect: Used for dependency injection.
     """
     self._login_url = login_url
     self._url_matchers = tuple(url_matchers)
@@ -359,30 +378,30 @@
     path variable supplied to this method is ignored.
     """
     cookies = ', '.join(headers.getheaders('cookie'))
-    email, admin, user_id = self._get_user_info(cookies)
+    email_addr, admin, user_id = self._get_user_info(cookies)
 
     for matcher in self._url_matchers:
-      dispatcher, matched_path, requires_login, admin_only = matcher.Match(relative_url)
+      dispatcher, matched_path, requires_login, admin_only = matcher.Match(
+          relative_url)
       if dispatcher is None:
         continue
 
       logging.debug('Matched "%s" to %s with path %s',
                     relative_url, dispatcher, matched_path)
 
-      if (requires_login or admin_only) and not email:
+      if (requires_login or admin_only) and not email_addr:
         logging.debug('Login required, redirecting user')
-        self._login_redirect(
-          self._login_url,
-          base_env_dict['SERVER_NAME'],
-          base_env_dict['SERVER_PORT'],
-          relative_url,
-          outfile)
+        self._login_redirect(self._login_url,
+                             base_env_dict['SERVER_NAME'],
+                             base_env_dict['SERVER_PORT'],
+                             relative_url,
+                             outfile)
       elif admin_only and not admin:
         outfile.write('Status: %d Not authorized\r\n'
                       '\r\n'
                       'Current logged in user %s is not '
                       'authorized to view this page.'
-                      % (httplib.FORBIDDEN, email))
+                      % (httplib.FORBIDDEN, email_addr))
       else:
         forward = dispatcher.Dispatch(relative_url,
                                       matched_path,
@@ -393,7 +412,7 @@
 
         if forward:
           new_path, new_headers, new_input = forward
-          logging.info('Internal redirection to %s' % new_path)
+          logging.info('Internal redirection to %s', new_path)
           new_outfile = cStringIO.StringIO()
           self.Dispatch(new_path,
                         None,
@@ -413,6 +432,7 @@
                   % (httplib.NOT_FOUND, relative_url))
 
 
+
 class ApplicationLoggingHandler(logging.Handler):
   """Python Logging handler that displays the debugging console to users."""
 
@@ -487,7 +507,7 @@
       outfile: Output stream to which the console should be written if either
         a debug parameter was supplied or a logging cookie is present.
     """
-    script_name, query_string = SplitURL(relative_url)
+    unused_script_name, query_string = SplitURL(relative_url)
     param_dict = cgi.parse_qs(query_string, True)
     cookie_dict = Cookie.SimpleCookie(env.get('HTTP_COOKIE', ''))
     if 'debug' not in param_dict and self._COOKIE_NAME not in cookie_dict:
@@ -554,8 +574,8 @@
   env['CONTENT_LENGTH'] = headers.getheader('content-length', '')
 
   cookies = ', '.join(headers.getheaders('cookie'))
-  email, admin, user_id = get_user_info(cookies)
-  env['USER_EMAIL'] = email
+  email_addr, admin, user_id = get_user_info(cookies)
+  env['USER_EMAIL'] = email_addr
   env['USER_ID'] = user_id
   if admin:
     env['USER_IS_ADMIN'] = '1'
@@ -583,12 +603,11 @@
   """Fake for methods/functions that are not implemented in the production
   environment.
   """
-  raise NotImplementedError("This class/method is not available.")
+  raise NotImplementedError('This class/method is not available.')
 
 
 class NotImplementedFakeClass(object):
-  """Fake class for classes that are not implemented in the production
-  environment.
+  """Fake class for classes that are not implemented in the production env.
   """
   __init__ = NotImplementedFake
 
@@ -627,7 +646,7 @@
 def FakeURandom(n):
   """Fake version of os.urandom."""
   bytes = ''
-  for i in xrange(n):
+  for _ in range(n):
     bytes += chr(random.randint(0, 255))
   return bytes
 
@@ -665,9 +684,9 @@
   return original_setlocale(category, 'C')
 
 
-def FakeOpen(file, flags, mode=0777):
+def FakeOpen(filename, flags, mode=0777):
   """Fake version of os.open."""
-  raise OSError(errno.EPERM, "Operation not permitted", file)
+  raise OSError(errno.EPERM, "Operation not permitted", filename)
 
 
 def FakeRename(src, dst):
@@ -711,27 +730,27 @@
   return False
 
 SHARED_MODULE_PREFIXES = set([
-  'google',
-  'logging',
-  'sys',
-  'warnings',
-
-
-
-
-  're',
-  'sre_compile',
-  'sre_constants',
-  'sre_parse',
-
-
-
-
-  'wsgiref',
+    'google',
+    'logging',
+    'sys',
+    'warnings',
+
+
+
+
+    're',
+    'sre_compile',
+    'sre_constants',
+    'sre_parse',
+
+
+
+
+    'wsgiref',
 ])
 
 NOT_SHARED_MODULE_PREFIXES = set([
-  'google.appengine.ext',
+    'google.appengine.ext',
 ])
 
 
@@ -788,7 +807,7 @@
 
 
 def GeneratePythonPaths(*p):
-  """Generate all valid filenames for the given file
+  """Generate all valid filenames for the given file.
 
   Args:
     p: Positional args are the folders to the file and finally the file
@@ -814,8 +833,8 @@
                       if os.path.isfile(filename))
 
   ALLOWED_DIRS = set([
-    os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
-    os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
+      os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
+      os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
   ])
 
   NOT_ALLOWED_DIRS = set([
@@ -823,58 +842,58 @@
 
 
 
-    SITE_PACKAGES,
+      SITE_PACKAGES,
   ])
 
   ALLOWED_SITE_PACKAGE_DIRS = set(
-    os.path.normcase(os.path.abspath(os.path.join(SITE_PACKAGES, path)))
-    for path in [
-
-  ])
+      os.path.normcase(os.path.abspath(os.path.join(SITE_PACKAGES, path)))
+      for path in [
+
+          ])
 
   ALLOWED_SITE_PACKAGE_FILES = set(
-    os.path.normcase(os.path.abspath(os.path.join(
-      os.path.dirname(os.__file__), 'site-packages', path)))
-    for path in itertools.chain(*[
-
-      [os.path.join('Crypto')],
-      GeneratePythonPaths('Crypto', '__init__'),
-      [os.path.join('Crypto', 'Cipher')],
-      GeneratePythonPaths('Crypto', 'Cipher', '__init__'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'AES'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'ARC2'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'ARC4'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'Blowfish'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'CAST'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'DES'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'DES3'),
-      GeneratePythonPaths('Crypto', 'Cipher', 'XOR'),
-      [os.path.join('Crypto', 'Hash')],
-      GeneratePythonPaths('Crypto', 'Hash', '__init__'),
-      GeneratePythonPaths('Crypto', 'Hash', 'HMAC'),
-      os.path.join('Crypto', 'Hash', 'MD2'),
-      os.path.join('Crypto', 'Hash', 'MD4'),
-      GeneratePythonPaths('Crypto', 'Hash', 'MD5'),
-      GeneratePythonPaths('Crypto', 'Hash', 'SHA'),
-      os.path.join('Crypto', 'Hash', 'SHA256'),
-      os.path.join('Crypto', 'Hash', 'RIPEMD'),
-      [os.path.join('Crypto', 'Protocol')],
-      GeneratePythonPaths('Crypto', 'Protocol', '__init__'),
-      GeneratePythonPaths('Crypto', 'Protocol', 'AllOrNothing'),
-      GeneratePythonPaths('Crypto', 'Protocol', 'Chaffing'),
-      [os.path.join('Crypto', 'PublicKey')],
-      GeneratePythonPaths('Crypto', 'PublicKey', '__init__'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'DSA'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'ElGamal'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'RSA'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'pubkey'),
-      GeneratePythonPaths('Crypto', 'PublicKey', 'qNEW'),
-      [os.path.join('Crypto', 'Util')],
-      GeneratePythonPaths('Crypto', 'Util', '__init__'),
-      GeneratePythonPaths('Crypto', 'Util', 'RFC1751'),
-      GeneratePythonPaths('Crypto', 'Util', 'number'),
-      GeneratePythonPaths('Crypto', 'Util', 'randpool'),
-  ]))
+      os.path.normcase(os.path.abspath(os.path.join(
+          os.path.dirname(os.__file__), 'site-packages', path)))
+      for path in itertools.chain(*[
+
+          [os.path.join('Crypto')],
+          GeneratePythonPaths('Crypto', '__init__'),
+          [os.path.join('Crypto', 'Cipher')],
+          GeneratePythonPaths('Crypto', 'Cipher', '__init__'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'AES'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'ARC2'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'ARC4'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'Blowfish'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'CAST'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'DES'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'DES3'),
+          GeneratePythonPaths('Crypto', 'Cipher', 'XOR'),
+          [os.path.join('Crypto', 'Hash')],
+          GeneratePythonPaths('Crypto', 'Hash', '__init__'),
+          GeneratePythonPaths('Crypto', 'Hash', 'HMAC'),
+          os.path.join('Crypto', 'Hash', 'MD2'),
+          os.path.join('Crypto', 'Hash', 'MD4'),
+          GeneratePythonPaths('Crypto', 'Hash', 'MD5'),
+          GeneratePythonPaths('Crypto', 'Hash', 'SHA'),
+          os.path.join('Crypto', 'Hash', 'SHA256'),
+          os.path.join('Crypto', 'Hash', 'RIPEMD'),
+          [os.path.join('Crypto', 'Protocol')],
+          GeneratePythonPaths('Crypto', 'Protocol', '__init__'),
+          GeneratePythonPaths('Crypto', 'Protocol', 'AllOrNothing'),
+          GeneratePythonPaths('Crypto', 'Protocol', 'Chaffing'),
+          [os.path.join('Crypto', 'PublicKey')],
+          GeneratePythonPaths('Crypto', 'PublicKey', '__init__'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'DSA'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'ElGamal'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'RSA'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'pubkey'),
+          GeneratePythonPaths('Crypto', 'PublicKey', 'qNEW'),
+          [os.path.join('Crypto', 'Util')],
+          GeneratePythonPaths('Crypto', 'Util', '__init__'),
+          GeneratePythonPaths('Crypto', 'Util', 'RFC1751'),
+          GeneratePythonPaths('Crypto', 'Util', 'number'),
+          GeneratePythonPaths('Crypto', 'Util', 'randpool'),
+          ]))
 
   _original_file = file
 
@@ -912,7 +931,7 @@
 
   @staticmethod
   def SetAllowSkippedFiles(allow_skipped_files):
-    """Configures access to files matching FakeFile._skip_files
+    """Configures access to files matching FakeFile._skip_files.
 
     Args:
       allow_skipped_files: Boolean whether to allow access to skipped files
@@ -1106,6 +1125,7 @@
   return fullname.rsplit('.', 1)[-1]
 
 
+
 class CouldNotFindModuleError(ImportError):
   """Raised when a module could not be found.
 
@@ -1115,10 +1135,19 @@
 
 
 def Trace(func):
-  """Decorator that logs the call stack of the HardenedModulesHook class as
+  """Call stack logging decorator for HardenedModulesHook class.
+
+  This decorator logs the call stack of the HardenedModulesHook class as
   it executes, indenting logging messages based on the current stack depth.
+
+  Args:
+    func: the function to decorate.
+
+  Returns:
+    The decorated function.
   """
-  def decorate(self, *args, **kwargs):
+
+  def Decorate(self, *args, **kwargs):
     args_to_show = []
     if args is not None:
       args_to_show.extend(str(argument) for argument in args)
@@ -1136,7 +1165,7 @@
       self._indent_level -= 1
       self.log('Exiting %s(%s)', func.func_name, args_string)
 
-  return decorate
+  return Decorate
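 
Trace assumes its host class provides _indent_level and log(), as
HardenedModulesHook below does; a hypothetical sketch:

    class TracedExample(object):
      """Hypothetical host; Trace uses self._indent_level and self.log()."""

      def __init__(self):
        self._indent_level = 0

      def log(self, message, *args):
        print '  ' * self._indent_level + message % args

      @Trace
      def Lookup(self, name):
        return name.upper()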
 
 
 class HardenedModulesHook(object):
@@ -1173,229 +1202,229 @@
       print >>sys.stderr, indent + (message % args)
 
   _WHITE_LIST_C_MODULES = [
-    'AES',
-    'ARC2',
-    'ARC4',
-    'Blowfish',
-    'CAST',
-    'DES',
-    'DES3',
-    'MD2',
-    'MD4',
-    'RIPEMD',
-    'SHA256',
-    'XOR',
-
-    '_Crypto_Cipher__AES',
-    '_Crypto_Cipher__ARC2',
-    '_Crypto_Cipher__ARC4',
-    '_Crypto_Cipher__Blowfish',
-    '_Crypto_Cipher__CAST',
-    '_Crypto_Cipher__DES',
-    '_Crypto_Cipher__DES3',
-    '_Crypto_Cipher__XOR',
-    '_Crypto_Hash__MD2',
-    '_Crypto_Hash__MD4',
-    '_Crypto_Hash__RIPEMD',
-    '_Crypto_Hash__SHA256',
-    'array',
-    'binascii',
-    'bz2',
-    'cmath',
-    'collections',
-    'crypt',
-    'cStringIO',
-    'datetime',
-    'errno',
-    'exceptions',
-    'gc',
-    'itertools',
-    'math',
-    'md5',
-    'operator',
-    'posix',
-    'posixpath',
-    'pyexpat',
-    'sha',
-    'struct',
-    'sys',
-    'time',
-    'timing',
-    'unicodedata',
-    'zlib',
-    '_ast',
-    '_bisect',
-    '_codecs',
-    '_codecs_cn',
-    '_codecs_hk',
-    '_codecs_iso2022',
-    '_codecs_jp',
-    '_codecs_kr',
-    '_codecs_tw',
-    '_collections',
-    '_csv',
-    '_elementtree',
-    '_functools',
-    '_hashlib',
-    '_heapq',
-    '_locale',
-    '_lsprof',
-    '_md5',
-    '_multibytecodec',
-    '_random',
-    '_sha',
-    '_sha256',
-    '_sha512',
-    '_sre',
-    '_struct',
-    '_types',
-    '_weakref',
-    '__main__',
+      'AES',
+      'ARC2',
+      'ARC4',
+      'Blowfish',
+      'CAST',
+      'DES',
+      'DES3',
+      'MD2',
+      'MD4',
+      'RIPEMD',
+      'SHA256',
+      'XOR',
+
+      '_Crypto_Cipher__AES',
+      '_Crypto_Cipher__ARC2',
+      '_Crypto_Cipher__ARC4',
+      '_Crypto_Cipher__Blowfish',
+      '_Crypto_Cipher__CAST',
+      '_Crypto_Cipher__DES',
+      '_Crypto_Cipher__DES3',
+      '_Crypto_Cipher__XOR',
+      '_Crypto_Hash__MD2',
+      '_Crypto_Hash__MD4',
+      '_Crypto_Hash__RIPEMD',
+      '_Crypto_Hash__SHA256',
+      'array',
+      'binascii',
+      'bz2',
+      'cmath',
+      'collections',
+      'crypt',
+      'cStringIO',
+      'datetime',
+      'errno',
+      'exceptions',
+      'gc',
+      'itertools',
+      'math',
+      'md5',
+      'operator',
+      'posix',
+      'posixpath',
+      'pyexpat',
+      'sha',
+      'struct',
+      'sys',
+      'time',
+      'timing',
+      'unicodedata',
+      'zlib',
+      '_ast',
+      '_bisect',
+      '_codecs',
+      '_codecs_cn',
+      '_codecs_hk',
+      '_codecs_iso2022',
+      '_codecs_jp',
+      '_codecs_kr',
+      '_codecs_tw',
+      '_collections',
+      '_csv',
+      '_elementtree',
+      '_functools',
+      '_hashlib',
+      '_heapq',
+      '_locale',
+      '_lsprof',
+      '_md5',
+      '_multibytecodec',
+      '_random',
+      '_sha',
+      '_sha256',
+      '_sha512',
+      '_sre',
+      '_struct',
+      '_types',
+      '_weakref',
+      '__main__',
   ]
 
   __CRYPTO_CIPHER_ALLOWED_MODULES = [
-    'MODE_CBC',
-    'MODE_CFB',
-    'MODE_CTR',
-    'MODE_ECB',
-    'MODE_OFB',
-    'block_size',
-    'key_size',
-    'new',
+      'MODE_CBC',
+      'MODE_CFB',
+      'MODE_CTR',
+      'MODE_ECB',
+      'MODE_OFB',
+      'block_size',
+      'key_size',
+      'new',
   ]
   _WHITE_LIST_PARTIAL_MODULES = {
-    'Crypto.Cipher.AES': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.ARC2': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.Blowfish': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.CAST': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.DES': __CRYPTO_CIPHER_ALLOWED_MODULES,
-    'Crypto.Cipher.DES3': __CRYPTO_CIPHER_ALLOWED_MODULES,
-
-    'gc': [
-      'enable',
-      'disable',
-      'isenabled',
-      'collect',
-      'get_debug',
-      'set_threshold',
-      'get_threshold',
-      'get_count'
-    ],
-
-
-
-    'os': [
-      'access',
-      'altsep',
-      'curdir',
-      'defpath',
-      'devnull',
-      'environ',
-      'error',
-      'extsep',
-      'EX_NOHOST',
-      'EX_NOINPUT',
-      'EX_NOPERM',
-      'EX_NOUSER',
-      'EX_OK',
-      'EX_OSERR',
-      'EX_OSFILE',
-      'EX_PROTOCOL',
-      'EX_SOFTWARE',
-      'EX_TEMPFAIL',
-      'EX_UNAVAILABLE',
-      'EX_USAGE',
-      'F_OK',
-      'getcwd',
-      'getcwdu',
-      'getenv',
-      'listdir',
-      'lstat',
-      'name',
-      'NGROUPS_MAX',
-      'O_APPEND',
-      'O_CREAT',
-      'O_DIRECT',
-      'O_DIRECTORY',
-      'O_DSYNC',
-      'O_EXCL',
-      'O_LARGEFILE',
-      'O_NDELAY',
-      'O_NOCTTY',
-      'O_NOFOLLOW',
-      'O_NONBLOCK',
-      'O_RDONLY',
-      'O_RDWR',
-      'O_RSYNC',
-      'O_SYNC',
-      'O_TRUNC',
-      'O_WRONLY',
-      'open',
-      'pardir',
-      'path',
-      'pathsep',
-      'R_OK',
-      'readlink',
-      'remove',
-      'rename',
-      'SEEK_CUR',
-      'SEEK_END',
-      'SEEK_SET',
-      'sep',
-      'stat',
-      'stat_float_times',
-      'stat_result',
-      'strerror',
-      'TMP_MAX',
-      'unlink',
-      'urandom',
-      'utime',
-      'walk',
-      'WCOREDUMP',
-      'WEXITSTATUS',
-      'WIFEXITED',
-      'WIFSIGNALED',
-      'WIFSTOPPED',
-      'WNOHANG',
-      'WSTOPSIG',
-      'WTERMSIG',
-      'WUNTRACED',
-      'W_OK',
-      'X_OK',
-    ],
+      'Crypto.Cipher.AES': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.ARC2': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.Blowfish': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.CAST': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.DES': __CRYPTO_CIPHER_ALLOWED_MODULES,
+      'Crypto.Cipher.DES3': __CRYPTO_CIPHER_ALLOWED_MODULES,
+
+      'gc': [
+          'enable',
+          'disable',
+          'isenabled',
+          'collect',
+          'get_debug',
+          'set_threshold',
+          'get_threshold',
+          'get_count'
+      ],
+
+
+
+      'os': [
+          'access',
+          'altsep',
+          'curdir',
+          'defpath',
+          'devnull',
+          'environ',
+          'error',
+          'extsep',
+          'EX_NOHOST',
+          'EX_NOINPUT',
+          'EX_NOPERM',
+          'EX_NOUSER',
+          'EX_OK',
+          'EX_OSERR',
+          'EX_OSFILE',
+          'EX_PROTOCOL',
+          'EX_SOFTWARE',
+          'EX_TEMPFAIL',
+          'EX_UNAVAILABLE',
+          'EX_USAGE',
+          'F_OK',
+          'getcwd',
+          'getcwdu',
+          'getenv',
+          'listdir',
+          'lstat',
+          'name',
+          'NGROUPS_MAX',
+          'O_APPEND',
+          'O_CREAT',
+          'O_DIRECT',
+          'O_DIRECTORY',
+          'O_DSYNC',
+          'O_EXCL',
+          'O_LARGEFILE',
+          'O_NDELAY',
+          'O_NOCTTY',
+          'O_NOFOLLOW',
+          'O_NONBLOCK',
+          'O_RDONLY',
+          'O_RDWR',
+          'O_RSYNC',
+          'O_SYNC',
+          'O_TRUNC',
+          'O_WRONLY',
+          'open',
+          'pardir',
+          'path',
+          'pathsep',
+          'R_OK',
+          'readlink',
+          'remove',
+          'rename',
+          'SEEK_CUR',
+          'SEEK_END',
+          'SEEK_SET',
+          'sep',
+          'stat',
+          'stat_float_times',
+          'stat_result',
+          'strerror',
+          'TMP_MAX',
+          'unlink',
+          'urandom',
+          'utime',
+          'walk',
+          'WCOREDUMP',
+          'WEXITSTATUS',
+          'WIFEXITED',
+          'WIFSIGNALED',
+          'WIFSTOPPED',
+          'WNOHANG',
+          'WSTOPSIG',
+          'WTERMSIG',
+          'WUNTRACED',
+          'W_OK',
+          'X_OK',
+      ],
   }
 
   _MODULE_OVERRIDES = {
-    'locale': {
-      'setlocale': FakeSetLocale,
-    },
-
-    'os': {
-      'access': FakeAccess,
-      'listdir': RestrictedPathFunction(os.listdir),
-
-      'lstat': RestrictedPathFunction(os.stat),
-      'open': FakeOpen,
-      'readlink': FakeReadlink,
-      'remove': FakeUnlink,
-      'rename': FakeRename,
-      'stat': RestrictedPathFunction(os.stat),
-      'uname': FakeUname,
-      'unlink': FakeUnlink,
-      'urandom': FakeURandom,
-      'utime': FakeUTime,
-    },
-
-    'distutils.util': {
-      'get_platform': FakeGetPlatform,
-    },
+      'locale': {
+          'setlocale': FakeSetLocale,
+      },
+
+      'os': {
+          'access': FakeAccess,
+          'listdir': RestrictedPathFunction(os.listdir),
+
+          'lstat': RestrictedPathFunction(os.stat),
+          'open': FakeOpen,
+          'readlink': FakeReadlink,
+          'remove': FakeUnlink,
+          'rename': FakeRename,
+          'stat': RestrictedPathFunction(os.stat),
+          'uname': FakeUname,
+          'unlink': FakeUnlink,
+          'urandom': FakeURandom,
+          'utime': FakeUTime,
+      },
+
+      'distutils.util': {
+          'get_platform': FakeGetPlatform,
+      },
   }
 
   _ENABLED_FILE_TYPES = (
-    imp.PKG_DIRECTORY,
-    imp.PY_SOURCE,
-    imp.PY_COMPILED,
-    imp.C_BUILTIN,
+      imp.PKG_DIRECTORY,
+      imp.PY_SOURCE,
+      imp.PY_COMPILED,
+      imp.C_BUILTIN,
   )
 
   def __init__(self,
@@ -1822,6 +1851,7 @@
     return compile(source_code, full_path, 'exec')
 
 
+
 def ModuleHasValidMainFunction(module):
   """Determines if a module has a main function that takes no arguments.
 
@@ -1835,7 +1865,8 @@
     True if the module has a valid, reusable main function; False otherwise.
   """
   if hasattr(module, 'main') and type(module.main) is types.FunctionType:
-    arg_names, var_args, var_kwargs, default_values = inspect.getargspec(module.main)
+    arg_names, var_args, var_kwargs, default_values = inspect.getargspec(
+        module.main)
     if len(arg_names) == 0:
       return True
     if default_values is not None and len(arg_names) == len(default_values):
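
For reference, a minimal sketch of the reusability check above (the module is
hypothetical and assumes Python 2.5's inspect module):

import inspect

def main():
  pass

# getargspec returns (arg_names, varargs, varkw, defaults); a main() taking
# no arguments, or only defaulted ones, is considered reusable.
arg_names, var_args, var_kwargs, default_values = inspect.getargspec(main)
assert arg_names == [] and default_values is None
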
@@ -1878,6 +1909,7 @@
     cgi_path: Absolute path of the CGI module file on disk.
     module_fullname: Fully qualified Python module name used to import the
       cgi_path module.
+    isfile: Used for testing.
 
   Returns:
     List containing the paths to the missing __init__.py files.
@@ -1935,7 +1967,7 @@
   module_fullname = GetScriptModuleName(handler_path)
   script_module = module_dict.get(module_fullname)
   module_code = None
-  if script_module != None and ModuleHasValidMainFunction(script_module):
+  if script_module is not None and ModuleHasValidMainFunction(script_module):
     logging.debug('Reusing main() function of module "%s"', module_fullname)
   else:
     if script_module is None:
@@ -1944,7 +1976,8 @@
 
     try:
       module_code = import_hook.get_code(module_fullname)
-      full_path, search_path, submodule = import_hook.GetModuleInfo(module_fullname)
+      full_path, search_path, submodule = (
+        import_hook.GetModuleInfo(module_fullname))
       script_module.__file__ = full_path
       if search_path is not None:
         script_module.__path__ = search_path
@@ -1955,7 +1988,7 @@
         import_error_message += ': ' + str(exc_value)
 
       logging.exception('Encountered error loading module "%s": %s',
-                    module_fullname, import_error_message)
+                        module_fullname, import_error_message)
       missing_inits = FindMissingInitFiles(cgi_path, module_fullname)
       if missing_inits:
         logging.warning('Missing package initialization files: %s',
@@ -1989,8 +2022,10 @@
 
 
 def ExecuteOrImportScript(handler_path, cgi_path, import_hook):
-  """Executes a CGI script by importing it as a new module; possibly reuses
-  the module's main() function if it is defined and takes no arguments.
+  """Executes a CGI script by importing it as a new module.
+
+  This possibly reuses the module's main() function if it is defined and
+  takes no arguments.
 
   Basic technique lifted from PEP 338 and Python2.5's runpy module. See:
     http://www.python.org/dev/peps/pep-0338/
@@ -2260,6 +2295,7 @@
     return 'Local CGI dispatcher for %s' % self._cgi_func
 
 
+
 class PathAdjuster(object):
   """Adjusts application file paths to paths relative to the application or
   external library directories."""
@@ -2273,8 +2309,10 @@
     self._root_path = os.path.abspath(root_path)
 
   def AdjustPath(self, path):
-    """Adjusts application file path to paths relative to the application or
-    external library directories.
+    """Adjusts application file paths to relative to the application.
+
+    More precisely this method adjusts application file path to paths
+    relative to the application or external library directories.
 
     Handler paths that start with $PYTHON_LIB will be converted to paths
     relative to the google directory.
@@ -2294,6 +2332,7 @@
     return path
 
 
+
 class StaticFileConfigMatcher(object):
   """Keeps track of file/directory specific application configuration.
 
@@ -2382,13 +2421,13 @@
       String containing the mime type to use. Will be 'application/octet-stream'
       if we have no idea what it should be.
     """
-    for (path_re, mime_type, expiration) in self._patterns:
-      if mime_type is not None:
+    for (path_re, mimetype, unused_expiration) in self._patterns:
+      if mimetype is not None:
         the_match = path_re.match(path)
         if the_match:
-          return mime_type
-
-    filename, extension = os.path.splitext(path)
+          return mimetype
+
+    unused_filename, extension = os.path.splitext(path)
     return mimetypes.types_map.get(extension, 'application/octet-stream')
 
   def GetExpiration(self, path):
@@ -2400,7 +2439,7 @@
     Returns:
       Integer number of seconds to be used for browser cache expiration time.
     """
-    for (path_re, mime_type, expiration) in self._patterns:
+    for (path_re, unused_mimetype, expiration) in self._patterns:
       the_match = path_re.match(path)
       if the_match:
         return expiration
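
A minimal sketch of the fallback taken above when no handler pattern supplies
a mime type (the file names are illustrative):

import mimetypes
import os.path

unused_filename, extension = os.path.splitext('/static/style.css')
print mimetypes.types_map.get(extension, 'application/octet-stream')  # text/css
print mimetypes.types_map.get('.nosuch', 'application/octet-stream')  # fallback
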
@@ -2409,6 +2448,7 @@
 
 
 
+
 def ReadDataFile(data_path, openfile=file):
   """Reads a file on disk, returning a corresponding HTTP status and data.
 
@@ -2548,6 +2588,8 @@
   """Update the cache header."""
   if not 'Cache-Control' in headers:
     headers['Cache-Control'] = 'no-cache'
+    if not 'Expires' in headers:
+      headers['Expires'] = 'Fri, 01 Jan 1990 00:00:00 GMT'
   return status_code, status_message, headers, body
 
 
@@ -2609,7 +2651,7 @@
           ParseStatusRewriter,
           CacheRewriter,
           ContentLengthRewriter,
-  ]
+         ]
 
 
 def RewriteResponse(response_file, response_rewriters=None):
@@ -2663,6 +2705,7 @@
   return status_code, status_message, header_data, response_file.read()
 
 
+
 class ModuleManager(object):
   """Manages loaded modules in the runtime.
 
@@ -2695,7 +2738,7 @@
       Path of the module's corresponding Python source file if it exists, or
       just the module's compiled Python file. If the module has an invalid
       __file__ attribute, None will be returned.
-      """
+    """
     module_file = getattr(module, '__file__', None)
     if module_file is None:
       return None
@@ -2727,8 +2770,7 @@
     return False
 
   def UpdateModuleFileModificationTimes(self):
-    """Records the current modification times of all monitored modules.
-    """
+    """Records the current modification times of all monitored modules."""
     self._modification_times.clear()
     for name, module in self._modules.items():
       if not isinstance(module, types.ModuleType):
@@ -2750,23 +2792,29 @@
     sys.path_hooks[:] = self._save_path_hooks
 
 
+
 def _ClearTemplateCache(module_dict=sys.modules):
   """Clear template cache in webapp.template module.
 
   Attempts to load template module.  Ignores failure.  If module loads, the
   template cache is cleared.
+
+  Args:
+    module_dict: Used for dependency injection.
   """
   template_module = module_dict.get('google.appengine.ext.webapp.template')
   if template_module is not None:
     template_module.template_cache.clear()
 
 
+
 def CreateRequestHandler(root_path,
                          login_url,
                          require_indexes=False,
                          static_caching=True):
-  """Creates a new BaseHTTPRequestHandler sub-class for use with the Python
-  BaseHTTPServer module's HTTP server.
+  """Creates a new BaseHTTPRequestHandler sub-class.
+
+  This class will be used with the Python BaseHTTPServer module's HTTP server.
 
   Python's built-in HTTP server does not support passing context information
   along to instances of its request handlers. This function gets around that
@@ -2792,9 +2840,11 @@
   application_config_cache = AppConfigCache()
 
   class DevAppServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-    """Dispatches URLs using patterns from a URLMatcher, which is created by
-    loading an application's configuration file. Executes CGI scripts in the
-    local process so the scripts can use mock versions of APIs.
+    """Dispatches URLs using patterns from a URLMatcher.
+
+    The URLMatcher is created by loading an application's configuration file.
+    Executes CGI scripts in the local process so the scripts can use mock
+    versions of APIs.
 
     HTTP requests that correctly specify a user info cookie
     (dev_appserver_login.COOKIE_NAME) will have the 'USER_EMAIL' environment
@@ -2819,13 +2869,13 @@
       """Initializer.
 
       Args:
-        args, kwargs: Positional and keyword arguments passed to the constructor
-          of the super class.
+        args: Positional arguments passed to the superclass constructor.
+        kwargs: Keyword arguments passed to the superclass constructor.
       """
       BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
 
     def version_string(self):
-      """Returns server's version string used for Server HTTP header"""
+      """Returns server's version string used for Server HTTP header."""
       return self.server_version
 
     def do_GET(self):
@@ -2862,12 +2912,12 @@
       server_name = server_name.split(':', 1)[0]
 
       env_dict = {
-        'REQUEST_METHOD': self.command,
-        'REMOTE_ADDR': self.client_address[0],
-        'SERVER_SOFTWARE': self.server_version,
-        'SERVER_NAME': server_name,
-        'SERVER_PROTOCOL': self.protocol_version,
-        'SERVER_PORT': str(self.server.server_port),
+          'REQUEST_METHOD': self.command,
+          'REMOTE_ADDR': self.client_address[0],
+          'SERVER_SOFTWARE': self.server_version,
+          'SERVER_NAME': server_name,
+          'SERVER_PROTOCOL': self.protocol_version,
+          'SERVER_PORT': str(self.server.server_port),
       }
 
       full_url = GetFullURL(server_name, self.server.server_port, self.path)
@@ -2889,8 +2939,9 @@
                                                  cache=self.config_cache,
                                                  static_caching=static_caching)
         if config.api_version != API_VERSION:
-          logging.error("API versions cannot be switched dynamically: %r != %r"
-                        % (config.api_version, API_VERSION))
+          logging.error(
+              "API versions cannot be switched dynamically: %r != %r",
+              config.api_version, API_VERSION)
           sys.exit(1)
         env_dict['CURRENT_VERSION_ID'] = config.version + ".1"
         env_dict['APPLICATION_ID'] = config.application
@@ -2927,7 +2978,8 @@
         outfile.flush()
         outfile.seek(0)
 
-        status_code, status_message, header_data, body = RewriteResponse(outfile, self.rewriter_chain)
+        status_code, status_message, header_data, body = (
+            RewriteResponse(outfile, self.rewriter_chain))
 
         runtime_response_size = len(outfile.getvalue())
         if runtime_response_size > MAX_RUNTIME_RESPONSE_SIZE:
@@ -2984,6 +3036,7 @@
   return DevAppServerRequestHandler
 
 
+
 def ReadAppConfig(appinfo_path, parse_app_config=appinfo.LoadSingleAppInfo):
   """Reads app.yaml file and returns its app id and list of URLMap instances.
 
@@ -3001,9 +3054,9 @@
   """
   try:
     appinfo_file = file(appinfo_path, 'r')
-  except IOError, e:
+  except IOError, unused_e:
     raise InvalidAppConfigError(
-      'Application configuration could not be read from "%s"' % appinfo_path)
+        'Application configuration could not be read from "%s"' % appinfo_path)
   try:
     return parse_app_config(appinfo_file)
   finally:
@@ -3035,11 +3088,17 @@
     default_expiration: String describing default expiration time for browser
       based caching of static files.  If set to None this disallows any
       browser caching of static content.
-    create_url_matcher, create_cgi_dispatcher, create_file_dispatcher,
+    create_url_matcher: Used for dependency injection.
+    create_cgi_dispatcher: Used for dependency injection.
+    create_file_dispatcher: Used for dependency injection.
     create_path_adjuster: Used for dependency injection.
+    normpath: Used for dependency injection.
 
   Returns:
     Instance of URLMatcher with the supplied URLMap objects properly loaded.
+
+  Raises:
+    InvalidAppConfigError: if the handler in url_map_list is an unknown type.
   """
   url_matcher = create_url_matcher()
   path_adjuster = create_path_adjuster(root_path)
@@ -3121,10 +3180,14 @@
       sys.modules dictionary.
     cache: Instance of AppConfigCache or None.
     static_caching: True if browser caching of static files should be allowed.
-    read_app_config, create_matcher: Used for dependency injection.
+    read_app_config: Used for dependency injection.
+    create_matcher: Used for dependency injection.
 
   Returns:
      tuple: (AppInfoExternal, URLMatcher)
+
+  Raises:
+    AppConfigNotFound: if an app.yaml file cannot be found.
   """
   for appinfo_path in [os.path.join(root_path, 'app.yaml'),
                        os.path.join(root_path, 'app.yml')]:
@@ -3181,23 +3244,26 @@
   Raises:
     If the config file is unreadable, empty or invalid, this function will
     raise an InvalidAppConfigError or a MalformedCronConfiguration exception.
-    """
+  """
   try:
     croninfo_file = file(croninfo_path, 'r')
   except IOError, e:
     raise InvalidAppConfigError(
-        'Cron configuration could not be read from "%s"' % croninfo_path)
+        'Cron configuration could not be read from "%s": %s'
+        % (croninfo_path, e))
   try:
     return parse_cron_config(croninfo_file)
   finally:
     croninfo_file.close()
 
 
+
 def SetupStubs(app_id, **config):
   """Sets up testing stubs of APIs.
 
   Args:
     app_id: Application ID being served.
+    config: Keyword arguments; see the Keywords section below.
 
   Keywords:
     root_path: Root path to the directory of the application which should
@@ -3256,47 +3322,53 @@
                                 dev_appserver_login.LOGOUT_PARAM)
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'user',
-    user_service_stub.UserServiceStub(login_url=fixed_login_url,
-                                      logout_url=fixed_logout_url))
+      'user',
+      user_service_stub.UserServiceStub(login_url=fixed_login_url,
+                                        logout_url=fixed_logout_url))
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'urlfetch',
-    urlfetch_stub.URLFetchServiceStub())
+      'urlfetch',
+      urlfetch_stub.URLFetchServiceStub())
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'mail',
-    mail_stub.MailServiceStub(smtp_host,
-                              smtp_port,
-                              smtp_user,
-                              smtp_password,
-                              enable_sendmail=enable_sendmail,
-                              show_mail_body=show_mail_body))
+      'mail',
+      mail_stub.MailServiceStub(smtp_host,
+                                smtp_port,
+                                smtp_user,
+                                smtp_password,
+                                enable_sendmail=enable_sendmail,
+                                show_mail_body=show_mail_body))
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'memcache',
-    memcache_stub.MemcacheServiceStub())
+      'memcache',
+      memcache_stub.MemcacheServiceStub())
+
+  apiproxy_stub_map.apiproxy.RegisterStub(
+      'capability_service',
+      capability_stub.CapabilityServiceStub())
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'capability_service',
-    capability_stub.CapabilityServiceStub())
+      'taskqueue',
+      taskqueue_stub.TaskQueueServiceStub(root_path=root_path))
 
   apiproxy_stub_map.apiproxy.RegisterStub(
-    'taskqueue',
-    taskqueue_stub.TaskQueueServiceStub(root_path=root_path))
+      'xmpp',
+      xmpp_service_stub.XmppServiceStub())
+
 
 
   try:
     from google.appengine.api.images import images_stub
     apiproxy_stub_map.apiproxy.RegisterStub(
-      'images',
-      images_stub.ImagesServiceStub())
+        'images',
+        images_stub.ImagesServiceStub())
   except ImportError, e:
     logging.warning('Could not initialize images API; you are likely missing '
                     'the Python "PIL" module. ImportError: %s', e)
     from google.appengine.api.images import images_not_implemented_stub
-    apiproxy_stub_map.apiproxy.RegisterStub('images',
-      images_not_implemented_stub.ImagesNotImplementedServiceStub())
+    apiproxy_stub_map.apiproxy.RegisterStub(
+        'images',
+        images_not_implemented_stub.ImagesNotImplementedServiceStub())
 
 
 def CreateImplicitMatcher(module_dict,
@@ -3314,7 +3386,9 @@
     module_dict: Dictionary in the form used by sys.modules.
     root_path: Path to the root of the application.
     login_url: Relative URL which should be used for handling user login/logout.
+    create_path_adjuster: Used for dependency injection.
     create_local_dispatcher: Used for dependency injection.
+    create_cgi_dispatcher: Used for dependency injection.
 
   Returns:
     Instance of URLMatcher with appropriate dispatchers.
@@ -3393,6 +3467,7 @@
       are stored.
     serve_address: Address on which the server should serve.
     require_indexes: True if index.yaml is read-only gospel; default False.
+    allow_skipped_files: True if skipped files should be accessible.
     static_caching: True if browser caching of static files should be allowed.
     python_path_list: Used for dependency injection.
     sdk_dir: Directory where the SDK is stored.
@@ -3415,5 +3490,53 @@
 
   if absolute_root_path not in python_path_list:
     python_path_list.insert(0, absolute_root_path)
-
-  return BaseHTTPServer.HTTPServer((serve_address, port), handler_class)
+  return HTTPServerWithScheduler((serve_address, port), handler_class)
+
+
+class HTTPServerWithScheduler(BaseHTTPServer.HTTPServer):
+  """A BaseHTTPServer subclass that calls a method at a regular interval."""
+
+  def __init__(self, server_address, request_handler_class):
+    """Constructor.
+
+    Args:
+      server_address: the bind address of the server.
+      request_handler_class: class used to handle requests.
+    """
+    BaseHTTPServer.HTTPServer.__init__(self, server_address,
+                                       request_handler_class)
+    self._events = []
+
+  def get_request(self, time_func=time.time, select_func=select.select):
+    """Overrides the base get_request call.
+
+    Args:
+      time_func: used for testing.
+      select_func: used for testing.
+
+    Returns:
+      a (socket_object, address info) tuple.
+    """
+    while True:
+      if self._events:
+        current_time = time_func()
+        next_eta = self._events[0][0]
+        delay = next_eta - current_time
+      else:
+        delay = DEFAULT_SELECT_DELAY
+      readable, _, _ = select_func([self.socket], [], [], max(delay, 0))
+      if readable:
+        return self.socket.accept()
+      current_time = time_func()
+      if self._events and current_time >= self._events[0][0]:
+        unused_eta, runnable = heapq.heappop(self._events)
+        runnable()
+
+  def AddEvent(self, eta, runnable):
+    """Add a runnable event to be run at the specified time.
+
+    Args:
+      eta: when to run the event, in seconds since epoch.
+      runnable: a callable object.
+    """
+    heapq.heappush(self._events, (eta, runnable))
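
The loop in get_request() interleaves socket accepts with timed callbacks from
the event heap. A minimal usage sketch (the handler class and port are
hypothetical; HTTPServerWithScheduler is assumed importable from the
dev_appserver module above):

import time
import BaseHTTPServer
from google.appengine.tools import dev_appserver

class NoopHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  def do_GET(self):
    self.send_response(200)
    self.end_headers()

HTTPServer = dev_appserver.HTTPServerWithScheduler
server = HTTPServer(('localhost', 8080), NoopHandler)
# Run a callable roughly five seconds from now; get_request() pops it off the
# heap between accepts once its ETA has passed.
server.AddEvent(time.time() + 5, lambda: None)
server.serve_forever()
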
--- a/thirdparty/google_appengine/google/appengine/tools/dev_appserver_main.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/appengine/tools/dev_appserver_main.py	Sun Sep 06 23:31:53 2009 +0200
@@ -68,10 +68,15 @@
 import logging
 import os
 import re
+import signal
 import sys
 import traceback
 import tempfile
 
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(levelname)-8s %(asctime)s %(filename)s:%(lineno)s] %(message)s')
+
 
 def SetGlobals():
   """Set various global variables involving the 'google' package.
@@ -379,6 +384,14 @@
   return server
 
 
+def SigTermHandler(signum, frame):
+  """Handler for TERM signal.
+
+  Raises a KeyboardInterrupt to perform a graceful shutdown on SIGTERM signal.
+  """
+  raise KeyboardInterrupt()
+
+
 def main(argv):
   """Runs the development application server."""
   args, option_dict = ParseArguments(argv)
@@ -419,9 +432,7 @@
 
   option_dict['root_path'] = os.path.realpath(root_path)
 
-  logging.basicConfig(
-    level=log_level,
-    format='%(levelname)-8s %(asctime)s %(filename)s:%(lineno)s] %(message)s')
+  logging.getLogger().setLevel(log_level)
 
   config = None
   try:
@@ -461,6 +472,8 @@
       allow_skipped_files=allow_skipped_files,
       static_caching=static_caching)
 
+  signal.signal(signal.SIGTERM, SigTermHandler)
+
   logging.info('Running application %s on port %d: http://%s:%d',
                config.application, port, serve_address, port)
   try:
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/remote_api_shell.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""An interactive python shell that uses remote_api.
+
+Usage:
+  remote_api_shell.py [-s HOSTNAME] APPID [PATH]
+"""
+
+
+import atexit
+import code
+import getpass
+import optparse
+import os
+import sys
+
+try:
+  import readline
+except ImportError:
+  readline = None
+
+from google.appengine.ext.remote_api import remote_api_stub
+
+from google.appengine.api import datastore
+from google.appengine.api import memcache
+from google.appengine.api import urlfetch
+from google.appengine.api import users
+from google.appengine.ext import db
+from google.appengine.ext import search
+
+
+HISTORY_PATH = os.path.expanduser('~/.remote_api_shell_history')
+DEFAULT_PATH = '/remote_api'
+BANNER = """App Engine remote_api shell
+Python %s
+The db, users, urlfetch, and memcache modules are imported.""" % sys.version
+
+
+def auth_func():
+  return (raw_input('Email: '), getpass.getpass('Password: '))
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--server', dest='server',
+                    help='The hostname your app is deployed on. '
+                         'Defaults to <app_id>.appspot.com.')
+  (options, args) = parser.parse_args()
+
+  if not args or len(args) > 2:
+    print >> sys.stderr, __doc__
+    if len(args) > 2:
+      print >> sys.stderr, 'Unexpected arguments: %s' % args[2:]
+    sys.exit(1)
+
+  appid = args[0]
+  if len(args) == 2:
+    path = args[1]
+  else:
+    path = DEFAULT_PATH
+
+  remote_api_stub.ConfigureRemoteApi(appid, path, auth_func,
+                                     servername=options.server)
+  remote_api_stub.MaybeInvokeAuthentication()
+
+  os.environ['SERVER_SOFTWARE'] = 'Development (remote_api_shell)/1.0'
+
+  sys.ps1 = '%s> ' % appid
+  if readline is not None:
+    readline.parse_and_bind('tab: complete')
+    atexit.register(lambda: readline.write_history_file(HISTORY_PATH))
+    if os.path.exists(HISTORY_PATH):
+      readline.read_history_file(HISTORY_PATH)
+
+  code.interact(banner=BANNER, local=globals())
+
+
+if __name__ == '__main__':
+  main(sys.argv)
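
A hypothetical session (the app id, hostname, and query are illustrative; the
shell prompts for credentials before connecting):

$ python remote_api_shell.py -s myapp.appspot.com myapp
Email: admin@example.com
Password:
App Engine remote_api shell
myapp> db.GqlQuery('SELECT * FROM Greeting').count()
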
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/appengine/tools/requeue.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A thread-safe queue in which removed objects put back to the front."""
+
+
+import logging
+import Queue
+import threading
+import time
+
+logger = logging.getLogger('google.appengine.tools.requeue')
+
+
+class ReQueue(object):
+  """A special thread-safe queue.
+
+  A ReQueue allows unfinished work items to be returned with a call to
+  reput().  When an item is reput, task_done() should *not* be called;
+  in addition, getting an item that has been reput does not increase
+  the number of outstanding tasks.
+
+  This class shares an interface with Queue.Queue and provides the
+  additional reput method.
+  """
+
+  def __init__(self,
+               queue_capacity,
+               requeue_capacity=None,
+               queue_factory=Queue.Queue,
+               get_time=time.time):
+    """Initialize a ReQueue instance.
+
+    Args:
+      queue_capacity: The number of items that can be put in the ReQueue.
+      requeue_capacity: The number of items that can be reput in the ReQueue.
+      queue_factory: Used for dependency injection.
+      get_time: Used for dependency injection.
+    """
+    if requeue_capacity is None:
+      requeue_capacity = queue_capacity
+
+    self.get_time = get_time
+    self.queue = queue_factory(queue_capacity)
+    self.requeue = queue_factory(requeue_capacity)
+    self.lock = threading.Lock()
+    self.put_cond = threading.Condition(self.lock)
+    self.get_cond = threading.Condition(self.lock)
+
+  def _DoWithTimeout(self,
+                     action,
+                     exc,
+                     wait_cond,
+                     done_cond,
+                     lock,
+                     timeout=None,
+                     block=True):
+    """Performs the given action with a timeout.
+
+    The action must be non-blocking, and raise an instance of exc on a
+    recoverable failure.  If the action fails with an instance of exc,
+    we wait on wait_cond before trying again.  Failure after the
+    timeout is reached is propagated as an exception.  Success is
+    signalled by notifying on done_cond and returning the result of
+    the action.  If action raises any exception besides an instance of
+    exc, it is immediately propagated.
+
+    Args:
+      action: A callable that performs a non-blocking action.
+      exc: An exception type that is thrown by the action to indicate
+        a recoverable error.
+      wait_cond: A condition variable which should be waited on when
+        action throws exc.
+      done_cond: A condition variable to signal if the action returns.
+      lock: The lock used by wait_cond and done_cond.
+      timeout: A non-negative float indicating the maximum time to wait.
+      block: Whether to block if the action cannot complete immediately.
+
+    Returns:
+      The result of the action, if it is successful.
+
+    Raises:
+      ValueError: If the timeout argument is negative.
+    """
+    if timeout is not None and timeout < 0.0:
+      raise ValueError('\'timeout\' must not be a negative number')
+    if not block:
+      timeout = 0.0
+    result = None
+    success = False
+    start_time = self.get_time()
+    lock.acquire()
+    try:
+      while not success:
+        try:
+          result = action()
+          success = True
+        except Exception, e:
+          if not isinstance(e, exc):
+            raise e
+          if timeout is not None:
+            elapsed_time = self.get_time() - start_time
+            timeout -= elapsed_time
+            if timeout <= 0.0:
+              raise e
+          wait_cond.wait(timeout)
+    finally:
+      if success:
+        done_cond.notify()
+      lock.release()
+    return result
+
+  def put(self, item, block=True, timeout=None):
+    """Put an item into the requeue.
+
+    Args:
+      item: An item to add to the requeue.
+      block: Whether to block if the requeue is full.
+      timeout: Maximum time to wait until the queue is non-full.
+
+    Raises:
+      Queue.Full if the queue is full and the timeout expires.
+    """
+    def PutAction():
+      self.queue.put(item, block=False)
+    self._DoWithTimeout(PutAction,
+                        Queue.Full,
+                        self.get_cond,
+                        self.put_cond,
+                        self.lock,
+                        timeout=timeout,
+                        block=block)
+
+  def reput(self, item, block=True, timeout=None):
+    """Re-put an item back into the requeue.
+
+    Re-putting an item does not increase the number of outstanding
+    tasks, so the reput item should be uniquely associated with an
+    item that was previously removed from the requeue and for which
+    TaskDone has not been called.
+
+    Args:
+      item: An item to add to the requeue.
+      block: Whether to block if the requeue is full.
+      timeout: Maximum time to wait until the queue is non-full.
+
+    Raises:
+      Queue.Full if the queue is full and the timeout expires.
+    """
+    def ReputAction():
+      self.requeue.put(item, block=False)
+    self._DoWithTimeout(ReputAction,
+                        Queue.Full,
+                        self.get_cond,
+                        self.put_cond,
+                        self.lock,
+                        timeout=timeout,
+                        block=block)
+
+  def get(self, block=True, timeout=None):
+    """Get an item from the requeue.
+
+    Args:
+      block: Whether to block if the requeue is empty.
+      timeout: Maximum time to wait until the requeue is non-empty.
+
+    Returns:
+      An item from the requeue.
+
+    Raises:
+      Queue.Empty if the queue is empty and the timeout expires.
+    """
+    def GetAction():
+      try:
+        result = self.requeue.get(block=False)
+        self.requeue.task_done()
+      except Queue.Empty:
+        result = self.queue.get(block=False)
+      return result
+    return self._DoWithTimeout(GetAction,
+                               Queue.Empty,
+                               self.put_cond,
+                               self.get_cond,
+                               self.lock,
+                               timeout=timeout,
+                               block=block)
+
+  def join(self):
+    """Blocks until all of the items in the requeue have been processed."""
+    self.queue.join()
+
+  def task_done(self):
+    """Indicate that a previously enqueued item has been fully processed."""
+    self.queue.task_done()
+
+  def empty(self):
+    """Returns true if the requeue is empty."""
+    return self.queue.empty() and self.requeue.empty()
+
+  def get_nowait(self):
+    """Try to get an item from the queue without blocking."""
+    return self.get(block=False)
+
+  def qsize(self):
+    return self.queue.qsize() + self.requeue.qsize()
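
A minimal producer/consumer sketch of the reput() contract (the worker and its
exception are stand-ins; ReQueue is assumed importable from this module):

from google.appengine.tools.requeue import ReQueue

class TransientError(Exception):  # stand-in for a recoverable failure
  pass

def process(item):                # stand-in worker
  print 'processing', item

q = ReQueue(10)
q.put('work-item')
item = q.get()
try:
  process(item)
except TransientError:
  q.reput(item)   # hand the item back; do NOT call task_done()
else:
  q.task_done()
q.join()          # returns once every put() item is fully processed
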
--- a/thirdparty/google_appengine/google/net/proto/ProtocolBuffer.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/google/net/proto/ProtocolBuffer.py	Sun Sep 06 23:31:53 2009 +0200
@@ -344,13 +344,13 @@
 
   def putFloat(self, v):
     a = array.array('B')
-    a.fromstring(struct.pack("f", v))
+    a.fromstring(struct.pack("<f", v))
     self.buf.extend(a)
     return
 
   def putDouble(self, v):
     a = array.array('B')
-    a.fromstring(struct.pack("d", v))
+    a.fromstring(struct.pack("<d", v))
     self.buf.extend(a)
     return
 
@@ -362,6 +362,7 @@
     return
 
   def putPrefixedString(self, v):
+    v = str(v)
     self.putVarInt32(len(v))
     self.buf.fromstring(v)
     return
@@ -499,13 +500,13 @@
     if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError, "truncated"
     a = self.buf[self.idx:self.idx+4]
     self.idx += 4
-    return struct.unpack("f", a)[0]
+    return struct.unpack("<f", a)[0]
 
   def getDouble(self):
     if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError, "truncated"
     a = self.buf[self.idx:self.idx+8]
     self.idx += 8
-    return struct.unpack("d", a)[0]
+    return struct.unpack("<d", a)[0]
 
   def getBoolean(self):
     b = self.get8()
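
The added '<' prefix pins struct to little-endian, standard-size encoding, so
the wire format no longer depends on the host's native byte order. A brief
illustration (the values are arbitrary):

import struct

# Native order ('f') can differ between platforms; '<f' is identical
# everywhere.
print struct.pack('<f', 1.5).encode('hex')             # 0000c03f on any host
print struct.unpack('<d', struct.pack('<d', 2.25))[0]  # 2.25
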
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/google/net/proto/message_set.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,291 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""This module contains the MessageSet class, which is a special kind of
+protocol message which can contain other protocol messages without knowing
+their types.  See the class's doc string for more information."""
+
+
+from google.net.proto import ProtocolBuffer
+import logging
+
+TAG_BEGIN_ITEM_GROUP = 11
+TAG_END_ITEM_GROUP   = 12
+TAG_TYPE_ID          = 16
+TAG_MESSAGE          = 26
+
+class Item:
+
+  def __init__(self, message, message_class=None):
+    self.message = message
+    self.message_class = message_class
+
+  def SetToDefaultInstance(self, message_class):
+    self.message = message_class()
+    self.message_class = message_class
+
+  def Parse(self, message_class):
+
+    if self.message_class is not None:
+      return 1
+
+    try:
+      self.message = message_class(self.message)
+      self.message_class = message_class
+      return 1
+    except ProtocolBuffer.ProtocolBufferDecodeError:
+      logging.warn("Parse error in message inside MessageSet.  Tried "
+                   "to parse as: " + message_class.__name__)
+      return 0
+
+  def MergeFrom(self, other):
+
+    if self.message_class is not None:
+      if other.Parse(self.message_class):
+        self.message.MergeFrom(other.message)
+
+    elif other.message_class is not None:
+      if not self.Parse(other.message_class):
+        self.message = other.message_class()
+        self.message_class = other.message_class
+      self.message.MergeFrom(other.message)
+
+    else:
+      self.message += other.message
+
+  def Copy(self):
+
+    if self.message_class is None:
+      return Item(self.message)
+    else:
+      new_message = self.message_class()
+      new_message.CopyFrom(self.message)
+      return Item(new_message, self.message_class)
+
+  def Equals(self, other):
+
+    if self.message_class is not None:
+      if not other.Parse(self.message_class): return 0
+      return self.message.Equals(other.message)
+
+    elif other.message_class is not None:
+      if not self.Parse(other.message_class): return 0
+      return self.message.Equals(other.message)
+
+    else:
+      return self.message == other.message
+
+  def IsInitialized(self, debug_strs=None):
+
+    if self.message_class is None:
+      return 1
+    else:
+      return self.message.IsInitialized(debug_strs)
+
+  def ByteSize(self, pb, type_id):
+
+    message_length = 0
+    if self.message_class is None:
+      message_length = len(self.message)
+    else:
+      message_length = self.message.ByteSize()
+
+    return pb.lengthString(message_length) + pb.lengthVarInt64(type_id) + 2
+
+  def OutputUnchecked(self, out, type_id):
+
+    out.putVarInt32(TAG_TYPE_ID)
+    out.putVarUint64(type_id)
+    out.putVarInt32(TAG_MESSAGE)
+    if self.message_class is None:
+      out.putPrefixedString(self.message)
+    else:
+      out.putVarInt32(self.message.ByteSize())
+      self.message.OutputUnchecked(out)
+
+  def Decode(decoder):
+
+    type_id = 0
+    message = None
+    while 1:
+      tag = decoder.getVarInt32()
+      if tag == TAG_END_ITEM_GROUP:
+        break
+      if tag == TAG_TYPE_ID:
+        type_id = decoder.getVarUint64()
+        continue
+      if tag == TAG_MESSAGE:
+        message = decoder.getPrefixedString()
+        continue
+      if tag == 0: raise ProtocolBuffer.ProtocolBufferDecodeError
+      decoder.skipData(tag)
+
+    if type_id == 0 or message is None:
+      raise ProtocolBuffer.ProtocolBufferDecodeError
+    return (type_id, message)
+  Decode = staticmethod(Decode)
+
+
+class MessageSet(ProtocolBuffer.ProtocolMessage):
+
+  def __init__(self, contents=None):
+    self.items = dict()
+    if contents is not None: self.MergeFromString(contents)
+
+
+  def get(self, message_class):
+
+    if message_class.MESSAGE_TYPE_ID not in self.items:
+      return message_class()
+    item = self.items[message_class.MESSAGE_TYPE_ID]
+    if item.Parse(message_class):
+      return item.message
+    else:
+      return message_class()
+
+  def mutable(self, message_class):
+
+    if message_class.MESSAGE_TYPE_ID not in self.items:
+      message = message_class()
+      self.items[message_class.MESSAGE_TYPE_ID] = Item(message, message_class)
+      return message
+    item = self.items[message_class.MESSAGE_TYPE_ID]
+    if not item.Parse(message_class):
+      item.SetToDefaultInstance(message_class)
+    return item.message
+
+  def has(self, message_class):
+
+    if message_class.MESSAGE_TYPE_ID not in self.items:
+      return 0
+    item = self.items[message_class.MESSAGE_TYPE_ID]
+    return item.Parse(message_class)
+
+  def has_unparsed(self, message_class):
+    return message_class.MESSAGE_TYPE_ID in self.items
+
+  def GetTypeIds(self):
+    return self.items.keys()
+
+  def NumMessages(self):
+    return len(self.items)
+
+  def remove(self, message_class):
+    if message_class.MESSAGE_TYPE_ID in self.items:
+      del self.items[message_class.MESSAGE_TYPE_ID]
+
+
+  def __getitem__(self, message_class):
+    if message_class.MESSAGE_TYPE_ID not in self.items:
+      raise KeyError(message_class)
+    item = self.items[message_class.MESSAGE_TYPE_ID]
+    if item.Parse(message_class):
+      return item.message
+    else:
+      raise KeyError(message_class)
+
+  def __setitem__(self, message_class, message):
+    self.items[message_class.MESSAGE_TYPE_ID] = Item(message, message_class)
+
+  def __contains__(self, message_class):
+    return self.has(message_class)
+
+  def __delitem__(self, message_class):
+    self.remove(message_class)
+
+  def __len__(self):
+    return len(self.items)
+
+
+  def MergeFrom(self, other):
+
+    assert other is not self
+
+    for (type_id, item) in other.items.items():
+      if type_id in self.items:
+        self.items[type_id].MergeFrom(item)
+      else:
+        self.items[type_id] = item.Copy()
+
+  def Equals(self, other):
+    if other is self: return 1
+    if len(self.items) != len(other.items): return 0
+
+    for (type_id, item) in other.items.items():
+      if type_id not in self.items: return 0
+      if not self.items[type_id].Equals(item): return 0
+
+    return 1
+
+  def __eq__(self, other):
+    return ((other is not None)
+        and (other.__class__ == self.__class__)
+        and self.Equals(other))
+
+  def __ne__(self, other):
+    return not (self == other)
+
+  def IsInitialized(self, debug_strs=None):
+
+    initialized = 1
+    for item in self.items.values():
+      if not item.IsInitialized(debug_strs):
+        initialized = 0
+    return initialized
+
+  def ByteSize(self):
+    n = 2 * len(self.items)
+    for (type_id, item) in self.items.items():
+      n += item.ByteSize(self, type_id)
+    return n
+
+  def Clear(self):
+    self.items = dict()
+
+  def OutputUnchecked(self, out):
+    for (type_id, item) in self.items.items():
+      out.putVarInt32(TAG_BEGIN_ITEM_GROUP)
+      item.OutputUnchecked(out, type_id)
+      out.putVarInt32(TAG_END_ITEM_GROUP)
+
+  def TryMerge(self, decoder):
+    while decoder.avail() > 0:
+      tag = decoder.getVarInt32()
+      if tag == TAG_BEGIN_ITEM_GROUP:
+        (type_id, message) = Item.Decode(decoder)
+        if type_id in self.items:
+          self.items[type_id].MergeFrom(Item(message))
+        else:
+          self.items[type_id] = Item(message)
+        continue
+      if (tag == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+      decoder.skipData(tag)
+
+  def __str__(self, prefix="", printElemNumber=0):
+    text = ""
+    for (type_id, item) in self.items.items():
+      if item.message_class is None:
+        text += "%s[%d] <\n" % (prefix, type_id)
+        text += "%s  (%d bytes)\n" % (prefix, len(item.message))
+        text += "%s>\n" % prefix
+      else:
+        text += "%s[%s] <\n" % (prefix, item.message_class.__name__)
+        text += item.message.__str__(prefix + "  ", printElemNumber)
+        text += "%s>\n" % prefix
+    return text
+
+__all__ = ['MessageSet']
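
A minimal sketch of the MessageSet API (MyMessage stands in for any generated
message class defining MESSAGE_TYPE_ID, and set_payload() for one of its
setters; neither is defined here):

from google.net.proto.message_set import MessageSet

ms = MessageSet()
msg = ms.mutable(MyMessage)    # creates and registers a default instance
msg.set_payload('hello')
assert ms.has(MyMessage)
encoded = ms.Encode()          # inherited from ProtocolBuffer.ProtocolMessage
decoded = MessageSet(encoded)  # the constructor merges from an encoded string
print decoded.get(MyMessage)
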
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/lib/django/PKG-INFO	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,11 @@
+Metadata-Version: 1.0
+Name: Django
+Version: 0.96.4
+Summary: A high-level Python Web framework that encourages rapid development and clean, pragmatic design.
+Home-page: http://www.djangoproject.com/
+Author: Django Software Foundation
+Author-email: foundation@djangoproject.com
+License: UNKNOWN
+Download-URL: http://media.djangoproject.com/releases/0.96/Django-0.96.4.tar.gz
+Description: UNKNOWN
+Platform: UNKNOWN
--- a/thirdparty/google_appengine/lib/django/django/__init__.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/__init__.py	Sun Sep 06 23:31:53 2009 +0200
@@ -1,1 +1,1 @@
-VERSION = (0, 96.1, None)
+VERSION = (0, 96.4, None)
--- a/thirdparty/google_appengine/lib/django/django/conf/global_settings.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/conf/global_settings.py	Sun Sep 06 23:31:53 2009 +0200
@@ -237,7 +237,7 @@
 
 # The User-Agent string to use when checking for URL validity through the
 # isExistingURL validator.
-URL_VALIDATOR_USER_AGENT = "Django/0.96.1 (http://www.djangoproject.com)"
+URL_VALIDATOR_USER_AGENT = "Django/0.96.2 (http://www.djangoproject.com)"
 
 ##############
 # MIDDLEWARE #
--- a/thirdparty/google_appengine/lib/django/django/contrib/admin/templates/admin/login.html	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/contrib/admin/templates/admin/login.html	Sun Sep 06 23:31:53 2009 +0200
@@ -19,7 +19,6 @@
   <div class="form-row">
     <label for="id_password">{% trans 'Password:' %}</label> <input type="password" name="password" id="id_password" />
     <input type="hidden" name="this_is_the_login_form" value="1" />
-    <input type="hidden" name="post_data" value="{{ post_data }}" /> {#<span class="help">{% trans 'Have you <a href="/password_reset/">forgotten your password</a>?' %}</span>#}
   </div>
   <div class="submit-row">
     <label>&nbsp;</label><input type="submit" value="{% trans 'Log in' %}" />
--- a/thirdparty/google_appengine/lib/django/django/contrib/admin/views/decorators.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/contrib/admin/views/decorators.py	Sun Sep 06 23:31:53 2009 +0200
@@ -3,43 +3,21 @@
 from django.contrib.auth.models import User
 from django.contrib.auth import authenticate, login
 from django.shortcuts import render_to_response
+from django.utils.html import escape
 from django.utils.translation import gettext_lazy
-import base64, datetime, md5
-import cPickle as pickle
+import base64, datetime
 
 ERROR_MESSAGE = gettext_lazy("Please enter a correct username and password. Note that both fields are case-sensitive.")
 LOGIN_FORM_KEY = 'this_is_the_login_form'
 
 def _display_login_form(request, error_message=''):
     request.session.set_test_cookie()
-    if request.POST and request.POST.has_key('post_data'):
-        # User has failed login BUT has previously saved post data.
-        post_data = request.POST['post_data']
-    elif request.POST:
-        # User's session must have expired; save their post data.
-        post_data = _encode_post_data(request.POST)
-    else:
-        post_data = _encode_post_data({})
     return render_to_response('admin/login.html', {
         'title': _('Log in'),
-        'app_path': request.path,
-        'post_data': post_data,
+        'app_path': escape(request.path),
         'error_message': error_message
     }, context_instance=template.RequestContext(request))
 
-def _encode_post_data(post_data):
-    pickled = pickle.dumps(post_data)
-    pickled_md5 = md5.new(pickled + settings.SECRET_KEY).hexdigest()
-    return base64.encodestring(pickled + pickled_md5)
-
-def _decode_post_data(encoded_data):
-    encoded_data = base64.decodestring(encoded_data)
-    pickled, tamper_check = encoded_data[:-32], encoded_data[-32:]
-    if md5.new(pickled + settings.SECRET_KEY).hexdigest() != tamper_check:
-        from django.core.exceptions import SuspiciousOperation
-        raise SuspiciousOperation, "User may have tampered with session cookie."
-    return pickle.loads(pickled)
-
 def staff_member_required(view_func):
     """
     Decorator for views that checks that the user is logged in and is a staff
@@ -48,10 +26,6 @@
     def _checklogin(request, *args, **kwargs):
         if request.user.is_authenticated() and request.user.is_staff:
             # The user is valid. Continue to the admin page.
-            if request.POST.has_key('post_data'):
-                # User must have re-authenticated through a different window
-                # or tab.
-                request.POST = _decode_post_data(request.POST['post_data'])
             return view_func(request, *args, **kwargs)
 
         assert hasattr(request, 'session'), "The Django admin requires session middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.sessions.middleware.SessionMiddleware'."
@@ -59,7 +33,7 @@
         # If this isn't already the login page, display it.
         if not request.POST.has_key(LOGIN_FORM_KEY):
             if request.POST:
-                message = _("Please log in again, because your session has expired. Don't worry: Your submission has been saved.")
+                message = _("Please log in again, because your session has expired.")
             else:
                 message = ""
             return _display_login_form(request, message)
@@ -92,16 +66,7 @@
                 # TODO: set last_login with an event.
                 user.last_login = datetime.datetime.now()
                 user.save()
-                if request.POST.has_key('post_data'):
-                    post_data = _decode_post_data(request.POST['post_data'])
-                    if post_data and not post_data.has_key(LOGIN_FORM_KEY):
-                        # overwrite request.POST with the saved post_data, and continue
-                        request.POST = post_data
-                        request.user = user
-                        return view_func(request, *args, **kwargs)
-                    else:
-                        request.session.delete_test_cookie()
-                        return http.HttpResponseRedirect(request.path)
+                return http.HttpResponseRedirect(request.path)
             else:
                 return _display_login_form(request, ERROR_MESSAGE)
 
--- a/thirdparty/google_appengine/lib/django/django/core/management.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/core/management.py	Sun Sep 06 23:31:53 2009 +0200
@@ -1192,9 +1192,7 @@
         print "Development server is running at http://%s:%s/" % (addr, port)
         print "Quit the server with %s." % quit_command
         try:
-            import django
-            path = admin_media_dir or django.__path__[0] + '/contrib/admin/media'
-            handler = AdminMediaHandler(WSGIHandler(), path)
+            handler = AdminMediaHandler(WSGIHandler(), admin_media_path)
             run(addr, int(port), handler)
         except WSGIServerException, e:
             # Use helpful error messages instead of ugly tracebacks.
--- a/thirdparty/google_appengine/lib/django/django/core/servers/basehttp.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/django/core/servers/basehttp.py	Sun Sep 06 23:31:53 2009 +0200
@@ -11,6 +11,8 @@
 from types import ListType, StringType
 import os, re, sys, time, urllib
 
+from django.utils._os import safe_join
+
 __version__ = "0.1"
 __all__ = ['WSGIServer','WSGIRequestHandler','demo_app']
 
@@ -599,11 +601,25 @@
         self.application = application
         if not media_dir:
             import django
-            self.media_dir = django.__path__[0] + '/contrib/admin/media'
+            self.media_dir = \
+                os.path.join(django.__path__[0], 'contrib', 'admin', 'media')
         else:
             self.media_dir = media_dir
         self.media_url = settings.ADMIN_MEDIA_PREFIX
 
+    def file_path(self, url):
+        """
+        Returns the path to the media file on disk for the given URL.
+
+        The passed URL is assumed to begin with ADMIN_MEDIA_PREFIX.  If the
+        resultant file path is outside the media directory, then a ValueError
+        is raised.
+        """
+        # Remove ADMIN_MEDIA_PREFIX.
+        relative_url = url[len(self.media_url):]
+        relative_path = urllib.url2pathname(relative_url)
+        return safe_join(self.media_dir, relative_path)
+
     def __call__(self, environ, start_response):
         import os.path
 
@@ -614,19 +630,25 @@
             return self.application(environ, start_response)
 
         # Find the admin file and serve it up, if it exists and is readable.
-        relative_url = environ['PATH_INFO'][len(self.media_url):]
-        file_path = os.path.join(self.media_dir, relative_url)
+        try:
+            file_path = self.file_path(environ['PATH_INFO'])
+        except ValueError: # Resulting file path was not valid.
+            status = '404 NOT FOUND'
+            headers = {'Content-type': 'text/plain'}
+            output = ['Page not found: %s' % environ['PATH_INFO']]
+            start_response(status, headers.items())
+            return output
         if not os.path.exists(file_path):
             status = '404 NOT FOUND'
             headers = {'Content-type': 'text/plain'}
-            output = ['Page not found: %s' % file_path]
+            output = ['Page not found: %s' % environ['PATH_INFO']]
         else:
             try:
                 fp = open(file_path, 'rb')
             except IOError:
                 status = '401 UNAUTHORIZED'
                 headers = {'Content-type': 'text/plain'}
-                output = ['Permission denied: %s' % file_path]
+                output = ['Permission denied: %s' % environ['PATH_INFO']]
             else:
                 status = '200 OK'
                 headers = {}
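
The switch to safe_join() closes a directory-traversal hole: a crafted URL can
no longer resolve to a file outside the media directory. A brief illustration
(the paths are hypothetical):

from django.utils._os import safe_join

print safe_join('/srv/admin-media', 'css/base.css')
# -> /srv/admin-media/css/base.css

safe_join('/srv/admin-media', '../../etc/passwd')
# raises ValueError, which __call__() above reports as a 404
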
--- a/thirdparty/google_appengine/lib/django/setup.py	Sat Sep 05 14:04:24 2009 +0200
+++ b/thirdparty/google_appengine/lib/django/setup.py	Sun Sep 06 23:31:53 2009 +0200
@@ -34,10 +34,11 @@
 
 setup(
     name = "Django",
-    version = "0.96.1",
+    version = "0.96.4",
     url = 'http://www.djangoproject.com/',
-    author = 'Lawrence Journal-World',
-    author_email = 'holovaty@gmail.com',
+    author = 'Django Software Foundation',
+    author_email = 'foundation@djangoproject.com',
+    download_url = 'http://media.djangoproject.com/releases/0.96/Django-0.96.4.tar.gz',
     description = 'A high-level Python Web framework that encourages rapid development and clean, pragmatic design.',
     packages = packages,
     data_files = data_files,
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thirdparty/google_appengine/remote_api_shell.py	Sun Sep 06 23:31:53 2009 +0200
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Convenience wrapper for starting an appengine tool."""
+
+
+import os
+import sys
+
+if not hasattr(sys, 'version_info'):
+  sys.stderr.write('Very old versions of Python are not supported. Please '
+                   'use version 2.5 or greater.\n')
+  sys.exit(1)
+version_tuple = tuple(sys.version_info[:2])
+if version_tuple < (2, 4):
+  sys.stderr.write('Error: Python %d.%d is not supported. Please use '
+                   'version 2.5 or greater.\n' % version_tuple)
+  sys.exit(1)
+if version_tuple == (2, 4):
+  sys.stderr.write('Warning: Python 2.4 is not supported; this program may '
+                   'break. Please use version 2.5 or greater.\n')
+
+DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
+SCRIPT_DIR = os.path.join(DIR_PATH, 'google', 'appengine', 'tools')
+
+EXTRA_PATHS = [
+  DIR_PATH,
+  os.path.join(DIR_PATH, 'lib', 'antlr3'),
+  os.path.join(DIR_PATH, 'lib', 'django'),
+  os.path.join(DIR_PATH, 'lib', 'webob'),
+  os.path.join(DIR_PATH, 'lib', 'yaml', 'lib'),
+]
+
+SCRIPT_EXCEPTIONS = {
+  "dev_appserver.py" : "dev_appserver_main.py"
+}
+
+def run_file(file_path, globals_, script_dir=SCRIPT_DIR):
+  """Execute the file at the specified path with the passed-in globals."""
+  sys.path = EXTRA_PATHS + sys.path
+  script_name = os.path.basename(file_path)
+  script_name = SCRIPT_EXCEPTIONS.get(script_name, script_name)
+  script_path = os.path.join(script_dir, script_name)
+  execfile(script_path, globals_)
+
+if __name__ == '__main__':
+  run_file(__file__, globals())